Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: niagara2 - switch to skcipher API

Commit 7a7ffe65c8c5 ("crypto: skcipher - Add top-level skcipher interface")
dated 20 August 2015, introduced the new skcipher API, which is supposed to
replace both blkcipher and ablkcipher. While all consumers of the API have
been converted long ago, some producers of the ablkcipher API remain, forcing
us to keep the ablkcipher support routines alive, along with the matching
code to expose [a]blkciphers via the skcipher API.

So switch this driver to the skcipher API, allowing us to finally drop the
ablkcipher code in the near future.

Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel; committed by Herbert Xu
23a6564a 47ece481

+95 -97
+95 -97
drivers/crypto/n2_core.c
··· 23 23 #include <linux/sched.h> 24 24 25 25 #include <crypto/internal/hash.h> 26 + #include <crypto/internal/skcipher.h> 26 27 #include <crypto/scatterwalk.h> 27 28 #include <crypto/algapi.h> 28 29 ··· 658 657 ctx->hash_key_len); 659 658 } 660 659 661 - struct n2_cipher_context { 660 + struct n2_skcipher_context { 662 661 int key_len; 663 662 int enc_type; 664 663 union { ··· 684 683 }; 685 684 686 685 struct n2_request_context { 687 - struct ablkcipher_walk walk; 686 + struct skcipher_walk walk; 688 687 struct list_head chunk_list; 689 688 struct n2_crypto_chunk chunk; 690 689 u8 temp_iv[16]; ··· 709 708 * is not a valid sequence. 710 709 */ 711 710 712 - struct n2_cipher_alg { 711 + struct n2_skcipher_alg { 713 712 struct list_head entry; 714 713 u8 enc_type; 715 - struct crypto_alg alg; 714 + struct skcipher_alg skcipher; 716 715 }; 717 716 718 - static inline struct n2_cipher_alg *n2_cipher_alg(struct crypto_tfm *tfm) 717 + static inline struct n2_skcipher_alg *n2_skcipher_alg(struct crypto_skcipher *tfm) 719 718 { 720 - struct crypto_alg *alg = tfm->__crt_alg; 719 + struct skcipher_alg *alg = crypto_skcipher_alg(tfm); 721 720 722 - return container_of(alg, struct n2_cipher_alg, alg); 721 + return container_of(alg, struct n2_skcipher_alg, skcipher); 723 722 } 724 723 725 - struct n2_cipher_request_context { 726 - struct ablkcipher_walk walk; 724 + struct n2_skcipher_request_context { 725 + struct skcipher_walk walk; 727 726 }; 728 727 729 - static int n2_aes_setkey(struct crypto_ablkcipher *cipher, const u8 *key, 728 + static int n2_aes_setkey(struct crypto_skcipher *skcipher, const u8 *key, 730 729 unsigned int keylen) 731 730 { 732 - struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher); 733 - struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm); 734 - struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm); 731 + struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 732 + struct n2_skcipher_context *ctx = crypto_tfm_ctx(tfm); 733 + struct n2_skcipher_alg 
*n2alg = n2_skcipher_alg(skcipher); 735 734 736 735 ctx->enc_type = (n2alg->enc_type & ENC_TYPE_CHAINING_MASK); 737 736 ··· 746 745 ctx->enc_type |= ENC_TYPE_ALG_AES256; 747 746 break; 748 747 default: 749 - crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN); 748 + crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN); 750 749 return -EINVAL; 751 750 } 752 751 ··· 755 754 return 0; 756 755 } 757 756 758 - static int n2_des_setkey(struct crypto_ablkcipher *cipher, const u8 *key, 757 + static int n2_des_setkey(struct crypto_skcipher *skcipher, const u8 *key, 759 758 unsigned int keylen) 760 759 { 761 - struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher); 762 - struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm); 763 - struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm); 760 + struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 761 + struct n2_skcipher_context *ctx = crypto_tfm_ctx(tfm); 762 + struct n2_skcipher_alg *n2alg = n2_skcipher_alg(skcipher); 764 763 int err; 765 764 766 - err = verify_ablkcipher_des_key(cipher, key); 765 + err = verify_skcipher_des_key(skcipher, key); 767 766 if (err) 768 767 return err; 769 768 ··· 774 773 return 0; 775 774 } 776 775 777 - static int n2_3des_setkey(struct crypto_ablkcipher *cipher, const u8 *key, 776 + static int n2_3des_setkey(struct crypto_skcipher *skcipher, const u8 *key, 778 777 unsigned int keylen) 779 778 { 780 - struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher); 781 - struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm); 782 - struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm); 779 + struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 780 + struct n2_skcipher_context *ctx = crypto_tfm_ctx(tfm); 781 + struct n2_skcipher_alg *n2alg = n2_skcipher_alg(skcipher); 783 782 int err; 784 783 785 - err = verify_ablkcipher_des3_key(cipher, key); 784 + err = verify_skcipher_des3_key(skcipher, key); 786 785 if (err) 787 786 return err; 788 787 ··· 793 792 return 0; 794 793 } 795 794 796 - static 
int n2_arc4_setkey(struct crypto_ablkcipher *cipher, const u8 *key, 795 + static int n2_arc4_setkey(struct crypto_skcipher *skcipher, const u8 *key, 797 796 unsigned int keylen) 798 797 { 799 - struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher); 800 - struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm); 801 - struct n2_cipher_alg *n2alg = n2_cipher_alg(tfm); 798 + struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 799 + struct n2_skcipher_context *ctx = crypto_tfm_ctx(tfm); 800 + struct n2_skcipher_alg *n2alg = n2_skcipher_alg(skcipher); 802 801 u8 *s = ctx->key.arc4; 803 802 u8 *x = s + 256; 804 803 u8 *y = x + 1; ··· 823 822 return 0; 824 823 } 825 824 826 - static inline int cipher_descriptor_len(int nbytes, unsigned int block_size) 825 + static inline int skcipher_descriptor_len(int nbytes, unsigned int block_size) 827 826 { 828 827 int this_len = nbytes; 829 828 ··· 831 830 return this_len > (1 << 16) ? (1 << 16) : this_len; 832 831 } 833 832 834 - static int __n2_crypt_chunk(struct crypto_tfm *tfm, struct n2_crypto_chunk *cp, 833 + static int __n2_crypt_chunk(struct crypto_skcipher *skcipher, 834 + struct n2_crypto_chunk *cp, 835 835 struct spu_queue *qp, bool encrypt) 836 836 { 837 - struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm); 837 + struct n2_skcipher_context *ctx = crypto_skcipher_ctx(skcipher); 838 838 struct cwq_initial_entry *ent; 839 839 bool in_place; 840 840 int i; ··· 879 877 return (spu_queue_submit(qp, ent) != HV_EOK) ? 
-EINVAL : 0; 880 878 } 881 879 882 - static int n2_compute_chunks(struct ablkcipher_request *req) 880 + static int n2_compute_chunks(struct skcipher_request *req) 883 881 { 884 - struct n2_request_context *rctx = ablkcipher_request_ctx(req); 885 - struct ablkcipher_walk *walk = &rctx->walk; 882 + struct n2_request_context *rctx = skcipher_request_ctx(req); 883 + struct skcipher_walk *walk = &rctx->walk; 886 884 struct n2_crypto_chunk *chunk; 887 885 unsigned long dest_prev; 888 886 unsigned int tot_len; 889 887 bool prev_in_place; 890 888 int err, nbytes; 891 889 892 - ablkcipher_walk_init(walk, req->dst, req->src, req->nbytes); 893 - err = ablkcipher_walk_phys(req, walk); 890 + err = skcipher_walk_async(walk, req); 894 891 if (err) 895 892 return err; 896 893 ··· 911 910 bool in_place; 912 911 int this_len; 913 912 914 - src_paddr = (page_to_phys(walk->src.page) + 915 - walk->src.offset); 916 - dest_paddr = (page_to_phys(walk->dst.page) + 917 - walk->dst.offset); 913 + src_paddr = (page_to_phys(walk->src.phys.page) + 914 + walk->src.phys.offset); 915 + dest_paddr = (page_to_phys(walk->dst.phys.page) + 916 + walk->dst.phys.offset); 918 917 in_place = (src_paddr == dest_paddr); 919 - this_len = cipher_descriptor_len(nbytes, walk->blocksize); 918 + this_len = skcipher_descriptor_len(nbytes, walk->blocksize); 920 919 921 920 if (chunk->arr_len != 0) { 922 921 if (in_place != prev_in_place || ··· 947 946 prev_in_place = in_place; 948 947 tot_len += this_len; 949 948 950 - err = ablkcipher_walk_done(req, walk, nbytes - this_len); 949 + err = skcipher_walk_done(walk, nbytes - this_len); 951 950 if (err) 952 951 break; 953 952 } ··· 959 958 return err; 960 959 } 961 960 962 - static void n2_chunk_complete(struct ablkcipher_request *req, void *final_iv) 961 + static void n2_chunk_complete(struct skcipher_request *req, void *final_iv) 963 962 { 964 - struct n2_request_context *rctx = ablkcipher_request_ctx(req); 963 + struct n2_request_context *rctx = 
skcipher_request_ctx(req); 965 964 struct n2_crypto_chunk *c, *tmp; 966 965 967 966 if (final_iv) 968 967 memcpy(rctx->walk.iv, final_iv, rctx->walk.blocksize); 969 968 970 - ablkcipher_walk_complete(&rctx->walk); 971 969 list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) { 972 970 list_del(&c->entry); 973 971 if (unlikely(c != &rctx->chunk)) ··· 975 975 976 976 } 977 977 978 - static int n2_do_ecb(struct ablkcipher_request *req, bool encrypt) 978 + static int n2_do_ecb(struct skcipher_request *req, bool encrypt) 979 979 { 980 - struct n2_request_context *rctx = ablkcipher_request_ctx(req); 981 - struct crypto_tfm *tfm = req->base.tfm; 980 + struct n2_request_context *rctx = skcipher_request_ctx(req); 981 + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 982 982 int err = n2_compute_chunks(req); 983 983 struct n2_crypto_chunk *c, *tmp; 984 984 unsigned long flags, hv_ret; ··· 1017 1017 return err; 1018 1018 } 1019 1019 1020 - static int n2_encrypt_ecb(struct ablkcipher_request *req) 1020 + static int n2_encrypt_ecb(struct skcipher_request *req) 1021 1021 { 1022 1022 return n2_do_ecb(req, true); 1023 1023 } 1024 1024 1025 - static int n2_decrypt_ecb(struct ablkcipher_request *req) 1025 + static int n2_decrypt_ecb(struct skcipher_request *req) 1026 1026 { 1027 1027 return n2_do_ecb(req, false); 1028 1028 } 1029 1029 1030 - static int n2_do_chaining(struct ablkcipher_request *req, bool encrypt) 1030 + static int n2_do_chaining(struct skcipher_request *req, bool encrypt) 1031 1031 { 1032 - struct n2_request_context *rctx = ablkcipher_request_ctx(req); 1033 - struct crypto_tfm *tfm = req->base.tfm; 1032 + struct n2_request_context *rctx = skcipher_request_ctx(req); 1033 + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 1034 1034 unsigned long flags, hv_ret, iv_paddr; 1035 1035 int err = n2_compute_chunks(req); 1036 1036 struct n2_crypto_chunk *c, *tmp; ··· 1107 1107 return err; 1108 1108 } 1109 1109 1110 - static int 
n2_encrypt_chaining(struct ablkcipher_request *req) 1110 + static int n2_encrypt_chaining(struct skcipher_request *req) 1111 1111 { 1112 1112 return n2_do_chaining(req, true); 1113 1113 } 1114 1114 1115 - static int n2_decrypt_chaining(struct ablkcipher_request *req) 1115 + static int n2_decrypt_chaining(struct skcipher_request *req) 1116 1116 { 1117 1117 return n2_do_chaining(req, false); 1118 1118 } 1119 1119 1120 - struct n2_cipher_tmpl { 1120 + struct n2_skcipher_tmpl { 1121 1121 const char *name; 1122 1122 const char *drv_name; 1123 1123 u8 block_size; 1124 1124 u8 enc_type; 1125 - struct ablkcipher_alg ablkcipher; 1125 + struct skcipher_alg skcipher; 1126 1126 }; 1127 1127 1128 - static const struct n2_cipher_tmpl cipher_tmpls[] = { 1128 + static const struct n2_skcipher_tmpl skcipher_tmpls[] = { 1129 1129 /* ARC4: only ECB is supported (chaining bits ignored) */ 1130 1130 { .name = "ecb(arc4)", 1131 1131 .drv_name = "ecb-arc4", 1132 1132 .block_size = 1, 1133 1133 .enc_type = (ENC_TYPE_ALG_RC4_STREAM | 1134 1134 ENC_TYPE_CHAINING_ECB), 1135 - .ablkcipher = { 1135 + .skcipher = { 1136 1136 .min_keysize = 1, 1137 1137 .max_keysize = 256, 1138 1138 .setkey = n2_arc4_setkey, ··· 1147 1147 .block_size = DES_BLOCK_SIZE, 1148 1148 .enc_type = (ENC_TYPE_ALG_DES | 1149 1149 ENC_TYPE_CHAINING_ECB), 1150 - .ablkcipher = { 1150 + .skcipher = { 1151 1151 .min_keysize = DES_KEY_SIZE, 1152 1152 .max_keysize = DES_KEY_SIZE, 1153 1153 .setkey = n2_des_setkey, ··· 1160 1160 .block_size = DES_BLOCK_SIZE, 1161 1161 .enc_type = (ENC_TYPE_ALG_DES | 1162 1162 ENC_TYPE_CHAINING_CBC), 1163 - .ablkcipher = { 1163 + .skcipher = { 1164 1164 .ivsize = DES_BLOCK_SIZE, 1165 1165 .min_keysize = DES_KEY_SIZE, 1166 1166 .max_keysize = DES_KEY_SIZE, ··· 1174 1174 .block_size = DES_BLOCK_SIZE, 1175 1175 .enc_type = (ENC_TYPE_ALG_DES | 1176 1176 ENC_TYPE_CHAINING_CFB), 1177 - .ablkcipher = { 1177 + .skcipher = { 1178 1178 .min_keysize = DES_KEY_SIZE, 1179 1179 .max_keysize = DES_KEY_SIZE, 1180 
1180 .setkey = n2_des_setkey, ··· 1189 1189 .block_size = DES_BLOCK_SIZE, 1190 1190 .enc_type = (ENC_TYPE_ALG_3DES | 1191 1191 ENC_TYPE_CHAINING_ECB), 1192 - .ablkcipher = { 1192 + .skcipher = { 1193 1193 .min_keysize = 3 * DES_KEY_SIZE, 1194 1194 .max_keysize = 3 * DES_KEY_SIZE, 1195 1195 .setkey = n2_3des_setkey, ··· 1202 1202 .block_size = DES_BLOCK_SIZE, 1203 1203 .enc_type = (ENC_TYPE_ALG_3DES | 1204 1204 ENC_TYPE_CHAINING_CBC), 1205 - .ablkcipher = { 1205 + .skcipher = { 1206 1206 .ivsize = DES_BLOCK_SIZE, 1207 1207 .min_keysize = 3 * DES_KEY_SIZE, 1208 1208 .max_keysize = 3 * DES_KEY_SIZE, ··· 1216 1216 .block_size = DES_BLOCK_SIZE, 1217 1217 .enc_type = (ENC_TYPE_ALG_3DES | 1218 1218 ENC_TYPE_CHAINING_CFB), 1219 - .ablkcipher = { 1219 + .skcipher = { 1220 1220 .min_keysize = 3 * DES_KEY_SIZE, 1221 1221 .max_keysize = 3 * DES_KEY_SIZE, 1222 1222 .setkey = n2_3des_setkey, ··· 1230 1230 .block_size = AES_BLOCK_SIZE, 1231 1231 .enc_type = (ENC_TYPE_ALG_AES128 | 1232 1232 ENC_TYPE_CHAINING_ECB), 1233 - .ablkcipher = { 1233 + .skcipher = { 1234 1234 .min_keysize = AES_MIN_KEY_SIZE, 1235 1235 .max_keysize = AES_MAX_KEY_SIZE, 1236 1236 .setkey = n2_aes_setkey, ··· 1243 1243 .block_size = AES_BLOCK_SIZE, 1244 1244 .enc_type = (ENC_TYPE_ALG_AES128 | 1245 1245 ENC_TYPE_CHAINING_CBC), 1246 - .ablkcipher = { 1246 + .skcipher = { 1247 1247 .ivsize = AES_BLOCK_SIZE, 1248 1248 .min_keysize = AES_MIN_KEY_SIZE, 1249 1249 .max_keysize = AES_MAX_KEY_SIZE, ··· 1257 1257 .block_size = AES_BLOCK_SIZE, 1258 1258 .enc_type = (ENC_TYPE_ALG_AES128 | 1259 1259 ENC_TYPE_CHAINING_COUNTER), 1260 - .ablkcipher = { 1260 + .skcipher = { 1261 1261 .ivsize = AES_BLOCK_SIZE, 1262 1262 .min_keysize = AES_MIN_KEY_SIZE, 1263 1263 .max_keysize = AES_MAX_KEY_SIZE, ··· 1268 1268 }, 1269 1269 1270 1270 }; 1271 - #define NUM_CIPHER_TMPLS ARRAY_SIZE(cipher_tmpls) 1271 + #define NUM_CIPHER_TMPLS ARRAY_SIZE(skcipher_tmpls) 1272 1272 1273 - static LIST_HEAD(cipher_algs); 1273 + static 
LIST_HEAD(skcipher_algs); 1274 1274 1275 1275 struct n2_hash_tmpl { 1276 1276 const char *name; ··· 1344 1344 1345 1345 static void __n2_unregister_algs(void) 1346 1346 { 1347 - struct n2_cipher_alg *cipher, *cipher_tmp; 1347 + struct n2_skcipher_alg *skcipher, *skcipher_tmp; 1348 1348 struct n2_ahash_alg *alg, *alg_tmp; 1349 1349 struct n2_hmac_alg *hmac, *hmac_tmp; 1350 1350 1351 - list_for_each_entry_safe(cipher, cipher_tmp, &cipher_algs, entry) { 1352 - crypto_unregister_alg(&cipher->alg); 1353 - list_del(&cipher->entry); 1354 - kfree(cipher); 1351 + list_for_each_entry_safe(skcipher, skcipher_tmp, &skcipher_algs, entry) { 1352 + crypto_unregister_skcipher(&skcipher->skcipher); 1353 + list_del(&skcipher->entry); 1354 + kfree(skcipher); 1355 1355 } 1356 1356 list_for_each_entry_safe(hmac, hmac_tmp, &hmac_algs, derived.entry) { 1357 1357 crypto_unregister_ahash(&hmac->derived.alg); ··· 1365 1365 } 1366 1366 } 1367 1367 1368 - static int n2_cipher_cra_init(struct crypto_tfm *tfm) 1368 + static int n2_skcipher_init_tfm(struct crypto_skcipher *tfm) 1369 1369 { 1370 - tfm->crt_ablkcipher.reqsize = sizeof(struct n2_request_context); 1370 + crypto_skcipher_set_reqsize(tfm, sizeof(struct n2_request_context)); 1371 1371 return 0; 1372 1372 } 1373 1373 1374 - static int __n2_register_one_cipher(const struct n2_cipher_tmpl *tmpl) 1374 + static int __n2_register_one_skcipher(const struct n2_skcipher_tmpl *tmpl) 1375 1375 { 1376 - struct n2_cipher_alg *p = kzalloc(sizeof(*p), GFP_KERNEL); 1377 - struct crypto_alg *alg; 1376 + struct n2_skcipher_alg *p = kzalloc(sizeof(*p), GFP_KERNEL); 1377 + struct skcipher_alg *alg; 1378 1378 int err; 1379 1379 1380 1380 if (!p) 1381 1381 return -ENOMEM; 1382 1382 1383 - alg = &p->alg; 1383 + alg = &p->skcipher; 1384 + *alg = tmpl->skcipher; 1384 1385 1385 - snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name); 1386 - snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->drv_name); 1387 - alg->cra_priority = 
N2_CRA_PRIORITY; 1388 - alg->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | 1389 - CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC; 1390 - alg->cra_blocksize = tmpl->block_size; 1386 + snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name); 1387 + snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s-n2", tmpl->drv_name); 1388 + alg->base.cra_priority = N2_CRA_PRIORITY; 1389 + alg->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC; 1390 + alg->base.cra_blocksize = tmpl->block_size; 1391 1391 p->enc_type = tmpl->enc_type; 1392 - alg->cra_ctxsize = sizeof(struct n2_cipher_context); 1393 - alg->cra_type = &crypto_ablkcipher_type; 1394 - alg->cra_u.ablkcipher = tmpl->ablkcipher; 1395 - alg->cra_init = n2_cipher_cra_init; 1396 - alg->cra_module = THIS_MODULE; 1392 + alg->base.cra_ctxsize = sizeof(struct n2_skcipher_context); 1393 + alg->base.cra_module = THIS_MODULE; 1394 + alg->init = n2_skcipher_init_tfm; 1397 1395 1398 - list_add(&p->entry, &cipher_algs); 1399 - err = crypto_register_alg(alg); 1396 + list_add(&p->entry, &skcipher_algs); 1397 + err = crypto_register_skcipher(alg); 1400 1398 if (err) { 1401 - pr_err("%s alg registration failed\n", alg->cra_name); 1399 + pr_err("%s alg registration failed\n", alg->base.cra_name); 1402 1400 list_del(&p->entry); 1403 1401 kfree(p); 1404 1402 } else { 1405 - pr_info("%s alg registered\n", alg->cra_name); 1403 + pr_info("%s alg registered\n", alg->base.cra_name); 1406 1404 } 1407 1405 return err; 1408 1406 } ··· 1515 1517 } 1516 1518 } 1517 1519 for (i = 0; i < NUM_CIPHER_TMPLS; i++) { 1518 - err = __n2_register_one_cipher(&cipher_tmpls[i]); 1520 + err = __n2_register_one_skcipher(&skcipher_tmpls[i]); 1519 1521 if (err) { 1520 1522 __n2_unregister_algs(); 1521 1523 goto out;