Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: chacha20 - convert generic and x86 versions to skcipher

This converts the ChaCha20 code from a blkcipher to a skcipher, which
is now the preferred way to implement symmetric block and stream ciphers.

This ports the generic and x86 versions at the same time because the
latter reuses routines of the former.

Note that the skcipher_walk() API guarantees that all presented blocks
except the final one are a multiple of the chunk size, so we can simplify
the encrypt() routine somewhat.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Ard Biesheuvel and committed by
Herbert Xu
9ae433bc 80966672

+64 -84
+31 -38
arch/x86/crypto/chacha20_glue.c
··· 11 11 12 12 #include <crypto/algapi.h> 13 13 #include <crypto/chacha20.h> 14 - #include <linux/crypto.h> 14 + #include <crypto/internal/skcipher.h> 15 15 #include <linux/kernel.h> 16 16 #include <linux/module.h> 17 17 #include <asm/fpu/api.h> ··· 63 63 } 64 64 } 65 65 66 - static int chacha20_simd(struct blkcipher_desc *desc, struct scatterlist *dst, 67 - struct scatterlist *src, unsigned int nbytes) 66 + static int chacha20_simd(struct skcipher_request *req) 68 67 { 69 - u32 *state, state_buf[16 + (CHACHA20_STATE_ALIGN / sizeof(u32)) - 1]; 70 - struct blkcipher_walk walk; 68 + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 69 + struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm); 70 + u32 state[16] __aligned(CHACHA20_STATE_ALIGN); 71 + struct skcipher_walk walk; 71 72 int err; 72 73 73 - if (nbytes <= CHACHA20_BLOCK_SIZE || !may_use_simd()) 74 - return crypto_chacha20_crypt(desc, dst, src, nbytes); 74 + if (req->cryptlen <= CHACHA20_BLOCK_SIZE || !may_use_simd()) 75 + return crypto_chacha20_crypt(req); 75 76 76 - state = (u32 *)roundup((uintptr_t)state_buf, CHACHA20_STATE_ALIGN); 77 + err = skcipher_walk_virt(&walk, req, true); 77 78 78 - blkcipher_walk_init(&walk, dst, src, nbytes); 79 - err = blkcipher_walk_virt_block(desc, &walk, CHACHA20_BLOCK_SIZE); 80 - 81 - crypto_chacha20_init(state, crypto_blkcipher_ctx(desc->tfm), walk.iv); 79 + crypto_chacha20_init(state, ctx, walk.iv); 82 80 83 81 kernel_fpu_begin(); 84 82 85 83 while (walk.nbytes >= CHACHA20_BLOCK_SIZE) { 86 84 chacha20_dosimd(state, walk.dst.virt.addr, walk.src.virt.addr, 87 85 rounddown(walk.nbytes, CHACHA20_BLOCK_SIZE)); 88 - err = blkcipher_walk_done(desc, &walk, 89 - walk.nbytes % CHACHA20_BLOCK_SIZE); 86 + err = skcipher_walk_done(&walk, 87 + walk.nbytes % CHACHA20_BLOCK_SIZE); 90 88 } 91 89 92 90 if (walk.nbytes) { 93 91 chacha20_dosimd(state, walk.dst.virt.addr, walk.src.virt.addr, 94 92 walk.nbytes); 95 - err = blkcipher_walk_done(desc, &walk, 0); 93 + err = skcipher_walk_done(&walk, 0); 96 94 } 97 95 98 96 kernel_fpu_end(); ··· 98 100 return err; 99 101 } 100 102 101 - static struct crypto_alg alg = { 102 - .cra_name = "chacha20", 103 - .cra_driver_name = "chacha20-simd", 104 - .cra_priority = 300, 105 - .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 106 - .cra_blocksize = 1, 107 - .cra_type = &crypto_blkcipher_type, 108 - .cra_ctxsize = sizeof(struct chacha20_ctx), 109 - .cra_alignmask = sizeof(u32) - 1, 110 - .cra_module = THIS_MODULE, 111 - .cra_u = { 112 - .blkcipher = { 113 - .min_keysize = CHACHA20_KEY_SIZE, 114 - .max_keysize = CHACHA20_KEY_SIZE, 115 - .ivsize = CHACHA20_IV_SIZE, 116 - .geniv = "seqiv", 117 - .setkey = crypto_chacha20_setkey, 118 - .encrypt = chacha20_simd, 119 - .decrypt = chacha20_simd, 120 - }, 121 - }, 103 + static struct skcipher_alg alg = { 104 + .base.cra_name = "chacha20", 105 + .base.cra_driver_name = "chacha20-simd", 106 + .base.cra_priority = 300, 107 + .base.cra_blocksize = 1, 108 + .base.cra_ctxsize = sizeof(struct chacha20_ctx), 109 + .base.cra_alignmask = sizeof(u32) - 1, 110 + .base.cra_module = THIS_MODULE, 111 + 112 + .min_keysize = CHACHA20_KEY_SIZE, 113 + .max_keysize = CHACHA20_KEY_SIZE, 114 + .ivsize = CHACHA20_IV_SIZE, 115 + .chunksize = CHACHA20_BLOCK_SIZE, 116 + .setkey = crypto_chacha20_setkey, 117 + .encrypt = chacha20_simd, 118 + .decrypt = chacha20_simd, 122 119 }; 123 120 124 121 static int __init chacha20_simd_mod_init(void) ··· 126 133 boot_cpu_has(X86_FEATURE_AVX2) && 127 134 cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL); 128 135 #endif 129 - return crypto_register_alg(&alg); 136 + return crypto_register_skcipher(&alg); 130 137 } 131 138 132 139 static void __exit chacha20_simd_mod_fini(void) 133 140 { 134 - crypto_unregister_alg(&alg); 141 + crypto_unregister_skcipher(&alg); 135 142 } 136 143 137 144 module_init(chacha20_simd_mod_init);
+30 -43
crypto/chacha20_generic.c
··· 10 10 */ 11 11 12 12 #include <crypto/algapi.h> 13 - #include <linux/crypto.h> 14 - #include <linux/kernel.h> 15 - #include <linux/module.h> 16 13 #include <crypto/chacha20.h> 14 + #include <crypto/internal/skcipher.h> 15 + #include <linux/module.h> 17 16 18 17 static inline u32 le32_to_cpuvp(const void *p) 19 18 { ··· 62 63 } 63 64 EXPORT_SYMBOL_GPL(crypto_chacha20_init); 64 65 65 - int crypto_chacha20_setkey(struct crypto_tfm *tfm, const u8 *key, 66 + int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key, 66 67 unsigned int keysize) 67 68 { 68 - struct chacha20_ctx *ctx = crypto_tfm_ctx(tfm); 69 + struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm); 69 70 int i; 70 71 71 72 if (keysize != CHACHA20_KEY_SIZE) ··· 78 79 } 79 80 EXPORT_SYMBOL_GPL(crypto_chacha20_setkey); 80 81 81 - int crypto_chacha20_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, 82 - struct scatterlist *src, unsigned int nbytes) 82 + int crypto_chacha20_crypt(struct skcipher_request *req) 83 83 { 84 - struct blkcipher_walk walk; 84 + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 85 + struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm); 86 + struct skcipher_walk walk; 85 87 u32 state[16]; 86 88 int err; 87 89 88 - blkcipher_walk_init(&walk, dst, src, nbytes); 89 - err = blkcipher_walk_virt_block(desc, &walk, CHACHA20_BLOCK_SIZE); 90 + err = skcipher_walk_virt(&walk, req, true); 90 91 91 - crypto_chacha20_init(state, crypto_blkcipher_ctx(desc->tfm), walk.iv); 92 + crypto_chacha20_init(state, ctx, walk.iv); 92 93 93 - while (walk.nbytes >= CHACHA20_BLOCK_SIZE) { 94 - chacha20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, 95 - rounddown(walk.nbytes, CHACHA20_BLOCK_SIZE)); 96 - err = blkcipher_walk_done(desc, &walk, 97 - walk.nbytes % CHACHA20_BLOCK_SIZE); 98 - } 99 - 100 - if (walk.nbytes) { 94 + while (walk.nbytes > 0) { 101 95 chacha20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, 102 96 walk.nbytes); 103 - err = blkcipher_walk_done(desc, &walk, 0); 97 + err = skcipher_walk_done(&walk, 0); 104 98 } 105 99 106 100 return err; 107 101 } 108 102 EXPORT_SYMBOL_GPL(crypto_chacha20_crypt); 109 103 110 - static struct crypto_alg alg = { 111 - .cra_name = "chacha20", 112 - .cra_driver_name = "chacha20-generic", 113 - .cra_priority = 100, 114 - .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 115 - .cra_blocksize = 1, 116 - .cra_type = &crypto_blkcipher_type, 117 - .cra_ctxsize = sizeof(struct chacha20_ctx), 118 - .cra_alignmask = sizeof(u32) - 1, 119 - .cra_module = THIS_MODULE, 120 - .cra_u = { 121 - .blkcipher = { 122 - .min_keysize = CHACHA20_KEY_SIZE, 123 - .max_keysize = CHACHA20_KEY_SIZE, 124 - .ivsize = CHACHA20_IV_SIZE, 125 - .geniv = "seqiv", 126 - .setkey = crypto_chacha20_setkey, 127 - .encrypt = crypto_chacha20_crypt, 128 - .decrypt = crypto_chacha20_crypt, 129 - }, 130 - }, 104 + static struct skcipher_alg alg = { 105 + .base.cra_name = "chacha20", 106 + .base.cra_driver_name = "chacha20-generic", 107 + .base.cra_priority = 100, 108 + .base.cra_blocksize = 1, 109 + .base.cra_ctxsize = sizeof(struct chacha20_ctx), 110 + .base.cra_alignmask = sizeof(u32) - 1, 111 + .base.cra_module = THIS_MODULE, 112 + 113 + .min_keysize = CHACHA20_KEY_SIZE, 114 + .max_keysize = CHACHA20_KEY_SIZE, 115 + .ivsize = CHACHA20_IV_SIZE, 116 + .chunksize = CHACHA20_BLOCK_SIZE, 117 + .setkey = crypto_chacha20_setkey, 118 + .encrypt = crypto_chacha20_crypt, 119 + .decrypt = crypto_chacha20_crypt, 131 120 }; 132 121 133 122 static int __init chacha20_generic_mod_init(void) 134 123 { 135 - return crypto_register_alg(&alg); 124 + return crypto_register_skcipher(&alg); 136 125 } 137 126 138 127 static void __exit chacha20_generic_mod_fini(void) 139 128 { 140 - crypto_unregister_alg(&alg); 129 + crypto_unregister_skcipher(&alg); 141 130 } 142 131 143 132 module_init(chacha20_generic_mod_init);
+3 -3
include/crypto/chacha20.h
··· 5 5 #ifndef _CRYPTO_CHACHA20_H 6 6 #define _CRYPTO_CHACHA20_H 7 7 8 + #include <crypto/skcipher.h> 8 9 #include <linux/types.h> 9 10 #include <linux/crypto.h> 10 11 ··· 19 18 20 19 void chacha20_block(u32 *state, void *stream); 21 20 void crypto_chacha20_init(u32 *state, struct chacha20_ctx *ctx, u8 *iv); 22 - int crypto_chacha20_setkey(struct crypto_tfm *tfm, const u8 *key, 21 + int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key, 23 22 unsigned int keysize); 24 - int crypto_chacha20_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, 25 - struct scatterlist *src, unsigned int nbytes); 23 + int crypto_chacha20_crypt(struct skcipher_request *req); 26 24 27 25 #endif