Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: chacha - move existing library code into lib/crypto

Currently, our generic ChaCha implementation consists of a permute
function in lib/chacha.c that operates on the 64-byte ChaCha state
directly [and which is always included into the core kernel since it
is used by the /dev/random driver], and the crypto API plumbing to
expose it as a skcipher.

In order to support in-kernel users that need the ChaCha streamcipher
but have no need [or tolerance] for going through the abstractions of
the crypto API, let's expose the streamcipher bits via a library API
as well, in a way that permits the implementation to be superseded by
an architecture specific one if provided.

So move the streamcipher code into a separate module in lib/crypto,
and expose the init() and crypt() routines to users of the library.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel; committed by Herbert Xu
5fb8ef25 746b2e02

+199 -86
+1 -1
arch/arm/crypto/chacha-neon-glue.c
··· 20 20 */ 21 21 22 22 #include <crypto/algapi.h> 23 - #include <crypto/chacha.h> 23 + #include <crypto/internal/chacha.h> 24 24 #include <crypto/internal/simd.h> 25 25 #include <crypto/internal/skcipher.h> 26 26 #include <linux/kernel.h>
+1 -1
arch/arm64/crypto/chacha-neon-glue.c
··· 20 20 */ 21 21 22 22 #include <crypto/algapi.h> 23 - #include <crypto/chacha.h> 23 + #include <crypto/internal/chacha.h> 24 24 #include <crypto/internal/simd.h> 25 25 #include <crypto/internal/skcipher.h> 26 26 #include <linux/kernel.h>
+1 -1
arch/x86/crypto/chacha_glue.c
··· 7 7 */ 8 8 9 9 #include <crypto/algapi.h> 10 - #include <crypto/chacha.h> 10 + #include <crypto/internal/chacha.h> 11 11 #include <crypto/internal/simd.h> 12 12 #include <crypto/internal/skcipher.h> 13 13 #include <linux/kernel.h>
+1
crypto/Kconfig
··· 1412 1412 1413 1413 config CRYPTO_CHACHA20 1414 1414 tristate "ChaCha stream cipher algorithms" 1415 + select CRYPTO_LIB_CHACHA_GENERIC 1415 1416 select CRYPTO_SKCIPHER 1416 1417 help 1417 1418 The ChaCha20, XChaCha20, and XChaCha12 stream cipher algorithms.
+5 -55
crypto/chacha_generic.c
··· 8 8 9 9 #include <asm/unaligned.h> 10 10 #include <crypto/algapi.h> 11 - #include <crypto/chacha.h> 11 + #include <crypto/internal/chacha.h> 12 12 #include <crypto/internal/skcipher.h> 13 13 #include <linux/module.h> 14 - 15 - static void chacha_docrypt(u32 *state, u8 *dst, const u8 *src, 16 - unsigned int bytes, int nrounds) 17 - { 18 - /* aligned to potentially speed up crypto_xor() */ 19 - u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long)); 20 - 21 - while (bytes >= CHACHA_BLOCK_SIZE) { 22 - chacha_block(state, stream, nrounds); 23 - crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE); 24 - bytes -= CHACHA_BLOCK_SIZE; 25 - dst += CHACHA_BLOCK_SIZE; 26 - src += CHACHA_BLOCK_SIZE; 27 - } 28 - if (bytes) { 29 - chacha_block(state, stream, nrounds); 30 - crypto_xor_cpy(dst, src, stream, bytes); 31 - } 32 - } 33 14 34 15 static int chacha_stream_xor(struct skcipher_request *req, 35 16 const struct chacha_ctx *ctx, const u8 *iv) ··· 29 48 if (nbytes < walk.total) 30 49 nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE); 31 50 32 - chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, 33 - nbytes, ctx->nrounds); 51 + chacha_crypt_generic(state, walk.dst.virt.addr, 52 + walk.src.virt.addr, nbytes, ctx->nrounds); 34 53 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); 35 54 } 36 55 ··· 39 58 40 59 void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv) 41 60 { 42 - state[0] = 0x61707865; /* "expa" */ 43 - state[1] = 0x3320646e; /* "nd 3" */ 44 - state[2] = 0x79622d32; /* "2-by" */ 45 - state[3] = 0x6b206574; /* "te k" */ 46 - state[4] = ctx->key[0]; 47 - state[5] = ctx->key[1]; 48 - state[6] = ctx->key[2]; 49 - state[7] = ctx->key[3]; 50 - state[8] = ctx->key[4]; 51 - state[9] = ctx->key[5]; 52 - state[10] = ctx->key[6]; 53 - state[11] = ctx->key[7]; 54 - state[12] = get_unaligned_le32(iv + 0); 55 - state[13] = get_unaligned_le32(iv + 4); 56 - state[14] = get_unaligned_le32(iv + 8); 57 - state[15] = get_unaligned_le32(iv + 12); 61 + chacha_init_generic(state, ctx->key, iv); 58 62 } 59 63 EXPORT_SYMBOL_GPL(crypto_chacha_init); 60 - 61 - static int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key, 62 - unsigned int keysize, int nrounds) 63 - { 64 - struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm); 65 - int i; 66 - 67 - if (keysize != CHACHA_KEY_SIZE) 68 - return -EINVAL; 69 - 70 - for (i = 0; i < ARRAY_SIZE(ctx->key); i++) 71 - ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32)); 72 - 73 - ctx->nrounds = nrounds; 74 - return 0; 75 - } 76 64 77 65 int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key, 78 66 unsigned int keysize) ··· 76 126 77 127 /* Compute the subkey given the original key and first 128 nonce bits */ 78 128 crypto_chacha_init(state, ctx, req->iv); 79 - hchacha_block(state, subctx.key, ctx->nrounds); 129 + hchacha_block_generic(state, subctx.key, ctx->nrounds); 80 130 subctx.nrounds = ctx->nrounds; 81 131 82 132 /* Build the real IV */
+60 -17
include/crypto/chacha.h
··· 15 15 #ifndef _CRYPTO_CHACHA_H 16 16 #define _CRYPTO_CHACHA_H 17 17 18 - #include <crypto/skcipher.h> 18 + #include <asm/unaligned.h> 19 19 #include <linux/types.h> 20 - #include <linux/crypto.h> 21 20 22 21 /* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */ 23 22 #define CHACHA_IV_SIZE 16 ··· 28 29 /* 192-bit nonce, then 64-bit stream position */ 29 30 #define XCHACHA_IV_SIZE 32 30 31 31 - struct chacha_ctx { 32 - u32 key[8]; 33 - int nrounds; 34 - }; 35 - 36 - void chacha_block(u32 *state, u8 *stream, int nrounds); 32 + void chacha_block_generic(u32 *state, u8 *stream, int nrounds); 37 33 static inline void chacha20_block(u32 *state, u8 *stream) 38 34 { 39 - chacha_block(state, stream, 20); 35 + chacha_block_generic(state, stream, 20); 40 36 } 41 - void hchacha_block(const u32 *in, u32 *out, int nrounds); 42 37 43 - void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv); 38 + void hchacha_block_arch(const u32 *state, u32 *out, int nrounds); 39 + void hchacha_block_generic(const u32 *state, u32 *out, int nrounds); 44 40 45 - int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key, 46 - unsigned int keysize); 47 - int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key, 48 - unsigned int keysize); 41 + static inline void hchacha_block(const u32 *state, u32 *out, int nrounds) 42 + { 43 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA)) 44 + hchacha_block_arch(state, out, nrounds); 45 + else 46 + hchacha_block_generic(state, out, nrounds); 47 + } 49 48 50 - int crypto_chacha_crypt(struct skcipher_request *req); 51 - int crypto_xchacha_crypt(struct skcipher_request *req); 49 + void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv); 50 + static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv) 51 + { 52 + state[0] = 0x61707865; /* "expa" */ 53 + state[1] = 0x3320646e; /* "nd 3" */ 54 + state[2] = 0x79622d32; /* "2-by" */ 55 + state[3] = 0x6b206574; /* "te k" */ 56 + state[4] = key[0]; 57 + state[5] = key[1]; 58 + state[6] = key[2]; 59 + state[7] = key[3]; 60 + state[8] = key[4]; 61 + state[9] = key[5]; 62 + state[10] = key[6]; 63 + state[11] = key[7]; 64 + state[12] = get_unaligned_le32(iv + 0); 65 + state[13] = get_unaligned_le32(iv + 4); 66 + state[14] = get_unaligned_le32(iv + 8); 67 + state[15] = get_unaligned_le32(iv + 12); 68 + } 69 + 70 + static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv) 71 + { 72 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA)) 73 + chacha_init_arch(state, key, iv); 74 + else 75 + chacha_init_generic(state, key, iv); 76 + } 77 + 78 + void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, 79 + unsigned int bytes, int nrounds); 80 + void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src, 81 + unsigned int bytes, int nrounds); 82 + 83 + static inline void chacha_crypt(u32 *state, u8 *dst, const u8 *src, 84 + unsigned int bytes, int nrounds) 85 + { 86 + if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA)) 87 + chacha_crypt_arch(state, dst, src, bytes, nrounds); 88 + else 89 + chacha_crypt_generic(state, dst, src, bytes, nrounds); 90 + } 91 + 92 + static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src, 93 + unsigned int bytes) 94 + { 95 + chacha_crypt(state, dst, src, bytes, 20); 96 + } 52 97 53 98 #endif /* _CRYPTO_CHACHA_H */
+53
include/crypto/internal/chacha.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + 3 + #ifndef _CRYPTO_INTERNAL_CHACHA_H 4 + #define _CRYPTO_INTERNAL_CHACHA_H 5 + 6 + #include <crypto/chacha.h> 7 + #include <crypto/internal/skcipher.h> 8 + #include <linux/crypto.h> 9 + 10 + struct chacha_ctx { 11 + u32 key[8]; 12 + int nrounds; 13 + }; 14 + 15 + void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv); 16 + 17 + static inline int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key, 18 + unsigned int keysize, int nrounds) 19 + { 20 + struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm); 21 + int i; 22 + 23 + if (keysize != CHACHA_KEY_SIZE) 24 + return -EINVAL; 25 + 26 + for (i = 0; i < ARRAY_SIZE(ctx->key); i++) 27 + ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32)); 28 + 29 + ctx->nrounds = nrounds; 30 + return 0; 31 + } 32 + 33 + static inline int chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key, 34 + unsigned int keysize) 35 + { 36 + return chacha_setkey(tfm, key, keysize, 20); 37 + } 38 + 39 + static inline int chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key, 40 + unsigned int keysize) 41 + { 42 + return chacha_setkey(tfm, key, keysize, 12); 43 + } 44 + 45 + int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key, 46 + unsigned int keysize); 47 + int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key, 48 + unsigned int keysize); 49 + 50 + int crypto_chacha_crypt(struct skcipher_request *req); 51 + int crypto_xchacha_crypt(struct skcipher_request *req); 52 + 53 + #endif /* _CRYPTO_INTERNAL_CHACHA_H */
+1 -2
lib/Makefile
··· 26 26 27 27 lib-y := ctype.o string.o vsprintf.o cmdline.o \ 28 28 rbtree.o radix-tree.o timerqueue.o xarray.o \ 29 - idr.o extable.o \ 30 - sha1.o chacha.o irq_regs.o argv_split.o \ 29 + idr.o extable.o sha1.o irq_regs.o argv_split.o \ 31 30 flex_proportions.o ratelimit.o show_mem.o \ 32 31 is_single_threaded.o plist.o decompress.o kobject_uevent.o \ 33 32 earlycpio.o seq_buf.o siphash.o dec_and_lock.o \
+11 -9
lib/chacha.c lib/crypto/chacha.c
··· 5 5 * Copyright (C) 2015 Martin Willi 6 6 */ 7 7 8 + #include <linux/bug.h> 8 9 #include <linux/kernel.h> 9 10 #include <linux/export.h> 10 11 #include <linux/bitops.h> 12 + #include <linux/string.h> 11 13 #include <linux/cryptohash.h> 12 14 #include <asm/unaligned.h> 13 15 #include <crypto/chacha.h> ··· 74 72 * The caller has already converted the endianness of the input. This function 75 73 * also handles incrementing the block counter in the input matrix. 76 74 */ 77 - void chacha_block(u32 *state, u8 *stream, int nrounds) 75 + void chacha_block_generic(u32 *state, u8 *stream, int nrounds) 78 76 { 79 77 u32 x[16]; 80 78 int i; ··· 88 86 89 87 state[12]++; 90 88 } 91 - EXPORT_SYMBOL(chacha_block); 89 + EXPORT_SYMBOL(chacha_block_generic); 92 90 93 91 /** 94 - * hchacha_block - abbreviated ChaCha core, for XChaCha 95 - * @in: input state matrix (16 32-bit words) 92 + * hchacha_block_generic - abbreviated ChaCha core, for XChaCha 93 + * @state: input state matrix (16 32-bit words) 96 94 * @stream: output (8 32-bit words) 97 95 * @nrounds: number of rounds (20 or 12; 20 is recommended) 98 96 * ··· 101 99 * skips the final addition of the initial state, and outputs only certain words 102 100 * of the state. It should not be used for streaming directly. 103 101 */ 104 - void hchacha_block(const u32 *in, u32 *out, int nrounds) 102 + void hchacha_block_generic(const u32 *state, u32 *stream, int nrounds) 105 103 { 106 104 u32 x[16]; 107 105 108 - memcpy(x, in, 64); 106 + memcpy(x, state, 64); 109 107 110 108 chacha_permute(x, nrounds); 111 109 112 - memcpy(&out[0], &x[0], 16); 113 - memcpy(&out[4], &x[12], 16); 110 + memcpy(&stream[0], &x[0], 16); 111 + memcpy(&stream[4], &x[12], 16); 114 112 } 115 - EXPORT_SYMBOL(hchacha_block); 113 + EXPORT_SYMBOL(hchacha_block_generic);
+26
lib/crypto/Kconfig
··· 8 8 config CRYPTO_LIB_ARC4 9 9 tristate 10 10 11 + config CRYPTO_ARCH_HAVE_LIB_CHACHA 12 + tristate 13 + help 14 + Declares whether the architecture provides an arch-specific 15 + accelerated implementation of the ChaCha library interface, 16 + either builtin or as a module. 17 + 18 + config CRYPTO_LIB_CHACHA_GENERIC 19 + tristate 20 + select CRYPTO_ALGAPI 21 + help 22 + This symbol can be depended upon by arch implementations of the 23 + ChaCha library interface that require the generic code as a 24 + fallback, e.g., for SIMD implementations. If no arch specific 25 + implementation is enabled, this implementation serves the users 26 + of CRYPTO_LIB_CHACHA. 27 + 28 + config CRYPTO_LIB_CHACHA 29 + tristate "ChaCha library interface" 30 + depends on CRYPTO_ARCH_HAVE_LIB_CHACHA || !CRYPTO_ARCH_HAVE_LIB_CHACHA 31 + select CRYPTO_LIB_CHACHA_GENERIC if CRYPTO_ARCH_HAVE_LIB_CHACHA=n 32 + help 33 + Enable the ChaCha library interface. This interface may be fulfilled 34 + by either the generic implementation or an arch-specific one, if one 35 + is available and enabled. 36 + 11 37 config CRYPTO_LIB_DES 12 38 tristate 13 39
+4
lib/crypto/Makefile
··· 1 1 # SPDX-License-Identifier: GPL-2.0 2 2 3 + # chacha is used by the /dev/random driver which is always builtin 4 + obj-y += chacha.o 5 + obj-$(CONFIG_CRYPTO_LIB_CHACHA_GENERIC) += libchacha.o 6 + 3 7 obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o 4 8 libaes-y := aes.o 5 9
+35
lib/crypto/libchacha.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-or-later 2 + /* 3 + * The ChaCha stream cipher (RFC7539) 4 + * 5 + * Copyright (C) 2015 Martin Willi 6 + */ 7 + 8 + #include <linux/kernel.h> 9 + #include <linux/export.h> 10 + #include <linux/module.h> 11 + 12 + #include <crypto/algapi.h> // for crypto_xor_cpy 13 + #include <crypto/chacha.h> 14 + 15 + void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src, 16 + unsigned int bytes, int nrounds) 17 + { 18 + /* aligned to potentially speed up crypto_xor() */ 19 + u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long)); 20 + 21 + while (bytes >= CHACHA_BLOCK_SIZE) { 22 + chacha_block_generic(state, stream, nrounds); 23 + crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE); 24 + bytes -= CHACHA_BLOCK_SIZE; 25 + dst += CHACHA_BLOCK_SIZE; 26 + src += CHACHA_BLOCK_SIZE; 27 + } 28 + if (bytes) { 29 + chacha_block_generic(state, stream, nrounds); 30 + crypto_xor_cpy(dst, src, stream, bytes); 31 + } 32 + } 33 + EXPORT_SYMBOL(chacha_crypt_generic); 34 + 35 + MODULE_LICENSE("GPL");