Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: ccp - Remove VLA usage of skcipher

In the quest to remove all stack VLA usage from the kernel[1], this
replaces struct crypto_skcipher and SKCIPHER_REQUEST_ON_STACK() usage
with struct crypto_sync_skcipher and SYNC_SKCIPHER_REQUEST_ON_STACK(),
which uses a fixed stack size.

[1] https://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com

Cc: Tom Lendacky <thomas.lendacky@amd.com>
Cc: Gary Hook <gary.hook@amd.com>
Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Kees Cook and committed by Herbert Xu
7f28615d d2841f22

+8 -7
+7 -6
drivers/crypto/ccp/ccp-crypto-aes-xts.c
··· 102 102 ctx->u.aes.key_len = key_len / 2; 103 103 sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len); 104 104 105 - return crypto_skcipher_setkey(ctx->u.aes.tfm_skcipher, key, key_len); 105 + return crypto_sync_skcipher_setkey(ctx->u.aes.tfm_skcipher, key, key_len); 106 106 } 107 107 108 108 static int ccp_aes_xts_crypt(struct ablkcipher_request *req, ··· 151 151 (ctx->u.aes.key_len != AES_KEYSIZE_256)) 152 152 fallback = 1; 153 153 if (fallback) { 154 - SKCIPHER_REQUEST_ON_STACK(subreq, ctx->u.aes.tfm_skcipher); 154 + SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, 155 + ctx->u.aes.tfm_skcipher); 155 156 156 157 /* Use the fallback to process the request for any 157 158 * unsupported unit sizes or key sizes 158 159 */ 159 - skcipher_request_set_tfm(subreq, ctx->u.aes.tfm_skcipher); 160 + skcipher_request_set_sync_tfm(subreq, ctx->u.aes.tfm_skcipher); 160 161 skcipher_request_set_callback(subreq, req->base.flags, 161 162 NULL, NULL); 162 163 skcipher_request_set_crypt(subreq, req->src, req->dst, ··· 204 203 static int ccp_aes_xts_cra_init(struct crypto_tfm *tfm) 205 204 { 206 205 struct ccp_ctx *ctx = crypto_tfm_ctx(tfm); 207 - struct crypto_skcipher *fallback_tfm; 206 + struct crypto_sync_skcipher *fallback_tfm; 208 207 209 208 ctx->complete = ccp_aes_xts_complete; 210 209 ctx->u.aes.key_len = 0; 211 210 212 - fallback_tfm = crypto_alloc_skcipher("xts(aes)", 0, 211 + fallback_tfm = crypto_alloc_sync_skcipher("xts(aes)", 0, 213 212 CRYPTO_ALG_ASYNC | 214 213 CRYPTO_ALG_NEED_FALLBACK); 215 214 if (IS_ERR(fallback_tfm)) { ··· 227 226 { 228 227 struct ccp_ctx *ctx = crypto_tfm_ctx(tfm); 229 228 230 - crypto_free_skcipher(ctx->u.aes.tfm_skcipher); 229 + crypto_free_sync_skcipher(ctx->u.aes.tfm_skcipher); 231 230 } 232 231 233 232 static int ccp_register_aes_xts_alg(struct list_head *head,
+1 -1
drivers/crypto/ccp/ccp-crypto.h
··· 88 88 /***** AES related defines *****/ 89 89 struct ccp_aes_ctx { 90 90 /* Fallback cipher for XTS with unsupported unit sizes */ 91 - struct crypto_skcipher *tfm_skcipher; 91 + struct crypto_sync_skcipher *tfm_skcipher; 92 92 93 93 /* Cipher used to generate CMAC K1/K2 keys */ 94 94 struct crypto_cipher *tfm_cipher;