Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: arm64/sm4-gcm - Fix possible crash in GCM cryption

An often overlooked aspect of the skcipher walker API is that an
error is not just indicated by a non-zero return value, but by the
fact that walk->nbytes is zero.

Thus it is an error to call skcipher_walk_done after getting back
walk->nbytes == 0 from the previous interaction with the walker.

This is because when walk->nbytes is zero the walker is left in
an undefined state and any further calls to it may try to free
uninitialised stack memory.

The sm4 arm64 gcm code gets this wrong and ends up calling
skcipher_walk_done even when walk->nbytes is zero.

This patch rewrites the loop in a form that resembles other callers.

Reported-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Fixes: ae1b83c7d572 ("crypto: arm64/sm4 - add CE implementation for GCM mode")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Tested-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+25 -26
+25 -26
arch/arm64/crypto/sm4-ce-gcm-glue.c
--- a/arch/arm64/crypto/sm4-ce-gcm-glue.c
+++ b/arch/arm64/crypto/sm4-ce-gcm-glue.c
@@ -135,23 +135,24 @@
 }
 
 static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
-		     struct sm4_gcm_ctx *ctx, u8 ghash[],
+		     u8 ghash[], int err,
 		     void (*sm4_ce_pmull_gcm_crypt)(const u32 *rkey_enc,
 				u8 *dst, const u8 *src, u8 *iv,
 				unsigned int nbytes, u8 *ghash,
 				const u8 *ghash_table, const u8 *lengths))
 {
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	u8 __aligned(8) iv[SM4_BLOCK_SIZE];
 	be128 __aligned(8) lengths;
-	int err;
 
 	memset(ghash, 0, SM4_BLOCK_SIZE);
 
 	lengths.a = cpu_to_be64(req->assoclen * 8);
 	lengths.b = cpu_to_be64(walk->total * 8);
 
-	memcpy(iv, walk->iv, GCM_IV_SIZE);
+	memcpy(iv, req->iv, GCM_IV_SIZE);
 	put_unaligned_be32(2, iv + GCM_IV_SIZE);
 
 	kernel_neon_begin();
@@ -159,41 +158,40 @@
 	if (req->assoclen)
 		gcm_calculate_auth_mac(req, ghash);
 
-	do {
+	while (walk->nbytes) {
 		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
 		const u8 *src = walk->src.virt.addr;
 		u8 *dst = walk->dst.virt.addr;
 
 		if (walk->nbytes == walk->total) {
-			tail = 0;
-
 			sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
 					       walk->nbytes, ghash,
 					       ctx->ghash_table,
 					       (const u8 *)&lengths);
-		} else if (walk->nbytes - tail) {
-			sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
-					       walk->nbytes - tail, ghash,
-					       ctx->ghash_table, NULL);
+
+			kernel_neon_end();
+
+			return skcipher_walk_done(walk, 0);
 		}
+
+		sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
+				       walk->nbytes - tail, ghash,
+				       ctx->ghash_table, NULL);
 
 		kernel_neon_end();
 
 		err = skcipher_walk_done(walk, tail);
-		if (err)
-			return err;
-		if (walk->nbytes)
-			kernel_neon_begin();
-	} while (walk->nbytes > 0);
 
-	return 0;
+		kernel_neon_begin();
+	}
+
+	sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, NULL, NULL, iv,
+			       walk->nbytes, ghash, ctx->ghash_table,
+			       (const u8 *)&lengths);
+
+	kernel_neon_end();
+
+	return err;
 }
 
 static int gcm_encrypt(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
 	struct skcipher_walk walk;
 	int err;
 
 	err = skcipher_walk_aead_encrypt(&walk, req, false);
-	if (err)
-		return err;
-
-	err = gcm_crypt(req, &walk, ctx, ghash, sm4_ce_pmull_gcm_enc);
+	err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_enc);
 	if (err)
 		return err;
 
@@ -218,17 +215,13 @@
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	unsigned int authsize = crypto_aead_authsize(aead);
-	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
 	u8 authtag[SM4_BLOCK_SIZE];
 	struct skcipher_walk walk;
 	int err;
 
 	err = skcipher_walk_aead_decrypt(&walk, req, false);
-	if (err)
-		return err;
-
-	err = gcm_crypt(req, &walk, ctx, ghash, sm4_ce_pmull_gcm_dec);
+	err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_dec);
 	if (err)
 		return err;
 