Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: gcm - Use GCM IV size constant

This patch replaces the hard-coded GCM IV size values with their named constants.

Signed-off-by: Corentin Labbe <clabbe.montjoie@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Corentin LABBE and committed by
Herbert Xu
e0ab7e9c cb3f3817

+12 -11
+12 -11
crypto/gcm.c
··· 14 14 #include <crypto/internal/hash.h> 15 15 #include <crypto/null.h> 16 16 #include <crypto/scatterwalk.h> 17 + #include <crypto/gcm.h> 17 18 #include <crypto/hash.h> 18 19 #include "internal.h" 19 20 #include <linux/completion.h> ··· 198 197 struct scatterlist *sg; 199 198 200 199 memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag)); 201 - memcpy(pctx->iv, req->iv, 12); 202 - memcpy(pctx->iv + 12, &counter, 4); 200 + memcpy(pctx->iv, req->iv, GCM_AES_IV_SIZE); 201 + memcpy(pctx->iv + GCM_AES_IV_SIZE, &counter, 4); 203 202 204 203 sg_init_table(pctx->src, 3); 205 204 sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag)); ··· 696 695 inst->alg.base.cra_alignmask = ghash->base.cra_alignmask | 697 696 ctr->base.cra_alignmask; 698 697 inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx); 699 - inst->alg.ivsize = 12; 698 + inst->alg.ivsize = GCM_AES_IV_SIZE; 700 699 inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr); 701 700 inst->alg.maxauthsize = 16; 702 701 inst->alg.init = crypto_gcm_init_tfm; ··· 833 832 u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child), 834 833 crypto_aead_alignmask(child) + 1); 835 834 836 - scatterwalk_map_and_copy(iv + 12, req->src, 0, req->assoclen - 8, 0); 835 + scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0, req->assoclen - 8, 0); 837 836 838 837 memcpy(iv, ctx->nonce, 4); 839 838 memcpy(iv + 4, req->iv, 8); 840 839 841 840 sg_init_table(rctx->src, 3); 842 - sg_set_buf(rctx->src, iv + 12, req->assoclen - 8); 841 + sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8); 843 842 sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen); 844 843 if (sg != rctx->src + 1) 845 844 sg_chain(rctx->src, 2, sg); 846 845 847 846 if (req->src != req->dst) { 848 847 sg_init_table(rctx->dst, 3); 849 - sg_set_buf(rctx->dst, iv + 12, req->assoclen - 8); 848 + sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8); 850 849 sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen); 
851 850 if (sg != rctx->dst + 1) 852 851 sg_chain(rctx->dst, 2, sg); ··· 958 957 err = -EINVAL; 959 958 960 959 /* Underlying IV size must be 12. */ 961 - if (crypto_aead_alg_ivsize(alg) != 12) 960 + if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE) 962 961 goto out_drop_alg; 963 962 964 963 /* Not a stream cipher? */ ··· 981 980 982 981 inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx); 983 982 984 - inst->alg.ivsize = 8; 983 + inst->alg.ivsize = GCM_RFC4106_IV_SIZE; 985 984 inst->alg.chunksize = crypto_aead_alg_chunksize(alg); 986 985 inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg); 987 986 ··· 1135 1134 tfm, 1136 1135 sizeof(struct crypto_rfc4543_req_ctx) + 1137 1136 ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) + 1138 - align + 12); 1137 + align + GCM_AES_IV_SIZE); 1139 1138 1140 1139 return 0; 1141 1140 ··· 1200 1199 err = -EINVAL; 1201 1200 1202 1201 /* Underlying IV size must be 12. */ 1203 - if (crypto_aead_alg_ivsize(alg) != 12) 1202 + if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE) 1204 1203 goto out_drop_alg; 1205 1204 1206 1205 /* Not a stream cipher? */ ··· 1223 1222 1224 1223 inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx); 1225 1224 1226 - inst->alg.ivsize = 8; 1225 + inst->alg.ivsize = GCM_RFC4543_IV_SIZE; 1227 1226 inst->alg.chunksize = crypto_aead_alg_chunksize(alg); 1228 1227 inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg); 1229 1228