Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: xcbc - Fix shash conversion

Although xcbc was converted to shash, it didn't obey the new
requirement that all hash state must be stored in the descriptor
rather than the transform.

This patch fixes this issue and also optimises away the rekeying
by precomputing K2 and K3 within setkey.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+77 -95
crypto/xcbc.c
··· 26 26 static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101, 27 27 0x02020202, 0x02020202, 0x02020202, 0x02020202, 28 28 0x03030303, 0x03030303, 0x03030303, 0x03030303}; 29 + 29 30 /* 30 31 * +------------------------ 31 32 * | <parent tfm> 32 33 * +------------------------ 33 - * | crypto_xcbc_ctx 34 + * | xcbc_tfm_ctx 35 + * +------------------------ 36 + * | consts (block size * 2) 37 + * +------------------------ 38 + */ 39 + struct xcbc_tfm_ctx { 40 + struct crypto_cipher *child; 41 + u8 ctx[]; 42 + }; 43 + 44 + /* 45 + * +------------------------ 46 + * | <shash desc> 47 + * +------------------------ 48 + * | xcbc_desc_ctx 34 49 * +------------------------ 35 50 * | odds (block size) 36 51 * +------------------------ 37 52 * | prev (block size) 38 53 * +------------------------ 39 - * | key (block size) 40 - * +------------------------ 41 - * | consts (block size * 3) 42 - * +------------------------ 43 54 */ 44 - struct crypto_xcbc_ctx { 45 - struct crypto_cipher *child; 46 - u8 *odds; 47 - u8 *prev; 48 - u8 *key; 49 - u8 *consts; 50 - unsigned int keylen; 55 + struct xcbc_desc_ctx { 51 56 unsigned int len; 57 + u8 ctx[]; 52 58 }; 53 - 54 - static int _crypto_xcbc_digest_setkey(struct crypto_shash *parent, 55 - struct crypto_xcbc_ctx *ctx) 56 - { 57 - int bs = crypto_shash_blocksize(parent); 58 - int err = 0; 59 - u8 key1[bs]; 60 - 61 - if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen))) 62 - return err; 63 - 64 - crypto_cipher_encrypt_one(ctx->child, key1, ctx->consts); 65 - 66 - return crypto_cipher_setkey(ctx->child, key1, bs); 67 - } 68 59 69 60 static int crypto_xcbc_digest_setkey(struct crypto_shash *parent, 70 61 const u8 *inkey, unsigned int keylen) 71 62 { 72 - struct crypto_xcbc_ctx *ctx = crypto_shash_ctx(parent); 63 + unsigned long alignmask = crypto_shash_alignmask(parent); 64 + struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent); 65 + int bs = crypto_shash_blocksize(parent); 66 + u8 *consts = 
PTR_ALIGN(&ctx->ctx[0], alignmask + 1); 67 + int err = 0; 68 + u8 key1[bs]; 73 69 74 - if (keylen != crypto_cipher_blocksize(ctx->child)) 75 - return -EINVAL; 70 + if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen))) 71 + return err; 76 72 77 - ctx->keylen = keylen; 78 - memcpy(ctx->key, inkey, keylen); 79 - ctx->consts = (u8*)ks; 73 + crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs); 74 + crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2); 75 + crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks); 80 76 81 - return _crypto_xcbc_digest_setkey(parent, ctx); 77 + return crypto_cipher_setkey(ctx->child, key1, bs); 78 + 82 79 } 83 80 84 81 static int crypto_xcbc_digest_init(struct shash_desc *pdesc) 85 82 { 86 - struct crypto_xcbc_ctx *ctx = crypto_shash_ctx(pdesc->tfm); 83 + unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm); 84 + struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); 87 85 int bs = crypto_shash_blocksize(pdesc->tfm); 86 + u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs; 88 87 89 88 ctx->len = 0; 90 - memset(ctx->odds, 0, bs); 91 - memset(ctx->prev, 0, bs); 89 + memset(prev, 0, bs); 92 90 93 91 return 0; 94 92 } ··· 95 97 unsigned int len) 96 98 { 97 99 struct crypto_shash *parent = pdesc->tfm; 98 - struct crypto_xcbc_ctx *ctx = crypto_shash_ctx(parent); 99 - struct crypto_cipher *tfm = ctx->child; 100 + unsigned long alignmask = crypto_shash_alignmask(parent); 101 + struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent); 102 + struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); 103 + struct crypto_cipher *tfm = tctx->child; 100 104 int bs = crypto_shash_blocksize(parent); 105 + u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); 106 + u8 *prev = odds + bs; 101 107 102 108 /* checking the data can fill the block */ 103 109 if ((ctx->len + len) <= bs) { 104 - memcpy(ctx->odds + ctx->len, p, len); 110 + memcpy(odds + ctx->len, p, len); 105 111 ctx->len += len; 106 112 return 0; 107 113 } 108 114 109 115 
/* filling odds with new data and encrypting it */ 110 - memcpy(ctx->odds + ctx->len, p, bs - ctx->len); 116 + memcpy(odds + ctx->len, p, bs - ctx->len); 111 117 len -= bs - ctx->len; 112 118 p += bs - ctx->len; 113 119 114 - crypto_xor(ctx->prev, ctx->odds, bs); 115 - crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev); 120 + crypto_xor(prev, odds, bs); 121 + crypto_cipher_encrypt_one(tfm, prev, prev); 116 122 117 123 /* clearing the length */ 118 124 ctx->len = 0; 119 125 120 126 /* encrypting the rest of data */ 121 127 while (len > bs) { 122 - crypto_xor(ctx->prev, p, bs); 123 - crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev); 128 + crypto_xor(prev, p, bs); 129 + crypto_cipher_encrypt_one(tfm, prev, prev); 124 130 p += bs; 125 131 len -= bs; 126 132 } 127 133 128 134 /* keeping the surplus of blocksize */ 129 135 if (len) { 130 - memcpy(ctx->odds, p, len); 136 + memcpy(odds, p, len); 131 137 ctx->len = len; 132 138 } 133 139 ··· 141 139 static int crypto_xcbc_digest_final(struct shash_desc *pdesc, u8 *out) 142 140 { 143 141 struct crypto_shash *parent = pdesc->tfm; 144 - struct crypto_xcbc_ctx *ctx = crypto_shash_ctx(parent); 145 - struct crypto_cipher *tfm = ctx->child; 142 + unsigned long alignmask = crypto_shash_alignmask(parent); 143 + struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent); 144 + struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); 145 + struct crypto_cipher *tfm = tctx->child; 146 146 int bs = crypto_shash_blocksize(parent); 147 - int err = 0; 147 + u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1); 148 + u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); 149 + u8 *prev = odds + bs; 150 + unsigned int offset = 0; 148 151 149 - if (ctx->len == bs) { 150 - u8 key2[bs]; 151 - 152 - if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0) 153 - return err; 154 - 155 - crypto_cipher_encrypt_one(tfm, key2, 156 - (u8 *)(ctx->consts + bs)); 157 - 158 - crypto_xor(ctx->prev, ctx->odds, bs); 159 - crypto_xor(ctx->prev, key2, bs); 160 
- _crypto_xcbc_digest_setkey(parent, ctx); 161 - 162 - crypto_cipher_encrypt_one(tfm, out, ctx->prev); 163 - } else { 164 - u8 key3[bs]; 152 + if (ctx->len != bs) { 165 153 unsigned int rlen; 166 - u8 *p = ctx->odds + ctx->len; 154 + u8 *p = odds + ctx->len; 155 + 167 156 *p = 0x80; 168 157 p++; 169 158 ··· 162 169 if (rlen) 163 170 memset(p, 0, rlen); 164 171 165 - if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0) 166 - return err; 167 - 168 - crypto_cipher_encrypt_one(tfm, key3, 169 - (u8 *)(ctx->consts + bs * 2)); 170 - 171 - crypto_xor(ctx->prev, ctx->odds, bs); 172 - crypto_xor(ctx->prev, key3, bs); 173 - 174 - _crypto_xcbc_digest_setkey(parent, ctx); 175 - 176 - crypto_cipher_encrypt_one(tfm, out, ctx->prev); 172 + offset += bs; 177 173 } 174 + 175 + crypto_xor(prev, odds, bs); 176 + crypto_xor(prev, consts + offset, bs); 177 + 178 + crypto_cipher_encrypt_one(tfm, out, prev); 178 179 179 180 return 0; 180 181 } ··· 178 191 struct crypto_cipher *cipher; 179 192 struct crypto_instance *inst = (void *)tfm->__crt_alg; 180 193 struct crypto_spawn *spawn = crypto_instance_ctx(inst); 181 - struct crypto_xcbc_ctx *ctx = crypto_tfm_ctx(tfm); 182 - int bs = crypto_tfm_alg_blocksize(tfm); 194 + struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm); 183 195 184 196 cipher = crypto_spawn_cipher(spawn); 185 197 if (IS_ERR(cipher)) 186 198 return PTR_ERR(cipher); 187 199 188 - switch(bs) { 189 - case 16: 190 - break; 191 - default: 192 - return -EINVAL; 193 - } 194 - 195 200 ctx->child = cipher; 196 - ctx->odds = (u8*)(ctx+1); 197 - ctx->prev = ctx->odds + bs; 198 - ctx->key = ctx->prev + bs; 199 201 200 202 return 0; 201 203 }; 202 204 203 205 static void xcbc_exit_tfm(struct crypto_tfm *tfm) 204 206 { 205 - struct crypto_xcbc_ctx *ctx = crypto_tfm_ctx(tfm); 207 + struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm); 206 208 crypto_free_cipher(ctx->child); 207 209 } 208 210 ··· 230 254 231 255 inst->alg.base.cra_priority = alg->cra_priority; 232 256 
inst->alg.base.cra_blocksize = alg->cra_blocksize; 233 - inst->alg.base.cra_alignmask = alg->cra_alignmask; 257 + inst->alg.base.cra_alignmask = alg->cra_alignmask | 3; 234 258 235 259 inst->alg.digestsize = alg->cra_blocksize; 236 - inst->alg.base.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) + 237 - ALIGN(alg->cra_blocksize * 3, 238 - sizeof(void *)); 260 + inst->alg.descsize = ALIGN(sizeof(struct xcbc_desc_ctx), 261 + crypto_tfm_ctx_alignment()) + 262 + (alg->cra_alignmask & 263 + ~(crypto_tfm_ctx_alignment() - 1)) + 264 + alg->cra_blocksize * 2; 265 + 266 + inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx), 267 + alg->cra_alignmask) + 268 + alg->cra_blocksize * 2; 239 269 inst->alg.base.cra_init = xcbc_init_tfm; 240 270 inst->alg.base.cra_exit = xcbc_exit_tfm; 241 271