Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

[CRYPTO] seqiv: Add AEAD support

This patch adds support for using seqiv with AEAD algorithms. This is
useful for those AEAD algorithms that perform authentication before
encryption, because the IV generated by the underlying encryption algorithm
won't be available for authentication.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+175 -16
+175 -16
crypto/seqiv.c
··· 13 13 * 14 14 */ 15 15 16 + #include <crypto/internal/aead.h> 16 17 #include <crypto/internal/skcipher.h> 17 18 #include <linux/err.h> 18 19 #include <linux/init.h> ··· 54 53 skcipher_givcrypt_complete(req, err); 55 54 } 56 55 56 + static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err) 57 + { 58 + struct aead_request *subreq = aead_givcrypt_reqctx(req); 59 + struct crypto_aead *geniv; 60 + 61 + if (err == -EINPROGRESS) 62 + return; 63 + 64 + if (err) 65 + goto out; 66 + 67 + geniv = aead_givcrypt_reqtfm(req); 68 + memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv)); 69 + 70 + out: 71 + kfree(subreq->iv); 72 + } 73 + 74 + static void seqiv_aead_complete(struct crypto_async_request *base, int err) 75 + { 76 + struct aead_givcrypt_request *req = base->data; 77 + 78 + seqiv_aead_complete2(req, err); 79 + aead_givcrypt_complete(req, err); 80 + } 81 + 82 + static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq, 83 + unsigned int ivsize) 84 + { 85 + unsigned int len = ivsize; 86 + 87 + if (ivsize > sizeof(u64)) { 88 + memset(info, 0, ivsize - sizeof(u64)); 89 + len = sizeof(u64); 90 + } 91 + seq = cpu_to_be64(seq); 92 + memcpy(info + ivsize - len, &seq, len); 93 + crypto_xor(info, ctx->salt, ivsize); 94 + } 95 + 57 96 static int seqiv_givencrypt(struct skcipher_givcrypt_request *req) 58 97 { 59 98 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); ··· 102 61 crypto_completion_t complete; 103 62 void *data; 104 63 u8 *info; 105 - __be64 seq; 106 64 unsigned int ivsize; 107 - unsigned int len; 108 65 int err; 109 66 110 67 ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv)); ··· 130 91 ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst, 131 92 req->creq.nbytes, info); 132 93 133 - len = ivsize; 134 - if (ivsize > sizeof(u64)) { 135 - memset(info, 0, ivsize - sizeof(u64)); 136 - len = sizeof(u64); 137 - } 138 - seq = cpu_to_be64(req->seq); 139 - memcpy(info + ivsize - len, &seq, len); 140 - 
crypto_xor(info, ctx->salt, ivsize); 141 - 94 + seqiv_geniv(ctx, info, req->seq, ivsize); 142 95 memcpy(req->giv, info, ivsize); 143 96 144 97 err = crypto_ablkcipher_encrypt(subreq); 145 98 if (unlikely(info != req->creq.info)) 146 99 seqiv_complete2(req, err); 100 + return err; 101 + } 102 + 103 + static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req) 104 + { 105 + struct crypto_aead *geniv = aead_givcrypt_reqtfm(req); 106 + struct seqiv_ctx *ctx = crypto_aead_ctx(geniv); 107 + struct aead_request *areq = &req->areq; 108 + struct aead_request *subreq = aead_givcrypt_reqctx(req); 109 + crypto_completion_t complete; 110 + void *data; 111 + u8 *info; 112 + unsigned int ivsize; 113 + int err; 114 + 115 + aead_request_set_tfm(subreq, aead_geniv_base(geniv)); 116 + 117 + complete = areq->base.complete; 118 + data = areq->base.data; 119 + info = areq->iv; 120 + 121 + ivsize = crypto_aead_ivsize(geniv); 122 + 123 + if (unlikely(!IS_ALIGNED((unsigned long)info, 124 + crypto_aead_alignmask(geniv) + 1))) { 125 + info = kmalloc(ivsize, areq->base.flags & 126 + CRYPTO_TFM_REQ_MAY_SLEEP ? 
GFP_KERNEL: 127 + GFP_ATOMIC); 128 + if (!info) 129 + return -ENOMEM; 130 + 131 + complete = seqiv_aead_complete; 132 + data = req; 133 + } 134 + 135 + aead_request_set_callback(subreq, areq->base.flags, complete, data); 136 + aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen, 137 + info); 138 + aead_request_set_assoc(subreq, areq->assoc, areq->assoclen); 139 + 140 + seqiv_geniv(ctx, info, req->seq, ivsize); 141 + memcpy(req->giv, info, ivsize); 142 + 143 + err = crypto_aead_encrypt(subreq); 144 + if (unlikely(info != areq->iv)) 145 + seqiv_aead_complete2(req, err); 147 146 return err; 148 147 } 149 148 ··· 203 126 return seqiv_givencrypt(req); 204 127 } 205 128 129 + static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req) 130 + { 131 + struct crypto_aead *geniv = aead_givcrypt_reqtfm(req); 132 + struct seqiv_ctx *ctx = crypto_aead_ctx(geniv); 133 + 134 + spin_lock_bh(&ctx->lock); 135 + if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first) 136 + goto unlock; 137 + 138 + crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt; 139 + get_random_bytes(ctx->salt, crypto_aead_ivsize(geniv)); 140 + 141 + unlock: 142 + spin_unlock_bh(&ctx->lock); 143 + 144 + return seqiv_aead_givencrypt(req); 145 + } 146 + 206 147 static int seqiv_init(struct crypto_tfm *tfm) 207 148 { 208 149 struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm); ··· 233 138 return skcipher_geniv_init(tfm); 234 139 } 235 140 141 + static int seqiv_aead_init(struct crypto_tfm *tfm) 142 + { 143 + struct crypto_aead *geniv = __crypto_aead_cast(tfm); 144 + struct seqiv_ctx *ctx = crypto_aead_ctx(geniv); 145 + 146 + spin_lock_init(&ctx->lock); 147 + 148 + tfm->crt_aead.reqsize = sizeof(struct aead_request); 149 + 150 + return aead_geniv_init(tfm); 151 + } 152 + 236 153 static struct crypto_template seqiv_tmpl; 237 154 238 - static struct crypto_instance *seqiv_alloc(struct rtattr **tb) 155 + static struct crypto_instance 
*seqiv_ablkcipher_alloc(struct rtattr **tb) 239 156 { 240 157 struct crypto_instance *inst; 241 158 242 159 inst = skcipher_geniv_alloc(&seqiv_tmpl, tb, 0, 0); 160 + 243 161 if (IS_ERR(inst)) 244 162 goto out; 245 163 ··· 261 153 inst->alg.cra_init = seqiv_init; 262 154 inst->alg.cra_exit = skcipher_geniv_exit; 263 155 264 - inst->alg.cra_alignmask |= __alignof__(u32) - 1; 265 - 266 - inst->alg.cra_ctxsize = sizeof(struct seqiv_ctx); 267 156 inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize; 268 157 269 158 out: 270 159 return inst; 271 160 } 272 161 162 + static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb) 163 + { 164 + struct crypto_instance *inst; 165 + 166 + inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0); 167 + 168 + if (IS_ERR(inst)) 169 + goto out; 170 + 171 + inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first; 172 + 173 + inst->alg.cra_init = seqiv_aead_init; 174 + inst->alg.cra_exit = aead_geniv_exit; 175 + 176 + inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize; 177 + 178 + out: 179 + return inst; 180 + } 181 + 182 + static struct crypto_instance *seqiv_alloc(struct rtattr **tb) 183 + { 184 + struct crypto_attr_type *algt; 185 + struct crypto_instance *inst; 186 + int err; 187 + 188 + algt = crypto_get_attr_type(tb); 189 + err = PTR_ERR(algt); 190 + if (IS_ERR(algt)) 191 + return ERR_PTR(err); 192 + 193 + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK) 194 + inst = seqiv_ablkcipher_alloc(tb); 195 + else 196 + inst = seqiv_aead_alloc(tb); 197 + 198 + if (IS_ERR(inst)) 199 + goto out; 200 + 201 + inst->alg.cra_alignmask |= __alignof__(u32) - 1; 202 + inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx); 203 + 204 + out: 205 + return inst; 206 + } 207 + 208 + static void seqiv_free(struct crypto_instance *inst) 209 + { 210 + if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK) 211 + skcipher_geniv_free(inst); 212 + else 213 + aead_geniv_free(inst); 214 + } 215 + 273 216 static struct 
crypto_template seqiv_tmpl = { 274 217 .name = "seqiv", 275 218 .alloc = seqiv_alloc, 276 - .free = skcipher_geniv_free, 219 + .free = seqiv_free, 277 220 .module = THIS_MODULE, 278 221 }; 279 222