Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

[CRYPTO] skcipher: Create default givcipher instances

This patch makes crypto_alloc_ablkcipher/crypto_grab_skcipher always
return algorithms that are capable of generating their own IVs through
givencrypt and givdecrypt. Each algorithm may specify its default IV
generator through the geniv field.

For algorithms that do not set the geniv field, the blkcipher layer will
pick a default. Currently it's chainiv for synchronous algorithms and
eseqiv for asynchronous algorithms. Note that if neither of these
wrappers works with a given algorithm, that algorithm must specify its
own geniv or it cannot be used at all.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+181 -13
+157 -1
crypto/ablkcipher.c
··· 18 18 #include <linux/init.h> 19 19 #include <linux/kernel.h> 20 20 #include <linux/module.h> 21 + #include <linux/rtnetlink.h> 22 + #include <linux/sched.h> 21 23 #include <linux/slab.h> 22 24 #include <linux/seq_file.h> 23 25 ··· 70 68 return alg->cra_ctxsize; 71 69 } 72 70 71 + int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req) 72 + { 73 + return crypto_ablkcipher_encrypt(&req->creq); 74 + } 75 + 76 + int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req) 77 + { 78 + return crypto_ablkcipher_decrypt(&req->creq); 79 + } 80 + 73 81 static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type, 74 82 u32 mask) 75 83 { ··· 92 80 crt->setkey = setkey; 93 81 crt->encrypt = alg->encrypt; 94 82 crt->decrypt = alg->decrypt; 83 + if (!alg->ivsize) { 84 + crt->givencrypt = skcipher_null_givencrypt; 85 + crt->givdecrypt = skcipher_null_givdecrypt; 86 + } 95 87 crt->base = __crypto_ablkcipher_cast(tfm); 96 88 crt->ivsize = alg->ivsize; 97 89 ··· 179 163 return alg->cra_flags & CRYPTO_ALG_ASYNC ? 
"eseqiv" : "chainiv"; 180 164 } 181 165 166 + static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) 167 + { 168 + struct rtattr *tb[3]; 169 + struct { 170 + struct rtattr attr; 171 + struct crypto_attr_type data; 172 + } ptype; 173 + struct { 174 + struct rtattr attr; 175 + struct crypto_attr_alg data; 176 + } palg; 177 + struct crypto_template *tmpl; 178 + struct crypto_instance *inst; 179 + struct crypto_alg *larval; 180 + const char *geniv; 181 + int err; 182 + 183 + larval = crypto_larval_lookup(alg->cra_driver_name, 184 + CRYPTO_ALG_TYPE_GIVCIPHER, 185 + CRYPTO_ALG_TYPE_MASK); 186 + err = PTR_ERR(larval); 187 + if (IS_ERR(larval)) 188 + goto out; 189 + 190 + err = -EAGAIN; 191 + if (!crypto_is_larval(larval)) 192 + goto drop_larval; 193 + 194 + ptype.attr.rta_len = sizeof(ptype); 195 + ptype.attr.rta_type = CRYPTOA_TYPE; 196 + ptype.data.type = type | CRYPTO_ALG_GENIV; 197 + /* GENIV tells the template that we're making a default geniv. */ 198 + ptype.data.mask = mask | CRYPTO_ALG_GENIV; 199 + tb[0] = &ptype.attr; 200 + 201 + palg.attr.rta_len = sizeof(palg); 202 + palg.attr.rta_type = CRYPTOA_ALG; 203 + /* Must use the exact name to locate ourselves. */ 204 + memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME); 205 + tb[1] = &palg.attr; 206 + 207 + tb[2] = NULL; 208 + 209 + if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == 210 + CRYPTO_ALG_TYPE_BLKCIPHER) 211 + geniv = alg->cra_blkcipher.geniv; 212 + else 213 + geniv = alg->cra_ablkcipher.geniv; 214 + 215 + if (!geniv) 216 + geniv = crypto_default_geniv(alg); 217 + 218 + tmpl = crypto_lookup_template(geniv); 219 + err = -ENOENT; 220 + if (!tmpl) 221 + goto kill_larval; 222 + 223 + inst = tmpl->alloc(tb); 224 + err = PTR_ERR(inst); 225 + if (IS_ERR(inst)) 226 + goto put_tmpl; 227 + 228 + if ((err = crypto_register_instance(tmpl, inst))) { 229 + tmpl->free(inst); 230 + goto put_tmpl; 231 + } 232 + 233 + /* Redo the lookup to use the instance we just registered. 
*/ 234 + err = -EAGAIN; 235 + 236 + put_tmpl: 237 + crypto_tmpl_put(tmpl); 238 + kill_larval: 239 + crypto_larval_kill(larval); 240 + drop_larval: 241 + crypto_mod_put(larval); 242 + out: 243 + crypto_mod_put(alg); 244 + return err; 245 + } 246 + 247 + static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type, 248 + u32 mask) 249 + { 250 + struct crypto_alg *alg; 251 + 252 + alg = crypto_alg_mod_lookup(name, type, mask); 253 + if (IS_ERR(alg)) 254 + return alg; 255 + 256 + if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == 257 + CRYPTO_ALG_TYPE_GIVCIPHER) 258 + return alg; 259 + 260 + if (!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == 261 + CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize : 262 + alg->cra_ablkcipher.ivsize)) 263 + return alg; 264 + 265 + return ERR_PTR(crypto_givcipher_default(alg, type, mask)); 266 + } 267 + 182 268 int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name, 183 269 u32 type, u32 mask) 184 270 { ··· 290 172 type = crypto_skcipher_type(type); 291 173 mask = crypto_skcipher_mask(mask); 292 174 293 - alg = crypto_alg_mod_lookup(name, type, mask); 175 + alg = crypto_lookup_skcipher(name, type, mask); 294 176 if (IS_ERR(alg)) 295 177 return PTR_ERR(alg); 296 178 ··· 299 181 return err; 300 182 } 301 183 EXPORT_SYMBOL_GPL(crypto_grab_skcipher); 184 + 185 + struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name, 186 + u32 type, u32 mask) 187 + { 188 + struct crypto_tfm *tfm; 189 + int err; 190 + 191 + type = crypto_skcipher_type(type); 192 + mask = crypto_skcipher_mask(mask); 193 + 194 + for (;;) { 195 + struct crypto_alg *alg; 196 + 197 + alg = crypto_lookup_skcipher(alg_name, type, mask); 198 + if (IS_ERR(alg)) { 199 + err = PTR_ERR(alg); 200 + goto err; 201 + } 202 + 203 + tfm = __crypto_alloc_tfm(alg, type, mask); 204 + if (!IS_ERR(tfm)) 205 + return __crypto_ablkcipher_cast(tfm); 206 + 207 + crypto_mod_put(alg); 208 + err = PTR_ERR(tfm); 209 + 210 + err: 211 + if (err != -EAGAIN) 
212 + break; 213 + if (signal_pending(current)) { 214 + err = -EINTR; 215 + break; 216 + } 217 + } 218 + 219 + return ERR_PTR(err); 220 + } 221 + EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher); 302 222 303 223 MODULE_LICENSE("GPL"); 304 224 MODULE_DESCRIPTION("Asynchronous block chaining cipher type");
+14 -5
crypto/api.c
··· 137 137 return alg; 138 138 } 139 139 140 - static void crypto_larval_kill(struct crypto_alg *alg) 140 + void crypto_larval_kill(struct crypto_alg *alg) 141 141 { 142 142 struct crypto_larval *larval = (void *)alg; 143 143 ··· 147 147 complete_all(&larval->completion); 148 148 crypto_alg_put(alg); 149 149 } 150 + EXPORT_SYMBOL_GPL(crypto_larval_kill); 150 151 151 152 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg) 152 153 { ··· 177 176 return alg; 178 177 } 179 178 180 - struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) 179 + struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask) 181 180 { 182 181 struct crypto_alg *alg; 183 - struct crypto_alg *larval; 184 - int ok; 185 182 186 183 if (!name) 187 184 return ERR_PTR(-ENOENT); ··· 192 193 if (alg) 193 194 return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg; 194 195 195 - larval = crypto_larval_alloc(name, type, mask); 196 + return crypto_larval_alloc(name, type, mask); 197 + } 198 + EXPORT_SYMBOL_GPL(crypto_larval_lookup); 199 + 200 + struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) 201 + { 202 + struct crypto_alg *alg; 203 + struct crypto_alg *larval; 204 + int ok; 205 + 206 + larval = crypto_larval_lookup(name, type, mask); 196 207 if (IS_ERR(larval) || !crypto_is_larval(larval)) 197 208 return larval; 198 209
+4
crypto/blkcipher.c
··· 450 450 crt->setkey = async_setkey; 451 451 crt->encrypt = async_encrypt; 452 452 crt->decrypt = async_decrypt; 453 + if (!alg->ivsize) { 454 + crt->givencrypt = skcipher_null_givencrypt; 455 + crt->givdecrypt = skcipher_null_givdecrypt; 456 + } 453 457 crt->base = __crypto_ablkcipher_cast(tfm); 454 458 crt->ivsize = alg->ivsize; 455 459
+2
crypto/internal.h
··· 93 93 void crypto_exit_cipher_ops(struct crypto_tfm *tfm); 94 94 void crypto_exit_compress_ops(struct crypto_tfm *tfm); 95 95 96 + void crypto_larval_kill(struct crypto_alg *alg); 97 + struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask); 96 98 void crypto_larval_error(const char *name, u32 type, u32 mask); 97 99 98 100 void crypto_shoot_alg(struct crypto_alg *alg);
+2
include/crypto/internal/skcipher.h
··· 53 53 crypto_skcipher_mask(0))); 54 54 } 55 55 56 + int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req); 57 + int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req); 56 58 const char *crypto_default_geniv(const struct crypto_alg *alg); 57 59 58 60 struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
+2 -7
include/linux/crypto.h
··· 561 561 return mask; 562 562 } 563 563 564 - static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher( 565 - const char *alg_name, u32 type, u32 mask) 566 - { 567 - return __crypto_ablkcipher_cast( 568 - crypto_alloc_base(alg_name, crypto_skcipher_type(type), 569 - crypto_skcipher_mask(mask))); 570 - } 564 + struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name, 565 + u32 type, u32 mask); 571 566 572 567 static inline struct crypto_tfm *crypto_ablkcipher_tfm( 573 568 struct crypto_ablkcipher *tfm)