crypto: vmx - Use skcipher for cbc fallback

Cc: stable@vger.kernel.org #4.10
Signed-off-by: Paulo Flabiano Smorigo <pfsmorigo@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Paulo Flabiano Smorigo; committed by Herbert Xu. Commit c96d0a1c (parent 1c68bb0f).

+24 -23 (24 insertions, 23 deletions)
drivers/crypto/vmx/aes_cbc.c
@@ -27,11 +27,12 @@
 #include <asm/switch_to.h>
 #include <crypto/aes.h>
 #include <crypto/scatterwalk.h>
+#include <crypto/skcipher.h>

 #include "aesp8-ppc.h"

 struct p8_aes_cbc_ctx {
-	struct crypto_blkcipher *fallback;
+	struct crypto_skcipher *fallback;
 	struct aes_key enc_key;
 	struct aes_key dec_key;
 };
@@ -40,7 +39,7 @@
 static int p8_aes_cbc_init(struct crypto_tfm *tfm)
 {
 	const char *alg;
-	struct crypto_blkcipher *fallback;
+	struct crypto_skcipher *fallback;
 	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

 	if (!(alg = crypto_tfm_alg_name(tfm))) {
@@ -48,21 +47,22 @@
 		return -ENOENT;
 	}

-	fallback =
-	    crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
+	fallback = crypto_alloc_skcipher(alg, 0,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
 	if (IS_ERR(fallback)) {
 		printk(KERN_ERR
 		       "Failed to allocate transformation for '%s': %ld\n",
@@ -58,11 +56,12 @@
 		return PTR_ERR(fallback);
 	}
 	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
-	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
+	       crypto_skcipher_driver_name(fallback));

-	crypto_blkcipher_set_flags(
+
+	crypto_skcipher_set_flags(
 		fallback,
-		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
+		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
 	ctx->fallback = fallback;

 	return 0;
@@ -74,7 +71,7 @@
 	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

 	if (ctx->fallback) {
-		crypto_free_blkcipher(ctx->fallback);
+		crypto_free_skcipher(ctx->fallback);
 		ctx->fallback = NULL;
 	}
 }
@@ -94,7 +91,7 @@
 	pagefault_enable();
 	preempt_enable();

-	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
+	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
 	return ret;
 }
@@ -106,15 +103,14 @@
 	struct blkcipher_walk walk;
 	struct p8_aes_cbc_ctx *ctx =
 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
-	struct blkcipher_desc fallback_desc = {
-		.tfm = ctx->fallback,
-		.info = desc->info,
-		.flags = desc->flags
-	};

 	if (in_interrupt()) {
-		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
-				nbytes);
+		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
+		skcipher_request_set_tfm(req, ctx->fallback);
+		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
+		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
+		ret = crypto_skcipher_encrypt(req);
+		skcipher_request_zero(req);
 	} else {
 		preempt_disable();
 		pagefault_disable();
@@ -146,15 +144,14 @@
 	struct blkcipher_walk walk;
 	struct p8_aes_cbc_ctx *ctx =
 		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
-	struct blkcipher_desc fallback_desc = {
-		.tfm = ctx->fallback,
-		.info = desc->info,
-		.flags = desc->flags
-	};

 	if (in_interrupt()) {
-		ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src,
-				nbytes);
+		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
+		skcipher_request_set_tfm(req, ctx->fallback);
+		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
+		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
+		ret = crypto_skcipher_decrypt(req);
+		skcipher_request_zero(req);
 	} else {
 		preempt_disable();
 		pagefault_disable();