
crypto: vmx - Reindent to kernel style

This patch reindents the vmx code base to the kernel coding style.
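The change is whitespace-only: the four-space indentation used in the original vmx sources becomes one tab (8 columns) per indent level, as described in Documentation/CodingStyle, with continuation lines aligned to the opening parenthesis. A minimal before/after sketch of the effect, using a hypothetical helper rather than code from this patch (kernel context assumed: u8 from <linux/types.h>, EINVAL from <linux/errno.h>, AES_MIN_KEY_SIZE from <crypto/aes.h>):

/* Before: four-space indents, as in the original vmx sources
 * (hypothetical example, not taken from this patch). */
static int p8_example_before(const u8 *key, unsigned int keylen)
{
    if (keylen != AES_MIN_KEY_SIZE)
        return -EINVAL;
    return 0;
}

/* After: one tab per indent level, kernel coding style. */
static int p8_example_after(const u8 *key, unsigned int keylen)
{
	if (keylen != AES_MIN_KEY_SIZE)
		return -EINVAL;
	return 0;
}

Since only indentation and line wrapping change, the generated object code should be identical before and after the patch.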

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+484 -460
+77 -75
drivers/crypto/vmx/aes.c
···
#include "aesp8-ppc.h"

struct p8_aes_ctx {
	struct crypto_cipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
};

static int p8_aes_init(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_cipher *fallback;
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));

	crypto_cipher_set_flags(fallback,
				crypto_cipher_get_flags((struct
							 crypto_cipher *)
							tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_cipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_setkey(struct crypto_tfm *tfm, const u8 *key,
			 unsigned int keylen)
{
	int ret;
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	pagefault_disable();
	enable_kernel_altivec();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
	pagefault_enable();

	ret += crypto_cipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (in_interrupt()) {
		crypto_cipher_encrypt_one(ctx->fallback, dst, src);
	} else {
		pagefault_disable();
		enable_kernel_altivec();
		aes_p8_encrypt(src, dst, &ctx->enc_key);
		pagefault_enable();
	}
}

static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (in_interrupt()) {
		crypto_cipher_decrypt_one(ctx->fallback, dst, src);
	} else {
		pagefault_disable();
		enable_kernel_altivec();
		aes_p8_decrypt(src, dst, &ctx->dec_key);
		pagefault_enable();
	}
}

struct crypto_alg p8_aes_alg = {
	.cra_name = "aes",
	.cra_driver_name = "p8_aes",
	.cra_module = THIS_MODULE,
	.cra_priority = 1000,
	.cra_type = NULL,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_ctx),
	.cra_init = p8_aes_init,
	.cra_exit = p8_aes_exit,
	.cra_cipher = {
		       .cia_min_keysize = AES_MIN_KEY_SIZE,
		       .cia_max_keysize = AES_MAX_KEY_SIZE,
		       .cia_setkey = p8_aes_setkey,
		       .cia_encrypt = p8_aes_encrypt,
		       .cia_decrypt = p8_aes_decrypt,
	},
};
+124 -116
drivers/crypto/vmx/aes_cbc.c
···
#include "aesp8-ppc.h"

struct p8_aes_cbc_ctx {
	struct crypto_blkcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
};

static int p8_aes_cbc_init(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_blkcipher *fallback;
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback =
	    crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));

	crypto_blkcipher_set_flags(
		fallback,
		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_cbc_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_blkcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	pagefault_disable();
	enable_kernel_altivec();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
	pagefault_enable();

	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_cbc_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
	struct blkcipher_desc fallback_desc = {
		.tfm = ctx->fallback,
		.info = desc->info,
		.flags = desc->flags
	};

	if (in_interrupt()) {
		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
					       nbytes);
	} else {
		pagefault_disable();
		enable_kernel_altivec();

		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt(desc, &walk);
		while ((nbytes = walk.nbytes)) {
			aes_p8_cbc_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   nbytes & AES_BLOCK_MASK,
					   &ctx->enc_key, walk.iv, 1);
			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}

		pagefault_enable();
	}

	return ret;
}

static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_cbc_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
	struct blkcipher_desc fallback_desc = {
		.tfm = ctx->fallback,
		.info = desc->info,
		.flags = desc->flags
	};

	if (in_interrupt()) {
		ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src,
					       nbytes);
	} else {
		pagefault_disable();
		enable_kernel_altivec();

		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt(desc, &walk);
		while ((nbytes = walk.nbytes)) {
			aes_p8_cbc_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   nbytes & AES_BLOCK_MASK,
					   &ctx->dec_key, walk.iv, 0);
			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}

		pagefault_enable();
	}

	return ret;
}


struct crypto_alg p8_aes_cbc_alg = {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "p8_aes_cbc",
	.cra_module = THIS_MODULE,
	.cra_priority = 1000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_cbc_ctx),
	.cra_init = p8_aes_cbc_init,
	.cra_exit = p8_aes_cbc_exit,
	.cra_blkcipher = {
			  .ivsize = 0,
			  .min_keysize = AES_MIN_KEY_SIZE,
			  .max_keysize = AES_MAX_KEY_SIZE,
			  .setkey = p8_aes_cbc_setkey,
			  .encrypt = p8_aes_cbc_encrypt,
			  .decrypt = p8_aes_cbc_decrypt,
	},
};
+108 -99
drivers/crypto/vmx/aes_ctr.c
···
#include "aesp8-ppc.h"

struct p8_aes_ctr_ctx {
	struct crypto_blkcipher *fallback;
	struct aes_key enc_key;
};

static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_blkcipher *fallback;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback =
	    crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));

	crypto_blkcipher_set_flags(
		fallback,
		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_blkcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	pagefault_disable();
	enable_kernel_altivec();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	pagefault_enable();

	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
			     struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	pagefault_disable();
	enable_kernel_altivec();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	pagefault_enable();

	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_ctr_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
	struct blkcipher_desc fallback_desc = {
		.tfm = ctx->fallback,
		.info = desc->info,
		.flags = desc->flags
	};

	if (in_interrupt()) {
		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
					       nbytes);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
			pagefault_disable();
			enable_kernel_altivec();
			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
						    walk.dst.virt.addr,
						    (nbytes &
						     AES_BLOCK_MASK) /
						    AES_BLOCK_SIZE,
						    &ctx->enc_key,
						    walk.iv);
			pagefault_enable();

			crypto_inc(walk.iv, AES_BLOCK_SIZE);
			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
		if (walk.nbytes) {
			p8_aes_ctr_final(ctx, &walk);
			ret = blkcipher_walk_done(desc, &walk, 0);
		}
	}

	return ret;
}

struct crypto_alg p8_aes_ctr_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "p8_aes_ctr",
	.cra_module = THIS_MODULE,
	.cra_priority = 1000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.cra_init = p8_aes_ctr_init,
	.cra_exit = p8_aes_ctr_exit,
	.cra_blkcipher = {
			  .ivsize = 0,
			  .min_keysize = AES_MIN_KEY_SIZE,
			  .max_keysize = AES_MAX_KEY_SIZE,
			  .setkey = p8_aes_ctr_setkey,
			  .encrypt = p8_aes_ctr_crypt,
			  .decrypt = p8_aes_ctr_crypt,
	},
};
+8 -7
drivers/crypto/vmx/aesp8-ppc.h
···
#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))

struct aes_key {
	u8 key[AES_MAX_KEYLENGTH];
	int rounds;
};

int aes_p8_set_encrypt_key(const u8 *userKey, const int bits,
			   struct aes_key *key);
int aes_p8_set_decrypt_key(const u8 *userKey, const int bits,
			   struct aes_key *key);
void aes_p8_encrypt(const u8 *in, u8 *out, const struct aes_key *key);
void aes_p8_decrypt(const u8 *in, u8 *out, const struct aes_key *key);
void aes_p8_cbc_encrypt(const u8 *in, u8 *out, size_t len,
			const struct aes_key *key, u8 *iv, const int enc);
void aes_p8_ctr32_encrypt_blocks(const u8 *in, u8 *out,
				 size_t len, const struct aes_key *key,
				 const u8 *iv);
+133 -129
drivers/crypto/vmx/ghash.c
···
void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
		  const u8 *in, size_t len);

struct p8_ghash_ctx {
	u128 htable[16];
	struct crypto_shash *fallback;
};

struct p8_ghash_desc_ctx {
	u64 shash[2];
	u8 buffer[GHASH_DIGEST_SIZE];
	int bytes;
	struct shash_desc fallback_desc;
};

static int p8_ghash_init_tfm(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_shash *fallback;
	struct crypto_shash *shash_tfm = __crypto_shash_cast(tfm);
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_tfm_alg_driver_name(crypto_shash_tfm(fallback)));

	crypto_shash_set_flags(fallback,
			       crypto_shash_get_flags((struct crypto_shash
						       *) tfm));
	ctx->fallback = fallback;

	shash_tfm->descsize = sizeof(struct p8_ghash_desc_ctx)
	    + crypto_shash_descsize(fallback);

	return 0;
}

static void p8_ghash_exit_tfm(struct crypto_tfm *tfm)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_shash(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_ghash_init(struct shash_desc *desc)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	dctx->bytes = 0;
	memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
	dctx->fallback_desc.tfm = ctx->fallback;
	dctx->fallback_desc.flags = desc->flags;
	return crypto_shash_init(&dctx->fallback_desc);
}

static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));

	if (keylen != GHASH_KEY_LEN)
		return -EINVAL;

	pagefault_disable();
	enable_kernel_altivec();
	enable_kernel_fp();
	gcm_init_p8(ctx->htable, (const u64 *) key);
	pagefault_enable();
	return crypto_shash_setkey(ctx->fallback, key, keylen);
}

static int p8_ghash_update(struct shash_desc *desc,
			   const u8 *src, unsigned int srclen)
{
	unsigned int len;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_update(&dctx->fallback_desc, src,
					   srclen);
	} else {
		if (dctx->bytes) {
			if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
				memcpy(dctx->buffer + dctx->bytes, src,
				       srclen);
				dctx->bytes += srclen;
				return 0;
			}
			memcpy(dctx->buffer + dctx->bytes, src,
			       GHASH_DIGEST_SIZE - dctx->bytes);
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable,
				     dctx->buffer, GHASH_DIGEST_SIZE);
			pagefault_enable();
			src += GHASH_DIGEST_SIZE - dctx->bytes;
			srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
			dctx->bytes = 0;
		}
		len = srclen & ~(GHASH_DIGEST_SIZE - 1);
		if (len) {
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
			pagefault_enable();
			src += len;
			srclen -= len;
		}
		if (srclen) {
			memcpy(dctx->buffer, src, srclen);
			dctx->bytes = srclen;
		}
		return 0;
	}
}

static int p8_ghash_final(struct shash_desc *desc, u8 *out)
{
	int i;
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
	struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	if (IN_INTERRUPT) {
		return crypto_shash_final(&dctx->fallback_desc, out);
	} else {
		if (dctx->bytes) {
			for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
				dctx->buffer[i] = 0;
			pagefault_disable();
			enable_kernel_altivec();
			enable_kernel_fp();
			gcm_ghash_p8(dctx->shash, ctx->htable,
				     dctx->buffer, GHASH_DIGEST_SIZE);
			pagefault_enable();
			dctx->bytes = 0;
		}
		memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
		return 0;
	}
}

struct shash_alg p8_ghash_alg = {
	.digestsize = GHASH_DIGEST_SIZE,
	.init = p8_ghash_init,
	.update = p8_ghash_update,
	.final = p8_ghash_final,
	.setkey = p8_ghash_setkey,
	.descsize = sizeof(struct p8_ghash_desc_ctx),
	.base = {
		 .cra_name = "ghash",
		 .cra_driver_name = "p8_ghash",
		 .cra_priority = 1000,
		 .cra_flags = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_NEED_FALLBACK,
		 .cra_blocksize = GHASH_BLOCK_SIZE,
		 .cra_ctxsize = sizeof(struct p8_ghash_ctx),
		 .cra_module = THIS_MODULE,
		 .cra_init = p8_ghash_init_tfm,
		 .cra_exit = p8_ghash_exit_tfm,
	},
};
+34 -34
drivers/crypto/vmx/vmx.c
···
extern struct crypto_alg p8_aes_cbc_alg;
extern struct crypto_alg p8_aes_ctr_alg;
static struct crypto_alg *algs[] = {
	&p8_aes_alg,
	&p8_aes_cbc_alg,
	&p8_aes_ctr_alg,
	NULL,
};

int __init p8_init(void)
{
	int ret = 0;
	struct crypto_alg **alg_it;

	if (!(cur_cpu_spec->cpu_user_features2 & PPC_FEATURE2_VEC_CRYPTO))
		return -ENODEV;

	for (alg_it = algs; *alg_it; alg_it++) {
		ret = crypto_register_alg(*alg_it);
		printk(KERN_INFO "crypto_register_alg '%s' = %d\n",
		       (*alg_it)->cra_name, ret);
		if (ret) {
			for (alg_it--; alg_it >= algs; alg_it--)
				crypto_unregister_alg(*alg_it);
			break;
		}
	}
	if (ret)
		return ret;

	ret = crypto_register_shash(&p8_ghash_alg);
	if (ret) {
		for (alg_it = algs; *alg_it; alg_it++)
			crypto_unregister_alg(*alg_it);
	}
	return ret;
}

void __exit p8_exit(void)
{
	struct crypto_alg **alg_it;

	for (alg_it = algs; *alg_it; alg_it++) {
		printk(KERN_INFO "Removing '%s'\n", (*alg_it)->cra_name);
		crypto_unregister_alg(*alg_it);
	}
	crypto_unregister_shash(&p8_ghash_alg);
}

module_init(p8_init);
module_exit(p8_exit);

MODULE_AUTHOR("Marcelo Cerri<mhcerri@br.ibm.com>");
MODULE_DESCRIPTION("IBM VMX cryptographic acceleration instructions "
		   "support on Power 8");
MODULE_LICENSE("GPL");
MODULE_VERSION("1.0.0");