Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git


crypto: aes-ce - Convert to skcipher

This patch converts aes-ce over to the skcipher interface.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
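
For readers unfamiliar with the target interface, here is a minimal caller-side sketch of how a synchronous kernel user drives an skcipher such as the "cbc(aes)" instance this driver ends up providing. This is not part of the patch; the function name example_cbc_encrypt() and its parameters are illustrative only, and it assumes a kernel new enough to offer the DECLARE_CRYPTO_WAIT()/crypto_wait_req() helpers.

/* Illustrative only: encrypt one buffer in place with "cbc(aes)". */
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int example_cbc_encrypt(const u8 *key, unsigned int keylen,
			       u8 *iv, u8 *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* The crypto core picks the highest-priority "cbc(aes)" provider. */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* buf must be linear, non-stack memory; len a multiple of AES_BLOCK_SIZE. */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* crypto_wait_req() sleeps until completion if the provider is async. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}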

+157 -233
arch/arm/crypto/aes-ce-glue.c
···
 #include <asm/neon.h>
 #include <asm/hwcap.h>
 #include <crypto/aes.h>
-#include <crypto/ablk_helper.h>
-#include <crypto/algapi.h>
+#include <crypto/internal/simd.h>
+#include <crypto/internal/skcipher.h>
 #include <linux/module.h>
 #include <crypto/xts.h>
 
···
 	return 0;
 }
 
-static int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
+static int ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			 unsigned int key_len)
 {
-	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int ret;
 
 	ret = ce_aes_expandkey(ctx, in_key, key_len);
 	if (!ret)
 		return 0;
 
-	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 	return -EINVAL;
 }
 
···
 	struct crypto_aes_ctx __aligned(8) key2;
 };
 
-static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
 		       unsigned int key_len)
 {
-	struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int ret;
 
-	ret = xts_check_key(tfm, in_key, key_len);
+	ret = xts_verify_key(tfm, in_key, key_len);
 	if (ret)
 		return ret;
 
···
 	if (!ret)
 		return 0;
 
-	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 	return -EINVAL;
 }
 
-static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
+static int ecb_encrypt(struct skcipher_request *req)
 {
-	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int blocks;
 	int err;
 
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = skcipher_walk_virt(&walk, req, true);
 
 	kernel_neon_begin();
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks);
-		err = blkcipher_walk_done(desc, &walk,
-					  walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
 }
 
-static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
+static int ecb_decrypt(struct skcipher_request *req)
 {
-	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int blocks;
 	int err;
 
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = skcipher_walk_virt(&walk, req, true);
 
 	kernel_neon_begin();
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks);
-		err = blkcipher_walk_done(desc, &walk,
-					  walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
 }
 
-static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
+static int cbc_encrypt(struct skcipher_request *req)
 {
-	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int blocks;
 	int err;
 
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = skcipher_walk_virt(&walk, req, true);
 
 	kernel_neon_begin();
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		ce_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
 				   walk.iv);
-		err = blkcipher_walk_done(desc, &walk,
-					  walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
 }
 
-static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
+static int cbc_decrypt(struct skcipher_request *req)
 {
-	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	unsigned int blocks;
 	int err;
 
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = skcipher_walk_virt(&walk, req, true);
 
 	kernel_neon_begin();
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		ce_aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks,
 				   walk.iv);
-		err = blkcipher_walk_done(desc, &walk,
-					  walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 	return err;
 }
 
-static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
+static int ctr_encrypt(struct skcipher_request *req)
 {
-	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct blkcipher_walk walk;
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct skcipher_walk walk;
 	int err, blocks;
 
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
+	err = skcipher_walk_virt(&walk, req, true);
 
 	kernel_neon_begin();
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
 		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
 				   walk.iv);
-		nbytes -= blocks * AES_BLOCK_SIZE;
-		if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
-			break;
-		err = blkcipher_walk_done(desc, &walk,
-					  walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	if (walk.nbytes % AES_BLOCK_SIZE) {
-		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
-		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
+	if (walk.nbytes) {
 		u8 __aligned(8) tail[AES_BLOCK_SIZE];
+		unsigned int nbytes = walk.nbytes;
+		u8 *tdst = walk.dst.virt.addr;
+		u8 *tsrc = walk.src.virt.addr;
 
 		/*
 		 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we need
···
 		ce_aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc,
 				   num_rounds(ctx), blocks, walk.iv);
 		memcpy(tdst, tail, nbytes);
-		err = blkcipher_walk_done(desc, &walk, 0);
+		err = skcipher_walk_done(&walk, 0);
 	}
 	kernel_neon_end();
 
 	return err;
 }
 
-static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
+static int xts_encrypt(struct skcipher_request *req)
 {
-	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err, first, rounds = num_rounds(&ctx->key1);
-	struct blkcipher_walk walk;
+	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = skcipher_walk_virt(&walk, req, true);
 
 	kernel_neon_begin();
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
 		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   (u8 *)ctx->key1.key_enc, rounds, blocks,
 				   walk.iv, (u8 *)ctx->key2.key_enc, first);
-		err = blkcipher_walk_done(desc, &walk,
-					  walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 
 	return err;
 }
 
-static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
+static int xts_decrypt(struct skcipher_request *req)
 {
-	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err, first, rounds = num_rounds(&ctx->key1);
-	struct blkcipher_walk walk;
+	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
-	blkcipher_walk_init(&walk, dst, src, nbytes);
-	err = blkcipher_walk_virt(desc, &walk);
+	err = skcipher_walk_virt(&walk, req, true);
 
 	kernel_neon_begin();
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
 		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   (u8 *)ctx->key1.key_dec, rounds, blocks,
 				   walk.iv, (u8 *)ctx->key2.key_enc, first);
-		err = blkcipher_walk_done(desc, &walk,
-					  walk.nbytes % AES_BLOCK_SIZE);
+		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
 	kernel_neon_end();
 
 	return err;
 }
 
-static struct crypto_alg aes_algs[] = { {
-	.cra_name		= "__ecb-aes-ce",
-	.cra_driver_name	= "__driver-ecb-aes-ce",
-	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_INTERNAL,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_blkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= 0,
-		.setkey		= ce_aes_setkey,
-		.encrypt	= ecb_encrypt,
-		.decrypt	= ecb_decrypt,
+static struct skcipher_alg aes_algs[] = { {
+	.base = {
+		.cra_name		= "__ecb(aes)",
+		.cra_driver_name	= "__ecb-aes-ce",
+		.cra_priority		= 300,
+		.cra_flags		= CRYPTO_ALG_INTERNAL,
+		.cra_blocksize		= AES_BLOCK_SIZE,
+		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
+		.cra_alignmask		= 7,
+		.cra_module		= THIS_MODULE,
 	},
+	.min_keysize	= AES_MIN_KEY_SIZE,
+	.max_keysize	= AES_MAX_KEY_SIZE,
+	.setkey		= ce_aes_setkey,
+	.encrypt	= ecb_encrypt,
+	.decrypt	= ecb_decrypt,
 }, {
-	.cra_name		= "__cbc-aes-ce",
-	.cra_driver_name	= "__driver-cbc-aes-ce",
-	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_INTERNAL,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_blkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= ce_aes_setkey,
-		.encrypt	= cbc_encrypt,
-		.decrypt	= cbc_decrypt,
+	.base = {
+		.cra_name		= "__cbc(aes)",
+		.cra_driver_name	= "__cbc-aes-ce",
+		.cra_priority		= 300,
+		.cra_flags		= CRYPTO_ALG_INTERNAL,
+		.cra_blocksize		= AES_BLOCK_SIZE,
+		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
+		.cra_alignmask		= 7,
+		.cra_module		= THIS_MODULE,
 	},
+	.min_keysize	= AES_MIN_KEY_SIZE,
+	.max_keysize	= AES_MAX_KEY_SIZE,
+	.ivsize		= AES_BLOCK_SIZE,
+	.setkey		= ce_aes_setkey,
+	.encrypt	= cbc_encrypt,
+	.decrypt	= cbc_decrypt,
 }, {
-	.cra_name		= "__ctr-aes-ce",
-	.cra_driver_name	= "__driver-ctr-aes-ce",
-	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_INTERNAL,
-	.cra_blocksize		= 1,
-	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_blkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= ce_aes_setkey,
-		.encrypt	= ctr_encrypt,
-		.decrypt	= ctr_encrypt,
+	.base = {
+		.cra_name		= "__ctr(aes)",
+		.cra_driver_name	= "__ctr-aes-ce",
+		.cra_priority		= 300,
+		.cra_flags		= CRYPTO_ALG_INTERNAL,
+		.cra_blocksize		= 1,
+		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
+		.cra_alignmask		= 7,
+		.cra_module		= THIS_MODULE,
 	},
+	.min_keysize	= AES_MIN_KEY_SIZE,
+	.max_keysize	= AES_MAX_KEY_SIZE,
+	.ivsize		= AES_BLOCK_SIZE,
+	.chunksize	= AES_BLOCK_SIZE,
+	.setkey		= ce_aes_setkey,
+	.encrypt	= ctr_encrypt,
+	.decrypt	= ctr_encrypt,
 }, {
-	.cra_name		= "__xts-aes-ce",
-	.cra_driver_name	= "__driver-xts-aes-ce",
-	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
-				  CRYPTO_ALG_INTERNAL,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_blkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_blkcipher = {
-		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
-		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= xts_set_key,
-		.encrypt	= xts_encrypt,
-		.decrypt	= xts_decrypt,
+	.base = {
+		.cra_name		= "__xts(aes)",
+		.cra_driver_name	= "__xts-aes-ce",
+		.cra_priority		= 300,
+		.cra_flags		= CRYPTO_ALG_INTERNAL,
+		.cra_blocksize		= AES_BLOCK_SIZE,
+		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
+		.cra_alignmask		= 7,
+		.cra_module		= THIS_MODULE,
 	},
-}, {
-	.cra_name		= "ecb(aes)",
-	.cra_driver_name	= "ecb-aes-ce",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct async_helper_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= ablk_init,
-	.cra_exit		= ablk_exit,
-	.cra_ablkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= 0,
-		.setkey		= ablk_set_key,
-		.encrypt	= ablk_encrypt,
-		.decrypt	= ablk_decrypt,
-	}
-}, {
-	.cra_name		= "cbc(aes)",
-	.cra_driver_name	= "cbc-aes-ce",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct async_helper_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= ablk_init,
-	.cra_exit		= ablk_exit,
-	.cra_ablkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= ablk_set_key,
-		.encrypt	= ablk_encrypt,
-		.decrypt	= ablk_decrypt,
-	}
-}, {
-	.cra_name		= "ctr(aes)",
-	.cra_driver_name	= "ctr-aes-ce",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize		= 1,
-	.cra_ctxsize		= sizeof(struct async_helper_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= ablk_init,
-	.cra_exit		= ablk_exit,
-	.cra_ablkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= ablk_set_key,
-		.encrypt	= ablk_encrypt,
-		.decrypt	= ablk_decrypt,
-	}
-}, {
-	.cra_name		= "xts(aes)",
-	.cra_driver_name	= "xts-aes-ce",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct async_helper_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= ablk_init,
-	.cra_exit		= ablk_exit,
-	.cra_ablkcipher = {
-		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
-		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= ablk_set_key,
-		.encrypt	= ablk_encrypt,
-		.decrypt	= ablk_decrypt,
-	}
+	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
+	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
+	.ivsize		= AES_BLOCK_SIZE,
+	.setkey		= xts_set_key,
+	.encrypt	= xts_encrypt,
+	.decrypt	= xts_decrypt,
 } };
+
+struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
+
+static void aes_exit(void)
+{
+	int i;
+
+	for (i = 0; i < ARRAY_SIZE(aes_simd_algs) && aes_simd_algs[i]; i++)
+		simd_skcipher_free(aes_simd_algs[i]);
+
+	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
+}
 
 static int __init aes_init(void)
 {
+	struct simd_skcipher_alg *simd;
+	const char *basename;
+	const char *algname;
+	const char *drvname;
+	int err;
+	int i;
+
 	if (!(elf_hwcap2 & HWCAP2_AES))
 		return -ENODEV;
-	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
-}
 
-static void __exit aes_exit(void)
-{
-	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
+	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
+	if (err)
+		return err;
+
+	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
+		algname = aes_algs[i].base.cra_name + 2;
+		drvname = aes_algs[i].base.cra_driver_name + 2;
+		basename = aes_algs[i].base.cra_driver_name;
+		simd = simd_skcipher_create_compat(algname, drvname, basename);
+		err = PTR_ERR(simd);
+		if (IS_ERR(simd))
+			goto unregister_simds;
+
+		aes_simd_algs[i] = simd;
+	}
+
+	return 0;
+
+unregister_simds:
+	aes_exit();
+	return err;
 }
 
 module_init(aes_init);
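
A note on the registration pattern above (an editor's summary, not part of the patch): the converted algorithms keep a "__" name prefix and the CRYPTO_ALG_INTERNAL flag, which hides them from ordinary crypto API users because they may only run where the NEON register file is usable. In aes_init(), simd_skcipher_create_compat() strips that prefix (cra_name + 2) to publish the user-visible "ecb(aes)", "cbc(aes)", "ctr(aes)" and "xts(aes)" wrappers, which, as I understand the simd helper, invoke the inner NEON implementation directly when SIMD is usable in the current context and otherwise defer the request to an asynchronous cryptd worker. This replaces the ablk_helper machinery whose #include lines the patch removes.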