Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: s390 - add System z hardware support for CTR mode

This patch adds System z hardware acceleration support for AES, DES
and 3DES in CTR mode. The hardware support is available starting with
System z196.

Signed-off-by: Gerald Schaefer <gerald.schaefer@de.ibm.com>
Signed-off-by: Jan Glauber <jang@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Gerald Schaefer; committed by Herbert Xu.
0200f3ec 9996e342

+383 -6
+144 -2
arch/s390/crypto/aes_s390.c
··· 31 31 #define AES_KEYLEN_192 2 32 32 #define AES_KEYLEN_256 4 33 33 34 - static char keylen_flag = 0; 34 + static u8 *ctrblk; 35 + static char keylen_flag; 35 36 36 37 struct s390_aes_ctx { 37 38 u8 iv[AES_BLOCK_SIZE]; ··· 725 724 } 726 725 }; 727 726 727 + static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 728 + unsigned int key_len) 729 + { 730 + struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); 731 + 732 + switch (key_len) { 733 + case 16: 734 + sctx->enc = KMCTR_AES_128_ENCRYPT; 735 + sctx->dec = KMCTR_AES_128_DECRYPT; 736 + break; 737 + case 24: 738 + sctx->enc = KMCTR_AES_192_ENCRYPT; 739 + sctx->dec = KMCTR_AES_192_DECRYPT; 740 + break; 741 + case 32: 742 + sctx->enc = KMCTR_AES_256_ENCRYPT; 743 + sctx->dec = KMCTR_AES_256_DECRYPT; 744 + break; 745 + } 746 + 747 + return aes_set_key(tfm, in_key, key_len); 748 + } 749 + 750 + static int ctr_aes_crypt(struct blkcipher_desc *desc, long func, 751 + struct s390_aes_ctx *sctx, struct blkcipher_walk *walk) 752 + { 753 + int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE); 754 + unsigned int i, n, nbytes; 755 + u8 buf[AES_BLOCK_SIZE]; 756 + u8 *out, *in; 757 + 758 + if (!walk->nbytes) 759 + return ret; 760 + 761 + memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE); 762 + while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { 763 + out = walk->dst.virt.addr; 764 + in = walk->src.virt.addr; 765 + while (nbytes >= AES_BLOCK_SIZE) { 766 + /* only use complete blocks, max. PAGE_SIZE */ 767 + n = (nbytes > PAGE_SIZE) ? 
PAGE_SIZE : 768 + nbytes & ~(AES_BLOCK_SIZE - 1); 769 + for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) { 770 + memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE, 771 + AES_BLOCK_SIZE); 772 + crypto_inc(ctrblk + i, AES_BLOCK_SIZE); 773 + } 774 + ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk); 775 + BUG_ON(ret < 0 || ret != n); 776 + if (n > AES_BLOCK_SIZE) 777 + memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE, 778 + AES_BLOCK_SIZE); 779 + crypto_inc(ctrblk, AES_BLOCK_SIZE); 780 + out += n; 781 + in += n; 782 + nbytes -= n; 783 + } 784 + ret = blkcipher_walk_done(desc, walk, nbytes); 785 + } 786 + /* 787 + * final block may be < AES_BLOCK_SIZE, copy only nbytes 788 + */ 789 + if (nbytes) { 790 + out = walk->dst.virt.addr; 791 + in = walk->src.virt.addr; 792 + ret = crypt_s390_kmctr(func, sctx->key, buf, in, 793 + AES_BLOCK_SIZE, ctrblk); 794 + BUG_ON(ret < 0 || ret != AES_BLOCK_SIZE); 795 + memcpy(out, buf, nbytes); 796 + crypto_inc(ctrblk, AES_BLOCK_SIZE); 797 + ret = blkcipher_walk_done(desc, walk, 0); 798 + } 799 + memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE); 800 + return ret; 801 + } 802 + 803 + static int ctr_aes_encrypt(struct blkcipher_desc *desc, 804 + struct scatterlist *dst, struct scatterlist *src, 805 + unsigned int nbytes) 806 + { 807 + struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); 808 + struct blkcipher_walk walk; 809 + 810 + blkcipher_walk_init(&walk, dst, src, nbytes); 811 + return ctr_aes_crypt(desc, sctx->enc, sctx, &walk); 812 + } 813 + 814 + static int ctr_aes_decrypt(struct blkcipher_desc *desc, 815 + struct scatterlist *dst, struct scatterlist *src, 816 + unsigned int nbytes) 817 + { 818 + struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); 819 + struct blkcipher_walk walk; 820 + 821 + blkcipher_walk_init(&walk, dst, src, nbytes); 822 + return ctr_aes_crypt(desc, sctx->dec, sctx, &walk); 823 + } 824 + 825 + static struct crypto_alg ctr_aes_alg = { 826 + .cra_name = "ctr(aes)", 827 + .cra_driver_name = 
"ctr-aes-s390", 828 + .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 829 + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 830 + .cra_blocksize = 1, 831 + .cra_ctxsize = sizeof(struct s390_aes_ctx), 832 + .cra_type = &crypto_blkcipher_type, 833 + .cra_module = THIS_MODULE, 834 + .cra_list = LIST_HEAD_INIT(ctr_aes_alg.cra_list), 835 + .cra_u = { 836 + .blkcipher = { 837 + .min_keysize = AES_MIN_KEY_SIZE, 838 + .max_keysize = AES_MAX_KEY_SIZE, 839 + .ivsize = AES_BLOCK_SIZE, 840 + .setkey = ctr_aes_set_key, 841 + .encrypt = ctr_aes_encrypt, 842 + .decrypt = ctr_aes_decrypt, 843 + } 844 + } 845 + }; 846 + 728 847 static int __init aes_s390_init(void) 729 848 { 730 - unsigned long long facility_bits[2]; 731 849 int ret; 732 850 733 851 if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA)) ··· 885 765 goto xts_aes_err; 886 766 } 887 767 768 + if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT, 769 + CRYPT_S390_MSA | CRYPT_S390_MSA4) && 770 + crypt_s390_func_available(KMCTR_AES_192_ENCRYPT, 771 + CRYPT_S390_MSA | CRYPT_S390_MSA4) && 772 + crypt_s390_func_available(KMCTR_AES_256_ENCRYPT, 773 + CRYPT_S390_MSA | CRYPT_S390_MSA4)) { 774 + ctrblk = (u8 *) __get_free_page(GFP_KERNEL); 775 + if (!ctrblk) { 776 + ret = -ENOMEM; 777 + goto ctr_aes_err; 778 + } 779 + ret = crypto_register_alg(&ctr_aes_alg); 780 + if (ret) { 781 + free_page((unsigned long) ctrblk); 782 + goto ctr_aes_err; 783 + } 784 + } 785 + 888 786 out: 889 787 return ret; 890 788 789 + ctr_aes_err: 790 + crypto_unregister_alg(&xts_aes_alg); 891 791 xts_aes_err: 892 792 crypto_unregister_alg(&cbc_aes_alg); 893 793 cbc_aes_err: ··· 920 780 921 781 static void __exit aes_s390_fini(void) 922 782 { 783 + crypto_unregister_alg(&ctr_aes_alg); 784 + free_page((unsigned long) ctrblk); 923 785 crypto_unregister_alg(&xts_aes_alg); 924 786 crypto_unregister_alg(&cbc_aes_alg); 925 787 crypto_unregister_alg(&ecb_aes_alg);
+65 -1
arch/s390/crypto/crypt_s390.h
··· 34 34 CRYPT_S390_KMC = 0x0200, 35 35 CRYPT_S390_KIMD = 0x0300, 36 36 CRYPT_S390_KLMD = 0x0400, 37 - CRYPT_S390_KMAC = 0x0500 37 + CRYPT_S390_KMAC = 0x0500, 38 + CRYPT_S390_KMCTR = 0x0600 38 39 }; 39 40 40 41 /* ··· 81 80 KMC_AES_256_ENCRYPT = CRYPT_S390_KMC | 0x14, 82 81 KMC_AES_256_DECRYPT = CRYPT_S390_KMC | 0x14 | 0x80, 83 82 KMC_PRNG = CRYPT_S390_KMC | 0x43, 83 + }; 84 + 85 + /* 86 + * function codes for KMCTR (CIPHER MESSAGE WITH COUNTER) 87 + * instruction 88 + */ 89 + enum crypt_s390_kmctr_func { 90 + KMCTR_QUERY = CRYPT_S390_KMCTR | 0x0, 91 + KMCTR_DEA_ENCRYPT = CRYPT_S390_KMCTR | 0x1, 92 + KMCTR_DEA_DECRYPT = CRYPT_S390_KMCTR | 0x1 | 0x80, 93 + KMCTR_TDEA_128_ENCRYPT = CRYPT_S390_KMCTR | 0x2, 94 + KMCTR_TDEA_128_DECRYPT = CRYPT_S390_KMCTR | 0x2 | 0x80, 95 + KMCTR_TDEA_192_ENCRYPT = CRYPT_S390_KMCTR | 0x3, 96 + KMCTR_TDEA_192_DECRYPT = CRYPT_S390_KMCTR | 0x3 | 0x80, 97 + KMCTR_AES_128_ENCRYPT = CRYPT_S390_KMCTR | 0x12, 98 + KMCTR_AES_128_DECRYPT = CRYPT_S390_KMCTR | 0x12 | 0x80, 99 + KMCTR_AES_192_ENCRYPT = CRYPT_S390_KMCTR | 0x13, 100 + KMCTR_AES_192_DECRYPT = CRYPT_S390_KMCTR | 0x13 | 0x80, 101 + KMCTR_AES_256_ENCRYPT = CRYPT_S390_KMCTR | 0x14, 102 + KMCTR_AES_256_DECRYPT = CRYPT_S390_KMCTR | 0x14 | 0x80, 84 103 }; 85 104 86 105 /* ··· 314 293 } 315 294 316 295 /** 296 + * crypt_s390_kmctr: 297 + * @func: the function code passed to KMCTR; see crypt_s390_kmctr_func 298 + * @param: address of parameter block; see POP for details on each func 299 + * @dest: address of destination memory area 300 + * @src: address of source memory area 301 + * @src_len: length of src operand in bytes 302 + * @counter: address of counter value 303 + * 304 + * Executes the KMCTR (CIPHER MESSAGE WITH COUNTER) operation of the CPU. 
305 + * 306 + * Returns -1 for failure, 0 for the query func, number of processed 307 + * bytes for encryption/decryption funcs 308 + */ 309 + static inline int crypt_s390_kmctr(long func, void *param, u8 *dest, 310 + const u8 *src, long src_len, u8 *counter) 311 + { 312 + register long __func asm("0") = func & CRYPT_S390_FUNC_MASK; 313 + register void *__param asm("1") = param; 314 + register const u8 *__src asm("2") = src; 315 + register long __src_len asm("3") = src_len; 316 + register u8 *__dest asm("4") = dest; 317 + register u8 *__ctr asm("6") = counter; 318 + int ret = -1; 319 + 320 + asm volatile( 321 + "0: .insn rrf,0xb92d0000,%3,%1,%4,0 \n" /* KMCTR opcode */ 322 + "1: brc 1,0b \n" /* handle partial completion */ 323 + " la %0,0\n" 324 + "2:\n" 325 + EX_TABLE(0b,2b) EX_TABLE(1b,2b) 326 + : "+d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest), 327 + "+a" (__ctr) 328 + : "d" (__func), "a" (__param) : "cc", "memory"); 329 + if (ret < 0) 330 + return ret; 331 + return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len; 332 + } 333 + 334 + /** 317 335 * crypt_s390_func_available: 318 336 * @func: the function code of the specific function; 0 if op in general 319 337 * ··· 388 328 break; 389 329 case CRYPT_S390_KMAC: 390 330 ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0); 331 + break; 332 + case CRYPT_S390_KMCTR: 333 + ret = crypt_s390_kmctr(KMCTR_QUERY, &status, NULL, NULL, 0, 334 + NULL); 391 335 break; 392 336 default: 393 337 return 0;
+168 -1
arch/s390/crypto/des_s390.c
··· 3 3 * 4 4 * s390 implementation of the DES Cipher Algorithm. 5 5 * 6 - * Copyright IBM Corp. 2003,2007 6 + * Copyright IBM Corp. 2003,2011 7 7 * Author(s): Thomas Spatzier 8 8 * Jan Glauber (jan.glauber@de.ibm.com) 9 9 * ··· 23 23 #include "crypt_s390.h" 24 24 25 25 #define DES3_KEY_SIZE (3 * DES_KEY_SIZE) 26 + 27 + static u8 *ctrblk; 26 28 27 29 struct s390_des_ctx { 28 30 u8 iv[DES_BLOCK_SIZE]; ··· 372 370 } 373 371 }; 374 372 373 + static int ctr_desall_crypt(struct blkcipher_desc *desc, long func, 374 + struct s390_des_ctx *ctx, struct blkcipher_walk *walk) 375 + { 376 + int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE); 377 + unsigned int i, n, nbytes; 378 + u8 buf[DES_BLOCK_SIZE]; 379 + u8 *out, *in; 380 + 381 + memcpy(ctrblk, walk->iv, DES_BLOCK_SIZE); 382 + while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { 383 + out = walk->dst.virt.addr; 384 + in = walk->src.virt.addr; 385 + while (nbytes >= DES_BLOCK_SIZE) { 386 + /* align to block size, max. PAGE_SIZE */ 387 + n = (nbytes > PAGE_SIZE) ? 
PAGE_SIZE : 388 + nbytes & ~(DES_BLOCK_SIZE - 1); 389 + for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) { 390 + memcpy(ctrblk + i, ctrblk + i - DES_BLOCK_SIZE, 391 + DES_BLOCK_SIZE); 392 + crypto_inc(ctrblk + i, DES_BLOCK_SIZE); 393 + } 394 + ret = crypt_s390_kmctr(func, ctx->key, out, in, n, ctrblk); 395 + BUG_ON((ret < 0) || (ret != n)); 396 + if (n > DES_BLOCK_SIZE) 397 + memcpy(ctrblk, ctrblk + n - DES_BLOCK_SIZE, 398 + DES_BLOCK_SIZE); 399 + crypto_inc(ctrblk, DES_BLOCK_SIZE); 400 + out += n; 401 + in += n; 402 + nbytes -= n; 403 + } 404 + ret = blkcipher_walk_done(desc, walk, nbytes); 405 + } 406 + 407 + /* final block may be < DES_BLOCK_SIZE, copy only nbytes */ 408 + if (nbytes) { 409 + out = walk->dst.virt.addr; 410 + in = walk->src.virt.addr; 411 + ret = crypt_s390_kmctr(func, ctx->key, buf, in, 412 + DES_BLOCK_SIZE, ctrblk); 413 + BUG_ON(ret < 0 || ret != DES_BLOCK_SIZE); 414 + memcpy(out, buf, nbytes); 415 + crypto_inc(ctrblk, DES_BLOCK_SIZE); 416 + ret = blkcipher_walk_done(desc, walk, 0); 417 + } 418 + memcpy(walk->iv, ctrblk, DES_BLOCK_SIZE); 419 + return ret; 420 + } 421 + 422 + static int ctr_des_encrypt(struct blkcipher_desc *desc, 423 + struct scatterlist *dst, struct scatterlist *src, 424 + unsigned int nbytes) 425 + { 426 + struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 427 + struct blkcipher_walk walk; 428 + 429 + blkcipher_walk_init(&walk, dst, src, nbytes); 430 + return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk); 431 + } 432 + 433 + static int ctr_des_decrypt(struct blkcipher_desc *desc, 434 + struct scatterlist *dst, struct scatterlist *src, 435 + unsigned int nbytes) 436 + { 437 + struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 438 + struct blkcipher_walk walk; 439 + 440 + blkcipher_walk_init(&walk, dst, src, nbytes); 441 + return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk); 442 + } 443 + 444 + static struct crypto_alg ctr_des_alg = { 445 + .cra_name = "ctr(des)", 446 + .cra_driver_name = 
"ctr-des-s390", 447 + .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 448 + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 449 + .cra_blocksize = 1, 450 + .cra_ctxsize = sizeof(struct s390_des_ctx), 451 + .cra_type = &crypto_blkcipher_type, 452 + .cra_module = THIS_MODULE, 453 + .cra_list = LIST_HEAD_INIT(ctr_des_alg.cra_list), 454 + .cra_u = { 455 + .blkcipher = { 456 + .min_keysize = DES_KEY_SIZE, 457 + .max_keysize = DES_KEY_SIZE, 458 + .ivsize = DES_BLOCK_SIZE, 459 + .setkey = des_setkey, 460 + .encrypt = ctr_des_encrypt, 461 + .decrypt = ctr_des_decrypt, 462 + } 463 + } 464 + }; 465 + 466 + static int ctr_des3_encrypt(struct blkcipher_desc *desc, 467 + struct scatterlist *dst, struct scatterlist *src, 468 + unsigned int nbytes) 469 + { 470 + struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 471 + struct blkcipher_walk walk; 472 + 473 + blkcipher_walk_init(&walk, dst, src, nbytes); 474 + return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk); 475 + } 476 + 477 + static int ctr_des3_decrypt(struct blkcipher_desc *desc, 478 + struct scatterlist *dst, struct scatterlist *src, 479 + unsigned int nbytes) 480 + { 481 + struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); 482 + struct blkcipher_walk walk; 483 + 484 + blkcipher_walk_init(&walk, dst, src, nbytes); 485 + return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk); 486 + } 487 + 488 + static struct crypto_alg ctr_des3_alg = { 489 + .cra_name = "ctr(des3_ede)", 490 + .cra_driver_name = "ctr-des3_ede-s390", 491 + .cra_priority = CRYPT_S390_COMPOSITE_PRIORITY, 492 + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 493 + .cra_blocksize = 1, 494 + .cra_ctxsize = sizeof(struct s390_des_ctx), 495 + .cra_type = &crypto_blkcipher_type, 496 + .cra_module = THIS_MODULE, 497 + .cra_list = LIST_HEAD_INIT(ctr_des3_alg.cra_list), 498 + .cra_u = { 499 + .blkcipher = { 500 + .min_keysize = DES3_KEY_SIZE, 501 + .max_keysize = DES3_KEY_SIZE, 502 + .ivsize = DES_BLOCK_SIZE, 503 + .setkey = des3_setkey, 504 + 
.encrypt = ctr_des3_encrypt, 505 + .decrypt = ctr_des3_decrypt, 506 + } 507 + } 508 + }; 509 + 375 510 static int __init des_s390_init(void) 376 511 { 377 512 int ret; ··· 535 396 ret = crypto_register_alg(&cbc_des3_alg); 536 397 if (ret) 537 398 goto cbc_des3_err; 399 + 400 + if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT, 401 + CRYPT_S390_MSA | CRYPT_S390_MSA4) && 402 + crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT, 403 + CRYPT_S390_MSA | CRYPT_S390_MSA4)) { 404 + ret = crypto_register_alg(&ctr_des_alg); 405 + if (ret) 406 + goto ctr_des_err; 407 + ret = crypto_register_alg(&ctr_des3_alg); 408 + if (ret) 409 + goto ctr_des3_err; 410 + ctrblk = (u8 *) __get_free_page(GFP_KERNEL); 411 + if (!ctrblk) { 412 + ret = -ENOMEM; 413 + goto ctr_mem_err; 414 + } 415 + } 538 416 out: 539 417 return ret; 540 418 419 + ctr_mem_err: 420 + crypto_unregister_alg(&ctr_des3_alg); 421 + ctr_des3_err: 422 + crypto_unregister_alg(&ctr_des_alg); 423 + ctr_des_err: 424 + crypto_unregister_alg(&cbc_des3_alg); 541 425 cbc_des3_err: 542 426 crypto_unregister_alg(&ecb_des3_alg); 543 427 ecb_des3_err: ··· 577 415 578 416 static void __exit des_s390_exit(void) 579 417 { 418 + if (ctrblk) { 419 + crypto_unregister_alg(&ctr_des_alg); 420 + crypto_unregister_alg(&ctr_des3_alg); 421 + free_page((unsigned long) ctrblk); 422 + } 580 423 crypto_unregister_alg(&cbc_des3_alg); 581 424 crypto_unregister_alg(&ecb_des3_alg); 582 425 crypto_unregister_alg(&des3_alg);
+6 -2
drivers/crypto/Kconfig
··· 119 119 select CRYPTO_ALGAPI 120 120 select CRYPTO_BLKCIPHER 121 121 help 122 - This us the s390 hardware accelerated implementation of the 122 + This is the s390 hardware accelerated implementation of the 123 123 DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). 124 + 125 + As of z990 the ECB and CBC mode are hardware accelerated. 126 + As of z196 the CTR mode is hardware accelerated. 124 127 125 128 config CRYPTO_AES_S390 126 129 tristate "AES cipher algorithms" ··· 138 135 for 128 bit keys. 139 136 As of z10 the ECB and CBC modes are hardware accelerated 140 137 for all AES key sizes. 141 - As of z196 the XTS mode is hardware accelerated for 256 and 138 + As of z196 the CTR mode is hardware accelerated for all AES 139 + key sizes and XTS mode is hardware accelerated for 256 and 142 140 512 bit keys. 143 141 144 142 config S390_PRNG