Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: stm32 - add new algorithms support

Add support for all SHA-2 (up to SHA-512) and SHA-3 algorithms.
Update the compatible table to add stm32mp13.

Signed-off-by: Thomas Bourgoin <thomas.bourgoin@foss.st.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Thomas Bourgoin and committed by
Herbert Xu
b6248fb8 0d517943

+548 -130
+2
drivers/crypto/stm32/Kconfig
··· 16 16 select CRYPTO_MD5 17 17 select CRYPTO_SHA1 18 18 select CRYPTO_SHA256 19 + select CRYPTO_SHA512 20 + select CRYPTO_SHA3 19 21 select CRYPTO_ENGINE 20 22 help 21 23 This enables support for the HASH hw accelerator which can be found
+546 -130
drivers/crypto/stm32/stm32-hash.c
··· 26 26 #include <crypto/scatterwalk.h> 27 27 #include <crypto/sha1.h> 28 28 #include <crypto/sha2.h> 29 + #include <crypto/sha3.h> 29 30 #include <crypto/internal/hash.h> 30 31 31 32 #define HASH_CR 0x00 ··· 46 45 #define HASH_CR_DMAE BIT(3) 47 46 #define HASH_CR_DATATYPE_POS 4 48 47 #define HASH_CR_MODE BIT(6) 48 + #define HASH_CR_ALGO_POS 7 49 49 #define HASH_CR_MDMAT BIT(13) 50 50 #define HASH_CR_DMAA BIT(14) 51 51 #define HASH_CR_LKEY BIT(16) 52 - 53 - #define HASH_CR_ALGO_SHA1 0x0 54 - #define HASH_CR_ALGO_MD5 0x80 55 - #define HASH_CR_ALGO_SHA224 0x40000 56 - #define HASH_CR_ALGO_SHA256 0x40080 57 - 58 - #define HASH_CR_UX500_EMPTYMSG BIT(20) 59 - #define HASH_CR_UX500_ALGO_SHA1 BIT(7) 60 - #define HASH_CR_UX500_ALGO_SHA256 0x0 61 52 62 53 /* Interrupt */ 63 54 #define HASH_DINIE BIT(0) ··· 58 65 /* Interrupt Mask */ 59 66 #define HASH_MASK_CALC_COMPLETION BIT(0) 60 67 #define HASH_MASK_DATA_INPUT BIT(1) 61 - 62 - /* Context swap register */ 63 - #define HASH_CSR_REGISTER_NUMBER 54 64 68 65 69 /* Status Flags */ 66 70 #define HASH_SR_DATA_INPUT_READY BIT(0) ··· 69 79 #define HASH_STR_NBLW_MASK GENMASK(4, 0) 70 80 #define HASH_STR_DCAL BIT(8) 71 81 82 + /* HWCFGR Register */ 83 + #define HASH_HWCFG_DMA_MASK GENMASK(3, 0) 84 + 85 + /* Context swap register */ 86 + #define HASH_CSR_NB_SHA256_HMAC 54 87 + #define HASH_CSR_NB_SHA256 38 88 + #define HASH_CSR_NB_SHA512_HMAC 103 89 + #define HASH_CSR_NB_SHA512 91 90 + #define HASH_CSR_NB_SHA3_HMAC 88 91 + #define HASH_CSR_NB_SHA3 72 92 + #define HASH_CSR_NB_MAX HASH_CSR_NB_SHA512_HMAC 93 + 72 94 #define HASH_FLAGS_INIT BIT(0) 73 95 #define HASH_FLAGS_OUTPUT_READY BIT(1) 74 96 #define HASH_FLAGS_CPU BIT(2) ··· 89 87 #define HASH_FLAGS_HMAC_INIT BIT(5) 90 88 #define HASH_FLAGS_HMAC_FINAL BIT(6) 91 89 #define HASH_FLAGS_HMAC_KEY BIT(7) 92 - 90 + #define HASH_FLAGS_SHA3_MODE BIT(8) 93 91 #define HASH_FLAGS_FINAL BIT(15) 94 92 #define HASH_FLAGS_FINUP BIT(16) 95 - #define HASH_FLAGS_ALGO_MASK GENMASK(21, 18) 96 - 
#define HASH_FLAGS_MD5 BIT(18) 97 - #define HASH_FLAGS_SHA1 BIT(19) 98 - #define HASH_FLAGS_SHA224 BIT(20) 99 - #define HASH_FLAGS_SHA256 BIT(21) 93 + #define HASH_FLAGS_ALGO_MASK GENMASK(20, 17) 94 + #define HASH_FLAGS_ALGO_SHIFT 17 95 + #define HASH_FLAGS_ERRORS BIT(21) 100 96 #define HASH_FLAGS_EMPTY BIT(22) 101 97 #define HASH_FLAGS_HMAC BIT(23) 102 98 103 99 #define HASH_OP_UPDATE 1 104 100 #define HASH_OP_FINAL 2 101 + 102 + #define HASH_BURST_LEVEL 4 105 103 106 104 enum stm32_hash_data_format { 107 105 HASH_DATA_32_BITS = 0x0, ··· 110 108 HASH_DATA_1_BIT = 0x3 111 109 }; 112 110 113 - #define HASH_BUFLEN 256 114 - #define HASH_LONG_KEY 64 115 - #define HASH_MAX_KEY_SIZE (SHA256_BLOCK_SIZE * 8) 116 - #define HASH_QUEUE_LENGTH 16 117 - #define HASH_DMA_THRESHOLD 50 111 + #define HASH_BUFLEN (SHA3_224_BLOCK_SIZE + 4) 112 + #define HASH_MAX_KEY_SIZE (SHA512_BLOCK_SIZE * 8) 113 + 114 + enum stm32_hash_algo { 115 + HASH_SHA1 = 0, 116 + HASH_MD5 = 1, 117 + HASH_SHA224 = 2, 118 + HASH_SHA256 = 3, 119 + HASH_SHA3_224 = 4, 120 + HASH_SHA3_256 = 5, 121 + HASH_SHA3_384 = 6, 122 + HASH_SHA3_512 = 7, 123 + HASH_SHA384 = 12, 124 + HASH_SHA512 = 15, 125 + }; 126 + 127 + enum ux500_hash_algo { 128 + HASH_SHA256_UX500 = 0, 129 + HASH_SHA1_UX500 = 1, 130 + }; 118 131 119 132 #define HASH_AUTOSUSPEND_DELAY 50 120 133 ··· 147 130 u32 flags; 148 131 149 132 u16 bufcnt; 150 - u16 buflen; 133 + u16 blocklen; 151 134 152 135 u8 buffer[HASH_BUFLEN] __aligned(4); 153 136 154 137 /* hash state */ 155 - u32 hw_context[3 + HASH_CSR_REGISTER_NUMBER]; 138 + u32 hw_context[3 + HASH_CSR_NB_MAX]; 156 139 }; 157 140 158 141 struct stm32_hash_request_ctx { 159 142 struct stm32_hash_dev *hdev; 160 143 unsigned long op; 161 144 162 - u8 digest[SHA256_DIGEST_SIZE] __aligned(sizeof(u32)); 145 + u8 digest[SHA512_DIGEST_SIZE] __aligned(sizeof(u32)); 163 146 size_t digcnt; 164 147 165 148 /* DMA */ ··· 183 166 }; 184 167 185 168 struct stm32_hash_pdata { 186 - struct stm32_hash_algs_info *algs_info; 
187 - size_t algs_info_size; 188 - bool has_sr; 189 - bool has_mdmat; 190 - bool broken_emptymsg; 191 - bool ux500; 169 + const int alg_shift; 170 + const struct stm32_hash_algs_info *algs_info; 171 + size_t algs_info_size; 172 + bool has_sr; 173 + bool has_mdmat; 174 + bool broken_emptymsg; 175 + bool ux500; 192 176 }; 193 177 194 178 struct stm32_hash_dev { ··· 200 182 void __iomem *io_base; 201 183 phys_addr_t phys_base; 202 184 u32 dma_mode; 203 - u32 dma_maxburst; 204 185 bool polled; 205 186 206 187 struct ahash_request *req; ··· 292 275 struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req); 293 276 struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm); 294 277 struct stm32_hash_state *state = &rctx->state; 278 + u32 alg = (state->flags & HASH_FLAGS_ALGO_MASK) >> HASH_FLAGS_ALGO_SHIFT; 295 279 296 280 u32 reg = HASH_CR_INIT; 297 281 298 282 if (!(hdev->flags & HASH_FLAGS_INIT)) { 299 - switch (state->flags & HASH_FLAGS_ALGO_MASK) { 300 - case HASH_FLAGS_MD5: 301 - reg |= HASH_CR_ALGO_MD5; 302 - break; 303 - case HASH_FLAGS_SHA1: 304 - if (hdev->pdata->ux500) 305 - reg |= HASH_CR_UX500_ALGO_SHA1; 283 + if (hdev->pdata->ux500) { 284 + reg |= ((alg & BIT(0)) << HASH_CR_ALGO_POS); 285 + } else { 286 + if (hdev->pdata->alg_shift == HASH_CR_ALGO_POS) 287 + reg |= ((alg & BIT(1)) << 17) | 288 + ((alg & BIT(0)) << HASH_CR_ALGO_POS); 306 289 else 307 - reg |= HASH_CR_ALGO_SHA1; 308 - break; 309 - case HASH_FLAGS_SHA224: 310 - reg |= HASH_CR_ALGO_SHA224; 311 - break; 312 - case HASH_FLAGS_SHA256: 313 - if (hdev->pdata->ux500) 314 - reg |= HASH_CR_UX500_ALGO_SHA256; 315 - else 316 - reg |= HASH_CR_ALGO_SHA256; 317 - break; 318 - default: 319 - reg |= HASH_CR_ALGO_MD5; 290 + reg |= alg << hdev->pdata->alg_shift; 320 291 } 321 292 322 293 reg |= (rctx->data_type << HASH_CR_DATATYPE_POS); ··· 312 307 if (state->flags & HASH_FLAGS_HMAC) { 313 308 hdev->flags |= HASH_FLAGS_HMAC; 314 309 reg |= HASH_CR_MODE; 315 - if (ctx->keylen > HASH_LONG_KEY) 310 + if (ctx->keylen > 
crypto_ahash_blocksize(tfm)) 316 311 reg |= HASH_CR_LKEY; 317 312 } 318 313 ··· 323 318 324 319 hdev->flags |= HASH_FLAGS_INIT; 325 320 321 + /* 322 + * After first block + 1 words are fill up, 323 + * we only need to fill 1 block to start partial computation 324 + */ 325 + rctx->state.blocklen -= sizeof(u32); 326 + 326 327 dev_dbg(hdev->dev, "Write Control %x\n", reg); 327 328 } 328 329 } ··· 338 327 struct stm32_hash_state *state = &rctx->state; 339 328 size_t count; 340 329 341 - while ((state->bufcnt < state->buflen) && rctx->total) { 330 + while ((state->bufcnt < state->blocklen) && rctx->total) { 342 331 count = min(rctx->sg->length - rctx->offset, rctx->total); 343 - count = min_t(size_t, count, state->buflen - state->bufcnt); 332 + count = min_t(size_t, count, state->blocklen - state->bufcnt); 344 333 345 334 if (count <= 0) { 346 335 if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) { ··· 430 419 return 0; 431 420 } 432 421 422 + static int hash_swap_reg(struct stm32_hash_request_ctx *rctx) 423 + { 424 + struct stm32_hash_state *state = &rctx->state; 425 + 426 + switch ((state->flags & HASH_FLAGS_ALGO_MASK) >> 427 + HASH_FLAGS_ALGO_SHIFT) { 428 + case HASH_MD5: 429 + case HASH_SHA1: 430 + case HASH_SHA224: 431 + case HASH_SHA256: 432 + if (state->flags & HASH_FLAGS_HMAC) 433 + return HASH_CSR_NB_SHA256_HMAC; 434 + else 435 + return HASH_CSR_NB_SHA256; 436 + break; 437 + 438 + case HASH_SHA384: 439 + case HASH_SHA512: 440 + if (state->flags & HASH_FLAGS_HMAC) 441 + return HASH_CSR_NB_SHA512_HMAC; 442 + else 443 + return HASH_CSR_NB_SHA512; 444 + break; 445 + 446 + case HASH_SHA3_224: 447 + case HASH_SHA3_256: 448 + case HASH_SHA3_384: 449 + case HASH_SHA3_512: 450 + if (state->flags & HASH_FLAGS_HMAC) 451 + return HASH_CSR_NB_SHA3_HMAC; 452 + else 453 + return HASH_CSR_NB_SHA3; 454 + break; 455 + 456 + default: 457 + return -EINVAL; 458 + } 459 + } 460 + 433 461 static int stm32_hash_update_cpu(struct stm32_hash_dev *hdev) 434 462 { 435 463 struct 
stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req); 436 464 struct stm32_hash_state *state = &rctx->state; 437 465 u32 *preg = state->hw_context; 438 466 int bufcnt, err = 0, final; 439 - int i; 467 + int i, swap_reg; 440 468 441 469 dev_dbg(hdev->dev, "%s flags %x\n", __func__, state->flags); 442 470 443 471 final = state->flags & HASH_FLAGS_FINAL; 444 472 445 - while ((rctx->total >= state->buflen) || 446 - (state->bufcnt + rctx->total >= state->buflen)) { 473 + while ((rctx->total >= state->blocklen) || 474 + (state->bufcnt + rctx->total >= state->blocklen)) { 447 475 stm32_hash_append_sg(rctx); 448 476 bufcnt = state->bufcnt; 449 477 state->bufcnt = 0; ··· 505 455 if (stm32_hash_wait_busy(hdev)) 506 456 return -ETIMEDOUT; 507 457 458 + swap_reg = hash_swap_reg(rctx); 459 + 508 460 if (!hdev->pdata->ux500) 509 461 *preg++ = stm32_hash_read(hdev, HASH_IMR); 510 462 *preg++ = stm32_hash_read(hdev, HASH_STR); 511 463 *preg++ = stm32_hash_read(hdev, HASH_CR); 512 - for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++) 464 + for (i = 0; i < swap_reg; i++) 513 465 *preg++ = stm32_hash_read(hdev, HASH_CSR(i)); 514 466 515 467 state->flags |= HASH_FLAGS_INIT; ··· 596 544 struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm); 597 545 int err; 598 546 599 - if (ctx->keylen < HASH_DMA_THRESHOLD || (hdev->dma_mode == 1)) { 547 + if (ctx->keylen < rctx->state.blocklen || hdev->dma_mode == 1) { 600 548 err = stm32_hash_write_key(hdev); 601 549 if (stm32_hash_wait_busy(hdev)) 602 550 return -ETIMEDOUT; ··· 631 579 dma_conf.direction = DMA_MEM_TO_DEV; 632 580 dma_conf.dst_addr = hdev->phys_base + HASH_DIN; 633 581 dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 634 - dma_conf.src_maxburst = hdev->dma_maxburst; 635 - dma_conf.dst_maxburst = hdev->dma_maxburst; 582 + dma_conf.src_maxburst = HASH_BURST_LEVEL; 583 + dma_conf.dst_maxburst = HASH_BURST_LEVEL; 636 584 dma_conf.device_fc = false; 637 585 638 586 chan = dma_request_chan(hdev->dev, "in"); ··· 666 614 rctx->total = 
hdev->req->nbytes; 667 615 668 616 rctx->nents = sg_nents(rctx->sg); 669 - 670 617 if (rctx->nents < 0) 671 618 return -EINVAL; 672 619 ··· 769 718 static bool stm32_hash_dma_aligned_data(struct ahash_request *req) 770 719 { 771 720 struct scatterlist *sg; 721 + struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req); 772 722 struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); 773 723 struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx); 774 724 int i; 775 725 776 - if (req->nbytes <= HASH_DMA_THRESHOLD) 726 + if (!hdev->dma_lch || req->nbytes <= rctx->state.blocklen) 777 727 return false; 778 728 779 729 if (sg_nents(req->src) > 1) { ··· 800 748 struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req); 801 749 struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx); 802 750 struct stm32_hash_state *state = &rctx->state; 751 + bool sha3_mode = ctx->flags & HASH_FLAGS_SHA3_MODE; 803 752 804 753 rctx->hdev = hdev; 805 754 806 755 state->flags = HASH_FLAGS_CPU; 807 756 757 + if (sha3_mode) 758 + state->flags |= HASH_FLAGS_SHA3_MODE; 759 + 808 760 rctx->digcnt = crypto_ahash_digestsize(tfm); 809 761 switch (rctx->digcnt) { 810 762 case MD5_DIGEST_SIZE: 811 - state->flags |= HASH_FLAGS_MD5; 763 + state->flags |= HASH_MD5 << HASH_FLAGS_ALGO_SHIFT; 812 764 break; 813 765 case SHA1_DIGEST_SIZE: 814 - state->flags |= HASH_FLAGS_SHA1; 766 + if (hdev->pdata->ux500) 767 + state->flags |= HASH_SHA1_UX500 << HASH_FLAGS_ALGO_SHIFT; 768 + else 769 + state->flags |= HASH_SHA1 << HASH_FLAGS_ALGO_SHIFT; 815 770 break; 816 771 case SHA224_DIGEST_SIZE: 817 - state->flags |= HASH_FLAGS_SHA224; 772 + if (sha3_mode) 773 + state->flags |= HASH_SHA3_224 << HASH_FLAGS_ALGO_SHIFT; 774 + else 775 + state->flags |= HASH_SHA224 << HASH_FLAGS_ALGO_SHIFT; 818 776 break; 819 777 case SHA256_DIGEST_SIZE: 820 - state->flags |= HASH_FLAGS_SHA256; 778 + if (sha3_mode) { 779 + state->flags |= HASH_SHA3_256 << HASH_FLAGS_ALGO_SHIFT; 780 + } else { 781 + if 
(hdev->pdata->ux500) 782 + state->flags |= HASH_SHA256_UX500 << HASH_FLAGS_ALGO_SHIFT; 783 + else 784 + state->flags |= HASH_SHA256 << HASH_FLAGS_ALGO_SHIFT; 785 + } 786 + break; 787 + case SHA384_DIGEST_SIZE: 788 + if (sha3_mode) 789 + state->flags |= HASH_SHA3_384 << HASH_FLAGS_ALGO_SHIFT; 790 + else 791 + state->flags |= HASH_SHA384 << HASH_FLAGS_ALGO_SHIFT; 792 + break; 793 + case SHA512_DIGEST_SIZE: 794 + if (sha3_mode) 795 + state->flags |= HASH_SHA3_512 << HASH_FLAGS_ALGO_SHIFT; 796 + else 797 + state->flags |= HASH_SHA512 << HASH_FLAGS_ALGO_SHIFT; 821 798 break; 822 799 default: 823 800 return -EINVAL; 824 801 } 825 802 826 803 rctx->state.bufcnt = 0; 827 - rctx->state.buflen = HASH_BUFLEN; 804 + rctx->state.blocklen = crypto_ahash_blocksize(tfm) + sizeof(u32); 805 + if (rctx->state.blocklen > HASH_BUFLEN) { 806 + dev_err(hdev->dev, "Error, block too large"); 807 + return -EINVAL; 808 + } 828 809 rctx->total = 0; 829 810 rctx->offset = 0; 830 811 rctx->data_type = HASH_DATA_8_BITS; ··· 927 842 928 843 static void stm32_hash_copy_hash(struct ahash_request *req) 929 844 { 845 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 930 846 struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req); 931 847 struct stm32_hash_state *state = &rctx->state; 932 848 struct stm32_hash_dev *hdev = rctx->hdev; ··· 937 851 if (hdev->pdata->broken_emptymsg && (state->flags & HASH_FLAGS_EMPTY)) 938 852 return stm32_hash_emptymsg_fallback(req); 939 853 940 - switch (state->flags & HASH_FLAGS_ALGO_MASK) { 941 - case HASH_FLAGS_MD5: 942 - hashsize = MD5_DIGEST_SIZE; 943 - break; 944 - case HASH_FLAGS_SHA1: 945 - hashsize = SHA1_DIGEST_SIZE; 946 - break; 947 - case HASH_FLAGS_SHA224: 948 - hashsize = SHA224_DIGEST_SIZE; 949 - break; 950 - case HASH_FLAGS_SHA256: 951 - hashsize = SHA256_DIGEST_SIZE; 952 - break; 953 - default: 954 - return; 955 - } 854 + hashsize = crypto_ahash_digestsize(tfm); 956 855 957 856 for (i = 0; i < hashsize / sizeof(u32); i++) { 958 857 if 
(hdev->pdata->ux500) ··· 952 881 static int stm32_hash_finish(struct ahash_request *req) 953 882 { 954 883 struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req); 884 + u32 reg; 885 + 886 + reg = stm32_hash_read(rctx->hdev, HASH_SR); 887 + reg &= ~HASH_SR_OUTPUT_READY; 888 + stm32_hash_write(rctx->hdev, HASH_SR, reg); 955 889 956 890 if (!req->result) 957 891 return -EINVAL; ··· 996 920 struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req); 997 921 struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx); 998 922 struct stm32_hash_state *state = &rctx->state; 923 + int swap_reg; 999 924 int err = 0; 1000 925 1001 926 if (!hdev) ··· 1009 932 1010 933 hdev->req = req; 1011 934 hdev->flags = 0; 935 + swap_reg = hash_swap_reg(rctx); 1012 936 1013 937 if (state->flags & HASH_FLAGS_INIT) { 1014 938 u32 *preg = rctx->state.hw_context; ··· 1023 945 reg = *preg++ | HASH_CR_INIT; 1024 946 stm32_hash_write(hdev, HASH_CR, reg); 1025 947 1026 - for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++) 948 + for (i = 0; i < swap_reg; i++) 1027 949 stm32_hash_write(hdev, HASH_CSR(i), *preg++); 1028 950 1029 951 hdev->flags |= HASH_FLAGS_INIT; ··· 1078 1000 rctx->sg = req->src; 1079 1001 rctx->offset = 0; 1080 1002 1081 - if ((state->bufcnt + rctx->total < state->buflen)) { 1003 + if ((state->bufcnt + rctx->total < state->blocklen)) { 1082 1004 stm32_hash_append_sg(rctx); 1083 1005 return 0; 1084 1006 } ··· 1180 1102 return 0; 1181 1103 } 1182 1104 1183 - static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm, 1184 - const char *algs_hmac_name) 1105 + static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm, u32 algs_flags) 1185 1106 { 1186 1107 struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm); 1187 1108 ··· 1189 1112 1190 1113 ctx->keylen = 0; 1191 1114 1192 - if (algs_hmac_name) 1193 - ctx->flags |= HASH_FLAGS_HMAC; 1115 + if (algs_flags) 1116 + ctx->flags |= algs_flags; 1194 1117 1195 1118 ctx->enginectx.op.do_one_request = stm32_hash_one_request; 1196 1119 ··· 1199 1122 
1200 1123 static int stm32_hash_cra_init(struct crypto_tfm *tfm) 1201 1124 { 1202 - return stm32_hash_cra_init_algs(tfm, NULL); 1125 + return stm32_hash_cra_init_algs(tfm, 0); 1203 1126 } 1204 1127 1205 - static int stm32_hash_cra_md5_init(struct crypto_tfm *tfm) 1128 + static int stm32_hash_cra_hmac_init(struct crypto_tfm *tfm) 1206 1129 { 1207 - return stm32_hash_cra_init_algs(tfm, "md5"); 1130 + return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_HMAC); 1208 1131 } 1209 1132 1210 - static int stm32_hash_cra_sha1_init(struct crypto_tfm *tfm) 1133 + static int stm32_hash_cra_sha3_init(struct crypto_tfm *tfm) 1211 1134 { 1212 - return stm32_hash_cra_init_algs(tfm, "sha1"); 1135 + return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE); 1213 1136 } 1214 1137 1215 - static int stm32_hash_cra_sha224_init(struct crypto_tfm *tfm) 1138 + static int stm32_hash_cra_sha3_hmac_init(struct crypto_tfm *tfm) 1216 1139 { 1217 - return stm32_hash_cra_init_algs(tfm, "sha224"); 1140 + return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE | 1141 + HASH_FLAGS_HMAC); 1218 1142 } 1219 1143 1220 - static int stm32_hash_cra_sha256_init(struct crypto_tfm *tfm) 1221 - { 1222 - return stm32_hash_cra_init_algs(tfm, "sha256"); 1223 - } 1224 1144 1225 1145 static void stm32_hash_cra_exit(struct crypto_tfm *tfm) 1226 1146 { ··· 1259 1185 1260 1186 reg = stm32_hash_read(hdev, HASH_SR); 1261 1187 if (reg & HASH_SR_OUTPUT_READY) { 1262 - reg &= ~HASH_SR_OUTPUT_READY; 1263 - stm32_hash_write(hdev, HASH_SR, reg); 1264 1188 hdev->flags |= HASH_FLAGS_OUTPUT_READY; 1265 1189 /* Disable IT*/ 1266 1190 stm32_hash_write(hdev, HASH_IMR, 0); ··· 1316 1244 .cra_blocksize = MD5_HMAC_BLOCK_SIZE, 1317 1245 .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1318 1246 .cra_alignmask = 3, 1319 - .cra_init = stm32_hash_cra_md5_init, 1247 + .cra_init = stm32_hash_cra_hmac_init, 1320 1248 .cra_exit = stm32_hash_cra_exit, 1321 1249 .cra_module = THIS_MODULE, 1322 1250 } 1323 1251 } 1324 - }, 1252 + } 1325 1253 }; 1326 
1254 1327 1255 static struct ahash_alg algs_sha1[] = { ··· 1372 1300 .cra_blocksize = SHA1_BLOCK_SIZE, 1373 1301 .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1374 1302 .cra_alignmask = 3, 1375 - .cra_init = stm32_hash_cra_sha1_init, 1303 + .cra_init = stm32_hash_cra_hmac_init, 1376 1304 .cra_exit = stm32_hash_cra_exit, 1377 1305 .cra_module = THIS_MODULE, 1378 1306 } ··· 1428 1356 .cra_blocksize = SHA224_BLOCK_SIZE, 1429 1357 .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1430 1358 .cra_alignmask = 3, 1431 - .cra_init = stm32_hash_cra_sha224_init, 1359 + .cra_init = stm32_hash_cra_hmac_init, 1432 1360 .cra_exit = stm32_hash_cra_exit, 1433 1361 .cra_module = THIS_MODULE, 1434 1362 } ··· 1484 1412 .cra_blocksize = SHA256_BLOCK_SIZE, 1485 1413 .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1486 1414 .cra_alignmask = 3, 1487 - .cra_init = stm32_hash_cra_sha256_init, 1415 + .cra_init = stm32_hash_cra_hmac_init, 1488 1416 .cra_exit = stm32_hash_cra_exit, 1489 1417 .cra_module = THIS_MODULE, 1490 1418 } 1491 1419 } 1492 1420 }, 1421 + }; 1422 + 1423 + static struct ahash_alg algs_sha384_sha512[] = { 1424 + { 1425 + .init = stm32_hash_init, 1426 + .update = stm32_hash_update, 1427 + .final = stm32_hash_final, 1428 + .finup = stm32_hash_finup, 1429 + .digest = stm32_hash_digest, 1430 + .export = stm32_hash_export, 1431 + .import = stm32_hash_import, 1432 + .halg = { 1433 + .digestsize = SHA384_DIGEST_SIZE, 1434 + .statesize = sizeof(struct stm32_hash_state), 1435 + .base = { 1436 + .cra_name = "sha384", 1437 + .cra_driver_name = "stm32-sha384", 1438 + .cra_priority = 200, 1439 + .cra_flags = CRYPTO_ALG_ASYNC | 1440 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1441 + .cra_blocksize = SHA384_BLOCK_SIZE, 1442 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1443 + .cra_alignmask = 3, 1444 + .cra_init = stm32_hash_cra_init, 1445 + .cra_exit = stm32_hash_cra_exit, 1446 + .cra_module = THIS_MODULE, 1447 + } 1448 + } 1449 + }, 1450 + { 1451 + .init = stm32_hash_init, 1452 + .update = 
stm32_hash_update, 1453 + .final = stm32_hash_final, 1454 + .finup = stm32_hash_finup, 1455 + .digest = stm32_hash_digest, 1456 + .setkey = stm32_hash_setkey, 1457 + .export = stm32_hash_export, 1458 + .import = stm32_hash_import, 1459 + .halg = { 1460 + .digestsize = SHA384_DIGEST_SIZE, 1461 + .statesize = sizeof(struct stm32_hash_state), 1462 + .base = { 1463 + .cra_name = "hmac(sha384)", 1464 + .cra_driver_name = "stm32-hmac-sha384", 1465 + .cra_priority = 200, 1466 + .cra_flags = CRYPTO_ALG_ASYNC | 1467 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1468 + .cra_blocksize = SHA384_BLOCK_SIZE, 1469 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1470 + .cra_alignmask = 3, 1471 + .cra_init = stm32_hash_cra_hmac_init, 1472 + .cra_exit = stm32_hash_cra_exit, 1473 + .cra_module = THIS_MODULE, 1474 + } 1475 + } 1476 + }, 1477 + { 1478 + .init = stm32_hash_init, 1479 + .update = stm32_hash_update, 1480 + .final = stm32_hash_final, 1481 + .finup = stm32_hash_finup, 1482 + .digest = stm32_hash_digest, 1483 + .export = stm32_hash_export, 1484 + .import = stm32_hash_import, 1485 + .halg = { 1486 + .digestsize = SHA512_DIGEST_SIZE, 1487 + .statesize = sizeof(struct stm32_hash_state), 1488 + .base = { 1489 + .cra_name = "sha512", 1490 + .cra_driver_name = "stm32-sha512", 1491 + .cra_priority = 200, 1492 + .cra_flags = CRYPTO_ALG_ASYNC | 1493 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1494 + .cra_blocksize = SHA512_BLOCK_SIZE, 1495 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1496 + .cra_alignmask = 3, 1497 + .cra_init = stm32_hash_cra_init, 1498 + .cra_exit = stm32_hash_cra_exit, 1499 + .cra_module = THIS_MODULE, 1500 + } 1501 + } 1502 + }, 1503 + { 1504 + .init = stm32_hash_init, 1505 + .update = stm32_hash_update, 1506 + .final = stm32_hash_final, 1507 + .finup = stm32_hash_finup, 1508 + .digest = stm32_hash_digest, 1509 + .export = stm32_hash_export, 1510 + .import = stm32_hash_import, 1511 + .setkey = stm32_hash_setkey, 1512 + .halg = { 1513 + .digestsize = SHA512_DIGEST_SIZE, 1514 + .statesize 
= sizeof(struct stm32_hash_state), 1515 + .base = { 1516 + .cra_name = "hmac(sha512)", 1517 + .cra_driver_name = "stm32-hmac-sha512", 1518 + .cra_priority = 200, 1519 + .cra_flags = CRYPTO_ALG_ASYNC | 1520 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1521 + .cra_blocksize = SHA512_BLOCK_SIZE, 1522 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1523 + .cra_alignmask = 3, 1524 + .cra_init = stm32_hash_cra_hmac_init, 1525 + .cra_exit = stm32_hash_cra_exit, 1526 + .cra_module = THIS_MODULE, 1527 + } 1528 + } 1529 + }, 1530 + }; 1531 + 1532 + static struct ahash_alg algs_sha3[] = { 1533 + { 1534 + .init = stm32_hash_init, 1535 + .update = stm32_hash_update, 1536 + .final = stm32_hash_final, 1537 + .finup = stm32_hash_finup, 1538 + .digest = stm32_hash_digest, 1539 + .export = stm32_hash_export, 1540 + .import = stm32_hash_import, 1541 + .halg = { 1542 + .digestsize = SHA3_224_DIGEST_SIZE, 1543 + .statesize = sizeof(struct stm32_hash_state), 1544 + .base = { 1545 + .cra_name = "sha3-224", 1546 + .cra_driver_name = "stm32-sha3-224", 1547 + .cra_priority = 200, 1548 + .cra_flags = CRYPTO_ALG_ASYNC | 1549 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1550 + .cra_blocksize = SHA3_224_BLOCK_SIZE, 1551 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1552 + .cra_alignmask = 3, 1553 + .cra_init = stm32_hash_cra_sha3_init, 1554 + .cra_exit = stm32_hash_cra_exit, 1555 + .cra_module = THIS_MODULE, 1556 + } 1557 + } 1558 + }, 1559 + { 1560 + .init = stm32_hash_init, 1561 + .update = stm32_hash_update, 1562 + .final = stm32_hash_final, 1563 + .finup = stm32_hash_finup, 1564 + .digest = stm32_hash_digest, 1565 + .export = stm32_hash_export, 1566 + .import = stm32_hash_import, 1567 + .setkey = stm32_hash_setkey, 1568 + .halg = { 1569 + .digestsize = SHA3_224_DIGEST_SIZE, 1570 + .statesize = sizeof(struct stm32_hash_state), 1571 + .base = { 1572 + .cra_name = "hmac(sha3-224)", 1573 + .cra_driver_name = "stm32-hmac-sha3-224", 1574 + .cra_priority = 200, 1575 + .cra_flags = CRYPTO_ALG_ASYNC | 1576 + 
CRYPTO_ALG_KERN_DRIVER_ONLY, 1577 + .cra_blocksize = SHA3_224_BLOCK_SIZE, 1578 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1579 + .cra_alignmask = 3, 1580 + .cra_init = stm32_hash_cra_sha3_hmac_init, 1581 + .cra_exit = stm32_hash_cra_exit, 1582 + .cra_module = THIS_MODULE, 1583 + } 1584 + } 1585 + }, 1586 + { 1587 + .init = stm32_hash_init, 1588 + .update = stm32_hash_update, 1589 + .final = stm32_hash_final, 1590 + .finup = stm32_hash_finup, 1591 + .digest = stm32_hash_digest, 1592 + .export = stm32_hash_export, 1593 + .import = stm32_hash_import, 1594 + .halg = { 1595 + .digestsize = SHA3_256_DIGEST_SIZE, 1596 + .statesize = sizeof(struct stm32_hash_state), 1597 + .base = { 1598 + .cra_name = "sha3-256", 1599 + .cra_driver_name = "stm32-sha3-256", 1600 + .cra_priority = 200, 1601 + .cra_flags = CRYPTO_ALG_ASYNC | 1602 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1603 + .cra_blocksize = SHA3_256_BLOCK_SIZE, 1604 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1605 + .cra_alignmask = 3, 1606 + .cra_init = stm32_hash_cra_sha3_init, 1607 + .cra_exit = stm32_hash_cra_exit, 1608 + .cra_module = THIS_MODULE, 1609 + } 1610 + } 1611 + }, 1612 + { 1613 + .init = stm32_hash_init, 1614 + .update = stm32_hash_update, 1615 + .final = stm32_hash_final, 1616 + .finup = stm32_hash_finup, 1617 + .digest = stm32_hash_digest, 1618 + .export = stm32_hash_export, 1619 + .import = stm32_hash_import, 1620 + .setkey = stm32_hash_setkey, 1621 + .halg = { 1622 + .digestsize = SHA3_256_DIGEST_SIZE, 1623 + .statesize = sizeof(struct stm32_hash_state), 1624 + .base = { 1625 + .cra_name = "hmac(sha3-256)", 1626 + .cra_driver_name = "stm32-hmac-sha3-256", 1627 + .cra_priority = 200, 1628 + .cra_flags = CRYPTO_ALG_ASYNC | 1629 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1630 + .cra_blocksize = SHA3_256_BLOCK_SIZE, 1631 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1632 + .cra_alignmask = 3, 1633 + .cra_init = stm32_hash_cra_sha3_hmac_init, 1634 + .cra_exit = stm32_hash_cra_exit, 1635 + .cra_module = THIS_MODULE, 
1636 + } 1637 + } 1638 + }, 1639 + { 1640 + .init = stm32_hash_init, 1641 + .update = stm32_hash_update, 1642 + .final = stm32_hash_final, 1643 + .finup = stm32_hash_finup, 1644 + .digest = stm32_hash_digest, 1645 + .export = stm32_hash_export, 1646 + .import = stm32_hash_import, 1647 + .halg = { 1648 + .digestsize = SHA3_384_DIGEST_SIZE, 1649 + .statesize = sizeof(struct stm32_hash_state), 1650 + .base = { 1651 + .cra_name = "sha3-384", 1652 + .cra_driver_name = "stm32-sha3-384", 1653 + .cra_priority = 200, 1654 + .cra_flags = CRYPTO_ALG_ASYNC | 1655 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1656 + .cra_blocksize = SHA3_384_BLOCK_SIZE, 1657 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1658 + .cra_alignmask = 3, 1659 + .cra_init = stm32_hash_cra_sha3_init, 1660 + .cra_exit = stm32_hash_cra_exit, 1661 + .cra_module = THIS_MODULE, 1662 + } 1663 + } 1664 + }, 1665 + { 1666 + .init = stm32_hash_init, 1667 + .update = stm32_hash_update, 1668 + .final = stm32_hash_final, 1669 + .finup = stm32_hash_finup, 1670 + .digest = stm32_hash_digest, 1671 + .export = stm32_hash_export, 1672 + .import = stm32_hash_import, 1673 + .setkey = stm32_hash_setkey, 1674 + .halg = { 1675 + .digestsize = SHA3_384_DIGEST_SIZE, 1676 + .statesize = sizeof(struct stm32_hash_state), 1677 + .base = { 1678 + .cra_name = "hmac(sha3-384)", 1679 + .cra_driver_name = "stm32-hmac-sha3-384", 1680 + .cra_priority = 200, 1681 + .cra_flags = CRYPTO_ALG_ASYNC | 1682 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1683 + .cra_blocksize = SHA3_384_BLOCK_SIZE, 1684 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1685 + .cra_alignmask = 3, 1686 + .cra_init = stm32_hash_cra_sha3_hmac_init, 1687 + .cra_exit = stm32_hash_cra_exit, 1688 + .cra_module = THIS_MODULE, 1689 + } 1690 + } 1691 + }, 1692 + { 1693 + .init = stm32_hash_init, 1694 + .update = stm32_hash_update, 1695 + .final = stm32_hash_final, 1696 + .finup = stm32_hash_finup, 1697 + .digest = stm32_hash_digest, 1698 + .export = stm32_hash_export, 1699 + .import = stm32_hash_import, 
1700 + .halg = { 1701 + .digestsize = SHA3_512_DIGEST_SIZE, 1702 + .statesize = sizeof(struct stm32_hash_state), 1703 + .base = { 1704 + .cra_name = "sha3-512", 1705 + .cra_driver_name = "stm32-sha3-512", 1706 + .cra_priority = 200, 1707 + .cra_flags = CRYPTO_ALG_ASYNC | 1708 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1709 + .cra_blocksize = SHA3_512_BLOCK_SIZE, 1710 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1711 + .cra_alignmask = 3, 1712 + .cra_init = stm32_hash_cra_sha3_init, 1713 + .cra_exit = stm32_hash_cra_exit, 1714 + .cra_module = THIS_MODULE, 1715 + } 1716 + } 1717 + }, 1718 + { 1719 + .init = stm32_hash_init, 1720 + .update = stm32_hash_update, 1721 + .final = stm32_hash_final, 1722 + .finup = stm32_hash_finup, 1723 + .digest = stm32_hash_digest, 1724 + .export = stm32_hash_export, 1725 + .import = stm32_hash_import, 1726 + .setkey = stm32_hash_setkey, 1727 + .halg = { 1728 + .digestsize = SHA3_512_DIGEST_SIZE, 1729 + .statesize = sizeof(struct stm32_hash_state), 1730 + .base = { 1731 + .cra_name = "hmac(sha3-512)", 1732 + .cra_driver_name = "stm32-hmac-sha3-512", 1733 + .cra_priority = 200, 1734 + .cra_flags = CRYPTO_ALG_ASYNC | 1735 + CRYPTO_ALG_KERN_DRIVER_ONLY, 1736 + .cra_blocksize = SHA3_512_BLOCK_SIZE, 1737 + .cra_ctxsize = sizeof(struct stm32_hash_ctx), 1738 + .cra_alignmask = 3, 1739 + .cra_init = stm32_hash_cra_sha3_hmac_init, 1740 + .cra_exit = stm32_hash_cra_exit, 1741 + .cra_module = THIS_MODULE, 1742 + } 1743 + } 1744 + } 1493 1745 }; 1494 1746 1495 1747 static int stm32_hash_register_algs(struct stm32_hash_dev *hdev) ··· 1867 1471 }; 1868 1472 1869 1473 static const struct stm32_hash_pdata stm32_hash_pdata_ux500 = { 1474 + .alg_shift = 7, 1870 1475 .algs_info = stm32_hash_algs_info_ux500, 1871 1476 .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_ux500), 1872 1477 .broken_emptymsg = true, ··· 1886 1489 }; 1887 1490 1888 1491 static const struct stm32_hash_pdata stm32_hash_pdata_stm32f4 = { 1492 + .alg_shift = 7, 1889 1493 .algs_info = 
stm32_hash_algs_info_stm32f4, 1890 1494 .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32f4), 1891 1495 .has_sr = true, ··· 1913 1515 }; 1914 1516 1915 1517 static const struct stm32_hash_pdata stm32_hash_pdata_stm32f7 = { 1518 + .alg_shift = 7, 1916 1519 .algs_info = stm32_hash_algs_info_stm32f7, 1917 1520 .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32f7), 1918 1521 .has_sr = true, 1919 1522 .has_mdmat = true, 1920 1523 }; 1921 1524 1525 + static struct stm32_hash_algs_info stm32_hash_algs_info_stm32mp13[] = { 1526 + { 1527 + .algs_list = algs_sha1, 1528 + .size = ARRAY_SIZE(algs_sha1), 1529 + }, 1530 + { 1531 + .algs_list = algs_sha224, 1532 + .size = ARRAY_SIZE(algs_sha224), 1533 + }, 1534 + { 1535 + .algs_list = algs_sha256, 1536 + .size = ARRAY_SIZE(algs_sha256), 1537 + }, 1538 + { 1539 + .algs_list = algs_sha384_sha512, 1540 + .size = ARRAY_SIZE(algs_sha384_sha512), 1541 + }, 1542 + { 1543 + .algs_list = algs_sha3, 1544 + .size = ARRAY_SIZE(algs_sha3), 1545 + }, 1546 + }; 1547 + 1548 + static const struct stm32_hash_pdata stm32_hash_pdata_stm32mp13 = { 1549 + .alg_shift = 17, 1550 + .algs_info = stm32_hash_algs_info_stm32mp13, 1551 + .algs_info_size = ARRAY_SIZE(stm32_hash_algs_info_stm32mp13), 1552 + .has_sr = true, 1553 + .has_mdmat = true, 1554 + }; 1555 + 1922 1556 static const struct of_device_id stm32_hash_of_match[] = { 1923 - { 1924 - .compatible = "stericsson,ux500-hash", 1925 - .data = &stm32_hash_pdata_ux500, 1926 - }, 1927 - { 1928 - .compatible = "st,stm32f456-hash", 1929 - .data = &stm32_hash_pdata_stm32f4, 1930 - }, 1931 - { 1932 - .compatible = "st,stm32f756-hash", 1933 - .data = &stm32_hash_pdata_stm32f7, 1934 - }, 1557 + { .compatible = "stericsson,ux500-hash", .data = &stm32_hash_pdata_ux500 }, 1558 + { .compatible = "st,stm32f456-hash", .data = &stm32_hash_pdata_stm32f4 }, 1559 + { .compatible = "st,stm32f756-hash", .data = &stm32_hash_pdata_stm32f7 }, 1560 + { .compatible = "st,stm32mp13-hash", .data = 
&stm32_hash_pdata_stm32mp13 }, 1935 1561 {}, 1936 1562 }; 1937 1563 ··· 1968 1546 if (!hdev->pdata) { 1969 1547 dev_err(dev, "no compatible OF match\n"); 1970 1548 return -EINVAL; 1971 - } 1972 - 1973 - if (of_property_read_u32(dev->of_node, "dma-maxburst", 1974 - &hdev->dma_maxburst)) { 1975 - dev_info(dev, "dma-maxburst not specified, using 0\n"); 1976 - hdev->dma_maxburst = 0; 1977 1549 } 1978 1550 1979 1551 return 0; ··· 2079 1663 /* FIXME: implement DMA mode for Ux500 */ 2080 1664 hdev->dma_mode = 0; 2081 1665 else 2082 - hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR); 1666 + hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR) & HASH_HWCFG_DMA_MASK; 2083 1667 2084 1668 /* Register algos */ 2085 1669 ret = stm32_hash_register_algs(hdev); ··· 2188 1772 2189 1773 module_platform_driver(stm32_hash_driver); 2190 1774 2191 - MODULE_DESCRIPTION("STM32 SHA1/224/256 & MD5 (HMAC) hw accelerator driver"); 1775 + MODULE_DESCRIPTION("STM32 SHA1/SHA2/SHA3 & MD5 (HMAC) hw accelerator driver"); 2192 1776 MODULE_AUTHOR("Lionel Debieve <lionel.debieve@st.com>"); 2193 1777 MODULE_LICENSE("GPL v2");