Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: starfive - Use dma for aes requests

Convert the AES module to use DMA for data transfers, to reduce CPU load
and be compatible with future variants.

Signed-off-by: Jia Jie Ho <jiajie.ho@starfivetech.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Jia Jie Ho and committed by
Herbert Xu
7467147e a05c821e

+398 -241
+4
drivers/crypto/starfive/Kconfig
··· 14 14 select CRYPTO_RSA 15 15 select CRYPTO_AES 16 16 select CRYPTO_CCM 17 + select CRYPTO_GCM 18 + select CRYPTO_ECB 19 + select CRYPTO_CBC 20 + select CRYPTO_CTR 17 21 help 18 22 Support for StarFive JH7110 crypto hardware acceleration engine. 19 23 This module provides acceleration for public key algo,
+393 -202
drivers/crypto/starfive/jh7110-aes.c
··· 78 78 return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM; 79 79 } 80 80 81 - static inline int is_encrypt(struct starfive_cryp_dev *cryp) 81 + static inline bool is_encrypt(struct starfive_cryp_dev *cryp) 82 82 { 83 83 return cryp->flags & FLG_ENCRYPT; 84 84 } ··· 101 101 writel(value, cryp->base + STARFIVE_AES_CSR); 102 102 break; 103 103 } 104 - } 105 - 106 - static inline void starfive_aes_set_ivlen(struct starfive_cryp_ctx *ctx) 107 - { 108 - struct starfive_cryp_dev *cryp = ctx->cryp; 109 - 110 - if (is_gcm(cryp)) 111 - writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN); 112 - else 113 - writel(AES_BLOCK_SIZE, cryp->base + STARFIVE_AES_IVLEN); 114 104 } 115 105 116 106 static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx) ··· 251 261 252 262 rctx->csr.aes.mode = hw_mode; 253 263 rctx->csr.aes.cmode = !is_encrypt(cryp); 254 - rctx->csr.aes.ie = 1; 255 264 rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1; 256 265 257 266 if (cryp->side_chan) { ··· 268 279 case STARFIVE_AES_MODE_GCM: 269 280 starfive_aes_set_alen(ctx); 270 281 starfive_aes_set_mlen(ctx); 271 - starfive_aes_set_ivlen(ctx); 282 + writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN); 272 283 starfive_aes_aead_hw_start(ctx, hw_mode); 273 284 starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv); 274 285 break; ··· 289 300 return cryp->err; 290 301 } 291 302 292 - static int starfive_aes_read_authtag(struct starfive_cryp_dev *cryp) 303 + static int starfive_aes_read_authtag(struct starfive_cryp_ctx *ctx) 293 304 { 294 - int i, start_addr; 305 + struct starfive_cryp_dev *cryp = ctx->cryp; 306 + struct starfive_cryp_request_ctx *rctx = ctx->rctx; 307 + int i; 295 308 296 309 if (starfive_aes_wait_busy(cryp)) 297 310 return dev_err_probe(cryp->dev, -ETIMEDOUT, 298 311 "Timeout waiting for tag generation."); 299 312 300 - start_addr = STARFIVE_AES_NONCE0; 301 - 302 - if (is_gcm(cryp)) 303 - for (i = 0; i < AES_BLOCK_32; i++, start_addr += 4) 304 - cryp->tag_out[i] 
= readl(cryp->base + start_addr); 305 - else 313 + if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM) { 314 + cryp->tag_out[0] = readl(cryp->base + STARFIVE_AES_NONCE0); 315 + cryp->tag_out[1] = readl(cryp->base + STARFIVE_AES_NONCE1); 316 + cryp->tag_out[2] = readl(cryp->base + STARFIVE_AES_NONCE2); 317 + cryp->tag_out[3] = readl(cryp->base + STARFIVE_AES_NONCE3); 318 + } else { 306 319 for (i = 0; i < AES_BLOCK_32; i++) 307 320 cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R); 321 + } 308 322 309 323 if (is_encrypt(cryp)) { 310 - scatterwalk_copychunks(cryp->tag_out, &cryp->out_walk, cryp->authsize, 1); 324 + scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg, 325 + cryp->total_in, cryp->authsize, 1); 311 326 } else { 312 - scatterwalk_copychunks(cryp->tag_in, &cryp->in_walk, cryp->authsize, 0); 313 - 314 327 if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize)) 315 328 return dev_err_probe(cryp->dev, -EBADMSG, "Failed tag verification\n"); 316 329 } ··· 320 329 return 0; 321 330 } 322 331 323 - static void starfive_aes_finish_req(struct starfive_cryp_dev *cryp) 332 + static void starfive_aes_finish_req(struct starfive_cryp_ctx *ctx) 324 333 { 325 - union starfive_aes_csr csr; 334 + struct starfive_cryp_dev *cryp = ctx->cryp; 326 335 int err = cryp->err; 327 336 328 337 if (!err && cryp->authsize) 329 - err = starfive_aes_read_authtag(cryp); 338 + err = starfive_aes_read_authtag(ctx); 330 339 331 340 if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC || 332 341 (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR)) 333 342 starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv); 334 - 335 - /* reset irq flags*/ 336 - csr.v = 0; 337 - csr.aesrst = 1; 338 - writel(csr.v, cryp->base + STARFIVE_AES_CSR); 339 343 340 344 if (cryp->authsize) 341 345 crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err); 342 346 else 343 347 crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq, 344 348 err); 345 - } 
346 - 347 - void starfive_aes_done_task(unsigned long param) 348 - { 349 - struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)param; 350 - u32 block[AES_BLOCK_32]; 351 - u32 stat; 352 - int i; 353 - 354 - for (i = 0; i < AES_BLOCK_32; i++) 355 - block[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R); 356 - 357 - scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, AES_BLOCK_SIZE, 358 - cryp->total_out), 1); 359 - 360 - cryp->total_out -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_out); 361 - 362 - if (!cryp->total_out) { 363 - starfive_aes_finish_req(cryp); 364 - return; 365 - } 366 - 367 - memset(block, 0, AES_BLOCK_SIZE); 368 - scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE, 369 - cryp->total_in), 0); 370 - cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in); 371 - 372 - for (i = 0; i < AES_BLOCK_32; i++) 373 - writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R); 374 - 375 - stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET); 376 - stat &= ~STARFIVE_IE_MASK_AES_DONE; 377 - writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET); 378 349 } 379 350 380 351 static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx) ··· 404 451 return 0; 405 452 } 406 453 407 - static int starfive_aes_prepare_req(struct skcipher_request *req, 408 - struct aead_request *areq) 454 + static void starfive_aes_dma_done(void *param) 409 455 { 410 - struct starfive_cryp_ctx *ctx; 411 - struct starfive_cryp_request_ctx *rctx; 412 - struct starfive_cryp_dev *cryp; 456 + struct starfive_cryp_dev *cryp = param; 413 457 414 - if (!req && !areq) 415 - return -EINVAL; 458 + complete(&cryp->dma_done); 459 + } 416 460 417 - ctx = req ? 
crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) : 418 - crypto_aead_ctx(crypto_aead_reqtfm(areq)); 461 + static void starfive_aes_dma_init(struct starfive_cryp_dev *cryp) 462 + { 463 + cryp->cfg_in.direction = DMA_MEM_TO_DEV; 464 + cryp->cfg_in.src_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES; 465 + cryp->cfg_in.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 466 + cryp->cfg_in.src_maxburst = cryp->dma_maxburst; 467 + cryp->cfg_in.dst_maxburst = cryp->dma_maxburst; 468 + cryp->cfg_in.dst_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET; 419 469 420 - cryp = ctx->cryp; 421 - rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq); 470 + dmaengine_slave_config(cryp->tx, &cryp->cfg_in); 422 471 423 - if (req) { 424 - cryp->req.sreq = req; 425 - cryp->total_in = req->cryptlen; 426 - cryp->total_out = req->cryptlen; 427 - cryp->assoclen = 0; 428 - cryp->authsize = 0; 472 + cryp->cfg_out.direction = DMA_DEV_TO_MEM; 473 + cryp->cfg_out.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 474 + cryp->cfg_out.dst_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES; 475 + cryp->cfg_out.src_maxburst = 4; 476 + cryp->cfg_out.dst_maxburst = 4; 477 + cryp->cfg_out.src_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET; 478 + 479 + dmaengine_slave_config(cryp->rx, &cryp->cfg_out); 480 + 481 + init_completion(&cryp->dma_done); 482 + } 483 + 484 + static int starfive_aes_dma_xfer(struct starfive_cryp_dev *cryp, 485 + struct scatterlist *src, 486 + struct scatterlist *dst, 487 + int len) 488 + { 489 + struct dma_async_tx_descriptor *in_desc, *out_desc; 490 + union starfive_alg_cr alg_cr; 491 + int ret = 0, in_save, out_save; 492 + 493 + alg_cr.v = 0; 494 + alg_cr.start = 1; 495 + alg_cr.aes_dma_en = 1; 496 + writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET); 497 + 498 + in_save = sg_dma_len(src); 499 + out_save = sg_dma_len(dst); 500 + 501 + writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_IN_LEN_OFFSET); 502 + writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + 
STARFIVE_DMA_OUT_LEN_OFFSET); 503 + 504 + sg_dma_len(src) = ALIGN(len, AES_BLOCK_SIZE); 505 + sg_dma_len(dst) = ALIGN(len, AES_BLOCK_SIZE); 506 + 507 + out_desc = dmaengine_prep_slave_sg(cryp->rx, dst, 1, DMA_DEV_TO_MEM, 508 + DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 509 + if (!out_desc) { 510 + ret = -EINVAL; 511 + goto dma_err; 512 + } 513 + 514 + out_desc->callback = starfive_aes_dma_done; 515 + out_desc->callback_param = cryp; 516 + 517 + reinit_completion(&cryp->dma_done); 518 + dmaengine_submit(out_desc); 519 + dma_async_issue_pending(cryp->rx); 520 + 521 + in_desc = dmaengine_prep_slave_sg(cryp->tx, src, 1, DMA_MEM_TO_DEV, 522 + DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 523 + if (!in_desc) { 524 + ret = -EINVAL; 525 + goto dma_err; 526 + } 527 + 528 + dmaengine_submit(in_desc); 529 + dma_async_issue_pending(cryp->tx); 530 + 531 + if (!wait_for_completion_timeout(&cryp->dma_done, 532 + msecs_to_jiffies(1000))) 533 + ret = -ETIMEDOUT; 534 + 535 + dma_err: 536 + sg_dma_len(src) = in_save; 537 + sg_dma_len(dst) = out_save; 538 + 539 + alg_cr.v = 0; 540 + alg_cr.clear = 1; 541 + writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET); 542 + 543 + return ret; 544 + } 545 + 546 + static int starfive_aes_map_sg(struct starfive_cryp_dev *cryp, 547 + struct scatterlist *src, 548 + struct scatterlist *dst) 549 + { 550 + struct scatterlist *stsg, *dtsg; 551 + struct scatterlist _src[2], _dst[2]; 552 + unsigned int remain = cryp->total_in; 553 + unsigned int len, src_nents, dst_nents; 554 + int ret; 555 + 556 + if (src == dst) { 557 + for (stsg = src, dtsg = dst; remain > 0; 558 + stsg = sg_next(stsg), dtsg = sg_next(dtsg)) { 559 + src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL); 560 + if (src_nents == 0) 561 + return dev_err_probe(cryp->dev, -ENOMEM, 562 + "dma_map_sg error\n"); 563 + 564 + dst_nents = src_nents; 565 + len = min(sg_dma_len(stsg), remain); 566 + 567 + ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len); 568 + dma_unmap_sg(cryp->dev, stsg, 1, 
DMA_BIDIRECTIONAL); 569 + if (ret) 570 + return ret; 571 + 572 + remain -= len; 573 + } 429 574 } else { 430 - cryp->req.areq = areq; 431 - cryp->assoclen = areq->assoclen; 432 - cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq)); 433 - if (is_encrypt(cryp)) { 434 - cryp->total_in = areq->cryptlen; 435 - cryp->total_out = areq->cryptlen; 436 - } else { 437 - cryp->total_in = areq->cryptlen - cryp->authsize; 438 - cryp->total_out = cryp->total_in; 575 + for (stsg = src, dtsg = dst;;) { 576 + src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE); 577 + if (src_nents == 0) 578 + return dev_err_probe(cryp->dev, -ENOMEM, 579 + "dma_map_sg src error\n"); 580 + 581 + dst_nents = dma_map_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE); 582 + if (dst_nents == 0) 583 + return dev_err_probe(cryp->dev, -ENOMEM, 584 + "dma_map_sg dst error\n"); 585 + 586 + len = min(sg_dma_len(stsg), sg_dma_len(dtsg)); 587 + len = min(len, remain); 588 + 589 + ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len); 590 + dma_unmap_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE); 591 + dma_unmap_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE); 592 + if (ret) 593 + return ret; 594 + 595 + remain -= len; 596 + if (remain == 0) 597 + break; 598 + 599 + if (sg_dma_len(stsg) - len) { 600 + stsg = scatterwalk_ffwd(_src, stsg, len); 601 + dtsg = sg_next(dtsg); 602 + } else if (sg_dma_len(dtsg) - len) { 603 + dtsg = scatterwalk_ffwd(_dst, dtsg, len); 604 + stsg = sg_next(stsg); 605 + } else { 606 + stsg = sg_next(stsg); 607 + dtsg = sg_next(dtsg); 608 + } 439 609 } 440 610 } 441 611 442 - rctx->in_sg = req ? req->src : areq->src; 443 - scatterwalk_start(&cryp->in_walk, rctx->in_sg); 444 - 445 - rctx->out_sg = req ? 
req->dst : areq->dst; 446 - scatterwalk_start(&cryp->out_walk, rctx->out_sg); 447 - 448 - if (cryp->assoclen) { 449 - rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL); 450 - if (!rctx->adata) 451 - return dev_err_probe(cryp->dev, -ENOMEM, 452 - "Failed to alloc memory for adata"); 453 - 454 - scatterwalk_copychunks(rctx->adata, &cryp->in_walk, cryp->assoclen, 0); 455 - scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->assoclen, 2); 456 - } 457 - 458 - ctx->rctx = rctx; 459 - 460 - return starfive_aes_hw_init(ctx); 612 + return 0; 461 613 } 462 614 463 615 static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq) ··· 571 513 container_of(areq, struct skcipher_request, base); 572 514 struct starfive_cryp_ctx *ctx = 573 515 crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)); 516 + struct starfive_cryp_request_ctx *rctx = skcipher_request_ctx(req); 574 517 struct starfive_cryp_dev *cryp = ctx->cryp; 575 - u32 block[AES_BLOCK_32]; 576 - u32 stat; 577 - int err; 578 - int i; 518 + int ret; 579 519 580 - err = starfive_aes_prepare_req(req, NULL); 581 - if (err) 582 - return err; 520 + cryp->req.sreq = req; 521 + cryp->total_in = req->cryptlen; 522 + cryp->total_out = req->cryptlen; 523 + cryp->assoclen = 0; 524 + cryp->authsize = 0; 583 525 584 - /* 585 - * Write first plain/ciphertext block to start the module 586 - * then let irq tasklet handle the rest of the data blocks. 
587 - */ 588 - scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE, 589 - cryp->total_in), 0); 590 - cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in); 526 + rctx->in_sg = req->src; 527 + rctx->out_sg = req->dst; 591 528 592 - for (i = 0; i < AES_BLOCK_32; i++) 593 - writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R); 529 + ctx->rctx = rctx; 594 530 595 - stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET); 596 - stat &= ~STARFIVE_IE_MASK_AES_DONE; 597 - writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET); 531 + ret = starfive_aes_hw_init(ctx); 532 + if (ret) 533 + return ret; 534 + 535 + starfive_aes_dma_init(cryp); 536 + 537 + ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg); 538 + if (ret) 539 + return ret; 540 + 541 + starfive_aes_finish_req(ctx); 598 542 599 543 return 0; 600 544 } 601 545 602 - static int starfive_aes_init_tfm(struct crypto_skcipher *tfm) 546 + static int starfive_aes_init_tfm(struct crypto_skcipher *tfm, 547 + const char *alg_name) 603 548 { 604 549 struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm); 605 550 ··· 610 549 if (!ctx->cryp) 611 550 return -ENODEV; 612 551 552 + ctx->skcipher_fbk = crypto_alloc_skcipher(alg_name, 0, 553 + CRYPTO_ALG_NEED_FALLBACK); 554 + if (IS_ERR(ctx->skcipher_fbk)) 555 + return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->skcipher_fbk), 556 + "%s() failed to allocate fallback for %s\n", 557 + __func__, alg_name); 558 + 613 559 crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) + 614 - sizeof(struct skcipher_request)); 560 + crypto_skcipher_reqsize(ctx->skcipher_fbk)); 615 561 616 562 return 0; 563 + } 564 + 565 + static void starfive_aes_exit_tfm(struct crypto_skcipher *tfm) 566 + { 567 + struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm); 568 + 569 + crypto_free_skcipher(ctx->skcipher_fbk); 617 570 } 618 571 619 572 static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq) ··· 637 562 struct 
starfive_cryp_ctx *ctx = 638 563 crypto_aead_ctx(crypto_aead_reqtfm(req)); 639 564 struct starfive_cryp_dev *cryp = ctx->cryp; 640 - struct starfive_cryp_request_ctx *rctx; 641 - u32 block[AES_BLOCK_32]; 642 - u32 stat; 643 - int err; 644 - int i; 565 + struct starfive_cryp_request_ctx *rctx = aead_request_ctx(req); 566 + struct scatterlist _src[2], _dst[2]; 567 + int ret; 645 568 646 - err = starfive_aes_prepare_req(NULL, req); 647 - if (err) 648 - return err; 569 + cryp->req.areq = req; 570 + cryp->assoclen = req->assoclen; 571 + cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(req)); 649 572 650 - rctx = ctx->rctx; 573 + rctx->in_sg = scatterwalk_ffwd(_src, req->src, cryp->assoclen); 574 + if (req->src == req->dst) 575 + rctx->out_sg = rctx->in_sg; 576 + else 577 + rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen); 578 + 579 + if (is_encrypt(cryp)) { 580 + cryp->total_in = req->cryptlen; 581 + cryp->total_out = req->cryptlen; 582 + } else { 583 + cryp->total_in = req->cryptlen - cryp->authsize; 584 + cryp->total_out = cryp->total_in; 585 + scatterwalk_map_and_copy(cryp->tag_in, req->src, 586 + cryp->total_in + cryp->assoclen, 587 + cryp->authsize, 0); 588 + } 589 + 590 + if (cryp->assoclen) { 591 + rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL); 592 + if (!rctx->adata) 593 + return dev_err_probe(cryp->dev, -ENOMEM, 594 + "Failed to alloc memory for adata"); 595 + 596 + if (sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, cryp->assoclen), 597 + rctx->adata, cryp->assoclen) != cryp->assoclen) 598 + return -EINVAL; 599 + } 600 + 601 + if (cryp->total_in) 602 + sg_zero_buffer(rctx->in_sg, sg_nents(rctx->in_sg), 603 + sg_dma_len(rctx->in_sg) - cryp->total_in, 604 + cryp->total_in); 605 + 606 + ctx->rctx = rctx; 607 + 608 + ret = starfive_aes_hw_init(ctx); 609 + if (ret) 610 + return ret; 651 611 652 612 if (!cryp->assoclen) 653 613 goto write_text; 654 614 655 615 if ((cryp->flags & FLG_MODE_MASK) == 
STARFIVE_AES_MODE_CCM) 656 - cryp->err = starfive_aes_ccm_write_adata(ctx); 616 + ret = starfive_aes_ccm_write_adata(ctx); 657 617 else 658 - cryp->err = starfive_aes_gcm_write_adata(ctx); 618 + ret = starfive_aes_gcm_write_adata(ctx); 659 619 660 620 kfree(rctx->adata); 661 621 662 - if (cryp->err) 663 - return cryp->err; 622 + if (ret) 623 + return ret; 664 624 665 625 write_text: 666 626 if (!cryp->total_in) 667 627 goto finish_req; 668 628 669 - /* 670 - * Write first plain/ciphertext block to start the module 671 - * then let irq tasklet handle the rest of the data blocks. 672 - */ 673 - scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE, 674 - cryp->total_in), 0); 675 - cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in); 629 + starfive_aes_dma_init(cryp); 676 630 677 - for (i = 0; i < AES_BLOCK_32; i++) 678 - writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R); 679 - 680 - stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET); 681 - stat &= ~STARFIVE_IE_MASK_AES_DONE; 682 - writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET); 683 - 684 - return 0; 631 + ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg); 632 + if (ret) 633 + return ret; 685 634 686 635 finish_req: 687 - starfive_aes_finish_req(cryp); 636 + starfive_aes_finish_req(ctx); 688 637 return 0; 689 638 } 690 639 691 - static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm) 640 + static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm, 641 + const char *alg_name) 692 642 { 693 643 struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm); 694 - struct starfive_cryp_dev *cryp = ctx->cryp; 695 - struct crypto_tfm *aead = crypto_aead_tfm(tfm); 696 - struct crypto_alg *alg = aead->__crt_alg; 697 644 698 645 ctx->cryp = starfive_cryp_find_dev(ctx); 699 646 if (!ctx->cryp) 700 647 return -ENODEV; 701 648 702 - if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) { 703 - ctx->aead_fbk = crypto_alloc_aead(alg->cra_name, 0, 704 - CRYPTO_ALG_NEED_FALLBACK); 705 - if 
(IS_ERR(ctx->aead_fbk)) 706 - return dev_err_probe(cryp->dev, PTR_ERR(ctx->aead_fbk), 707 - "%s() failed to allocate fallback for %s\n", 708 - __func__, alg->cra_name); 709 - } 649 + ctx->aead_fbk = crypto_alloc_aead(alg_name, 0, 650 + CRYPTO_ALG_NEED_FALLBACK); 651 + if (IS_ERR(ctx->aead_fbk)) 652 + return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->aead_fbk), 653 + "%s() failed to allocate fallback for %s\n", 654 + __func__, alg_name); 710 655 711 - crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_ctx) + 712 - sizeof(struct aead_request)); 656 + crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) + 657 + crypto_aead_reqsize(ctx->aead_fbk)); 713 658 714 659 return 0; 715 660 } ··· 739 644 struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm); 740 645 741 646 crypto_free_aead(ctx->aead_fbk); 647 + } 648 + 649 + static bool starfive_aes_check_unaligned(struct starfive_cryp_dev *cryp, 650 + struct scatterlist *src, 651 + struct scatterlist *dst) 652 + { 653 + struct scatterlist *tsg; 654 + int i; 655 + 656 + for_each_sg(src, tsg, sg_nents(src), i) 657 + if (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) && 658 + !sg_is_last(tsg)) 659 + return true; 660 + 661 + if (src != dst) 662 + for_each_sg(dst, tsg, sg_nents(dst), i) 663 + if (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) && 664 + !sg_is_last(tsg)) 665 + return true; 666 + 667 + return false; 668 + } 669 + 670 + static int starfive_aes_do_fallback(struct skcipher_request *req, bool enc) 671 + { 672 + struct starfive_cryp_ctx *ctx = 673 + crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)); 674 + struct skcipher_request *subreq = skcipher_request_ctx(req); 675 + 676 + skcipher_request_set_tfm(subreq, ctx->skcipher_fbk); 677 + skcipher_request_set_callback(subreq, req->base.flags, 678 + req->base.complete, 679 + req->base.data); 680 + skcipher_request_set_crypt(subreq, req->src, req->dst, 681 + req->cryptlen, req->iv); 682 + 683 + return enc ? 
crypto_skcipher_encrypt(subreq) : 684 + crypto_skcipher_decrypt(subreq); 742 685 } 743 686 744 687 static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags) ··· 793 660 if (req->cryptlen & blocksize_align) 794 661 return -EINVAL; 795 662 663 + if (starfive_aes_check_unaligned(cryp, req->src, req->dst)) 664 + return starfive_aes_do_fallback(req, is_encrypt(cryp)); 665 + 796 666 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req); 667 + } 668 + 669 + static int starfive_aes_aead_do_fallback(struct aead_request *req, bool enc) 670 + { 671 + struct starfive_cryp_ctx *ctx = 672 + crypto_aead_ctx(crypto_aead_reqtfm(req)); 673 + struct aead_request *subreq = aead_request_ctx(req); 674 + 675 + aead_request_set_tfm(subreq, ctx->aead_fbk); 676 + aead_request_set_callback(subreq, req->base.flags, 677 + req->base.complete, 678 + req->base.data); 679 + aead_request_set_crypt(subreq, req->src, req->dst, 680 + req->cryptlen, req->iv); 681 + aead_request_set_ad(subreq, req->assoclen); 682 + 683 + return enc ? crypto_aead_encrypt(subreq) : 684 + crypto_aead_decrypt(subreq); 797 685 } 798 686 799 687 static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags) 800 688 { 801 689 struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 802 690 struct starfive_cryp_dev *cryp = ctx->cryp; 691 + struct scatterlist *src, *dst, _src[2], _dst[2]; 803 692 804 693 cryp->flags = flags; 805 694 806 - /* 807 - * HW engine could not perform CCM tag verification on 808 - * non-blocksize aligned text, use fallback algo instead 695 + /* aes-ccm does not support tag verification for non-aligned text, 696 + * use fallback for ccm decryption instead. 
809 697 */ 810 - if (ctx->aead_fbk && !is_encrypt(cryp)) { 811 - struct aead_request *subreq = aead_request_ctx(req); 698 + if (((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM) && 699 + !is_encrypt(cryp)) 700 + return starfive_aes_aead_do_fallback(req, 0); 812 701 813 - aead_request_set_tfm(subreq, ctx->aead_fbk); 814 - aead_request_set_callback(subreq, req->base.flags, 815 - req->base.complete, req->base.data); 816 - aead_request_set_crypt(subreq, req->src, 817 - req->dst, req->cryptlen, req->iv); 818 - aead_request_set_ad(subreq, req->assoclen); 702 + src = scatterwalk_ffwd(_src, req->src, req->assoclen); 819 703 820 - return crypto_aead_decrypt(subreq); 821 - } 704 + if (req->src == req->dst) 705 + dst = src; 706 + else 707 + dst = scatterwalk_ffwd(_dst, req->dst, req->assoclen); 708 + 709 + if (starfive_aes_check_unaligned(cryp, src, dst)) 710 + return starfive_aes_aead_do_fallback(req, is_encrypt(cryp)); 822 711 823 712 return crypto_transfer_aead_request_to_engine(cryp->engine, req); 824 713 } ··· 861 706 memcpy(ctx->key, key, keylen); 862 707 ctx->keylen = keylen; 863 708 864 - return 0; 709 + return crypto_skcipher_setkey(ctx->skcipher_fbk, key, keylen); 865 710 } 866 711 867 712 static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key, ··· 880 725 memcpy(ctx->key, key, keylen); 881 726 ctx->keylen = keylen; 882 727 883 - if (ctx->aead_fbk) 884 - return crypto_aead_setkey(ctx->aead_fbk, key, keylen); 885 - 886 - return 0; 728 + return crypto_aead_setkey(ctx->aead_fbk, key, keylen); 887 729 } 888 730 889 731 static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm, 890 732 unsigned int authsize) 891 733 { 892 - return crypto_gcm_check_authsize(authsize); 734 + struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm); 735 + int ret; 736 + 737 + ret = crypto_gcm_check_authsize(authsize); 738 + if (ret) 739 + return ret; 740 + 741 + return crypto_aead_setauthsize(ctx->aead_fbk, authsize); 893 742 } 894 743 895 744 static int 
starfive_aes_ccm_setauthsize(struct crypto_aead *tfm, ··· 979 820 return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM); 980 821 } 981 822 823 + static int starfive_aes_ecb_init_tfm(struct crypto_skcipher *tfm) 824 + { 825 + return starfive_aes_init_tfm(tfm, "ecb(aes-generic)"); 826 + } 827 + 828 + static int starfive_aes_cbc_init_tfm(struct crypto_skcipher *tfm) 829 + { 830 + return starfive_aes_init_tfm(tfm, "cbc(aes-generic)"); 831 + } 832 + 833 + static int starfive_aes_ctr_init_tfm(struct crypto_skcipher *tfm) 834 + { 835 + return starfive_aes_init_tfm(tfm, "ctr(aes-generic)"); 836 + } 837 + 838 + static int starfive_aes_ccm_init_tfm(struct crypto_aead *tfm) 839 + { 840 + return starfive_aes_aead_init_tfm(tfm, "ccm_base(ctr(aes-generic),cbcmac(aes-generic))"); 841 + } 842 + 843 + static int starfive_aes_gcm_init_tfm(struct crypto_aead *tfm) 844 + { 845 + return starfive_aes_aead_init_tfm(tfm, "gcm_base(ctr(aes-generic),ghash-generic)"); 846 + } 847 + 982 848 static struct skcipher_engine_alg skcipher_algs[] = { 983 849 { 984 - .base.init = starfive_aes_init_tfm, 850 + .base.init = starfive_aes_ecb_init_tfm, 851 + .base.exit = starfive_aes_exit_tfm, 985 852 .base.setkey = starfive_aes_setkey, 986 853 .base.encrypt = starfive_aes_ecb_encrypt, 987 854 .base.decrypt = starfive_aes_ecb_decrypt, ··· 1017 832 .cra_name = "ecb(aes)", 1018 833 .cra_driver_name = "starfive-ecb-aes", 1019 834 .cra_priority = 200, 1020 - .cra_flags = CRYPTO_ALG_ASYNC, 835 + .cra_flags = CRYPTO_ALG_ASYNC | 836 + CRYPTO_ALG_NEED_FALLBACK, 1021 837 .cra_blocksize = AES_BLOCK_SIZE, 1022 838 .cra_ctxsize = sizeof(struct starfive_cryp_ctx), 1023 839 .cra_alignmask = 0xf, ··· 1028 842 .do_one_request = starfive_aes_do_one_req, 1029 843 }, 1030 844 }, { 1031 - .base.init = starfive_aes_init_tfm, 845 + .base.init = starfive_aes_cbc_init_tfm, 846 + .base.exit = starfive_aes_exit_tfm, 1032 847 .base.setkey = starfive_aes_setkey, 1033 848 .base.encrypt = starfive_aes_cbc_encrypt, 1034 849 
.base.decrypt = starfive_aes_cbc_decrypt, ··· 1040 853 .cra_name = "cbc(aes)", 1041 854 .cra_driver_name = "starfive-cbc-aes", 1042 855 .cra_priority = 200, 1043 - .cra_flags = CRYPTO_ALG_ASYNC, 856 + .cra_flags = CRYPTO_ALG_ASYNC | 857 + CRYPTO_ALG_NEED_FALLBACK, 1044 858 .cra_blocksize = AES_BLOCK_SIZE, 1045 859 .cra_ctxsize = sizeof(struct starfive_cryp_ctx), 1046 860 .cra_alignmask = 0xf, ··· 1051 863 .do_one_request = starfive_aes_do_one_req, 1052 864 }, 1053 865 }, { 1054 - .base.init = starfive_aes_init_tfm, 866 + .base.init = starfive_aes_ctr_init_tfm, 867 + .base.exit = starfive_aes_exit_tfm, 1055 868 .base.setkey = starfive_aes_setkey, 1056 869 .base.encrypt = starfive_aes_ctr_encrypt, 1057 870 .base.decrypt = starfive_aes_ctr_decrypt, ··· 1063 874 .cra_name = "ctr(aes)", 1064 875 .cra_driver_name = "starfive-ctr-aes", 1065 876 .cra_priority = 200, 1066 - .cra_flags = CRYPTO_ALG_ASYNC, 877 + .cra_flags = CRYPTO_ALG_ASYNC | 878 + CRYPTO_ALG_NEED_FALLBACK, 1067 879 .cra_blocksize = 1, 1068 880 .cra_ctxsize = sizeof(struct starfive_cryp_ctx), 1069 881 .cra_alignmask = 0xf, ··· 1082 892 .base.setauthsize = starfive_aes_gcm_setauthsize, 1083 893 .base.encrypt = starfive_aes_gcm_encrypt, 1084 894 .base.decrypt = starfive_aes_gcm_decrypt, 1085 - .base.init = starfive_aes_aead_init_tfm, 895 + .base.init = starfive_aes_gcm_init_tfm, 1086 896 .base.exit = starfive_aes_aead_exit_tfm, 1087 897 .base.ivsize = GCM_AES_IV_SIZE, 1088 898 .base.maxauthsize = AES_BLOCK_SIZE, ··· 1090 900 .cra_name = "gcm(aes)", 1091 901 .cra_driver_name = "starfive-gcm-aes", 1092 902 .cra_priority = 200, 1093 - .cra_flags = CRYPTO_ALG_ASYNC, 903 + .cra_flags = CRYPTO_ALG_ASYNC | 904 + CRYPTO_ALG_NEED_FALLBACK, 1094 905 .cra_blocksize = 1, 1095 906 .cra_ctxsize = sizeof(struct starfive_cryp_ctx), 1096 907 .cra_alignmask = 0xf, ··· 1105 914 .base.setauthsize = starfive_aes_ccm_setauthsize, 1106 915 .base.encrypt = starfive_aes_ccm_encrypt, 1107 916 .base.decrypt = starfive_aes_ccm_decrypt, 
1108 - .base.init = starfive_aes_aead_init_tfm, 917 + .base.init = starfive_aes_ccm_init_tfm, 1109 918 .base.exit = starfive_aes_aead_exit_tfm, 1110 919 .base.ivsize = AES_BLOCK_SIZE, 1111 920 .base.maxauthsize = AES_BLOCK_SIZE,
-34
drivers/crypto/starfive/jh7110-cryp.c
··· 89 89 dma_release_channel(cryp->rx); 90 90 } 91 91 92 - static irqreturn_t starfive_cryp_irq(int irq, void *priv) 93 - { 94 - u32 status; 95 - u32 mask; 96 - struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)priv; 97 - 98 - mask = readl(cryp->base + STARFIVE_IE_MASK_OFFSET); 99 - status = readl(cryp->base + STARFIVE_IE_FLAG_OFFSET); 100 - if (status & STARFIVE_IE_FLAG_AES_DONE) { 101 - mask |= STARFIVE_IE_MASK_AES_DONE; 102 - writel(mask, cryp->base + STARFIVE_IE_MASK_OFFSET); 103 - tasklet_schedule(&cryp->aes_done); 104 - } 105 - 106 - return IRQ_HANDLED; 107 - } 108 - 109 92 static int starfive_cryp_probe(struct platform_device *pdev) 110 93 { 111 94 struct starfive_cryp_dev *cryp; 112 95 struct resource *res; 113 - int irq; 114 96 int ret; 115 97 116 98 cryp = devm_kzalloc(&pdev->dev, sizeof(*cryp), GFP_KERNEL); ··· 106 124 if (IS_ERR(cryp->base)) 107 125 return dev_err_probe(&pdev->dev, PTR_ERR(cryp->base), 108 126 "Error remapping memory for platform device\n"); 109 - 110 - tasklet_init(&cryp->aes_done, starfive_aes_done_task, (unsigned long)cryp); 111 127 112 128 cryp->phys_base = res->start; 113 129 cryp->dma_maxburst = 32; ··· 125 145 if (IS_ERR(cryp->rst)) 126 146 return dev_err_probe(&pdev->dev, PTR_ERR(cryp->rst), 127 147 "Error getting hardware reset line\n"); 128 - 129 - irq = platform_get_irq(pdev, 0); 130 - if (irq < 0) 131 - return irq; 132 - 133 - ret = devm_request_irq(&pdev->dev, irq, starfive_cryp_irq, 0, pdev->name, 134 - (void *)cryp); 135 - if (ret) 136 - return dev_err_probe(&pdev->dev, ret, 137 - "Failed to register interrupt handler\n"); 138 148 139 149 clk_prepare_enable(cryp->hclk); 140 150 clk_prepare_enable(cryp->ahb); ··· 182 212 clk_disable_unprepare(cryp->ahb); 183 213 reset_control_assert(cryp->rst); 184 214 185 - tasklet_kill(&cryp->aes_done); 186 - 187 215 return ret; 188 216 } 189 217 ··· 192 224 starfive_aes_unregister_algs(); 193 225 starfive_hash_unregister_algs(); 194 226 starfive_rsa_unregister_algs(); 195 - 
196 - tasklet_kill(&cryp->aes_done); 197 227 198 228 crypto_engine_stop(cryp->engine); 199 229 crypto_engine_exit(cryp->engine);
+1 -5
drivers/crypto/starfive/jh7110-cryp.h
··· 169 169 struct crypto_akcipher *akcipher_fbk; 170 170 struct crypto_ahash *ahash_fbk; 171 171 struct crypto_aead *aead_fbk; 172 + struct crypto_skcipher *skcipher_fbk; 172 173 }; 173 174 174 175 struct starfive_cryp_dev { ··· 187 186 struct dma_chan *rx; 188 187 struct dma_slave_config cfg_in; 189 188 struct dma_slave_config cfg_out; 190 - struct scatter_walk in_walk; 191 - struct scatter_walk out_walk; 192 189 struct crypto_engine *engine; 193 - struct tasklet_struct aes_done; 194 190 struct completion dma_done; 195 191 size_t assoclen; 196 192 size_t total_in; ··· 235 237 236 238 int starfive_aes_register_algs(void); 237 239 void starfive_aes_unregister_algs(void); 238 - 239 - void starfive_aes_done_task(unsigned long param); 240 240 #endif