Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

dm-crypt: switch to using the crc32 library

Now that the crc32() library function takes advantage of
architecture-specific optimizations, it is unnecessary to go through the
crypto API. Just use crc32(). This is much simpler, and it improves
performance due to eliminating the crypto API overhead. (However, this
only affects the TCW IV mode of dm-crypt, which is a compatibility mode
that is rarely used compared to other dm-crypt modes.)

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Mikulas Patocka <mpatocka@redhat.com>

authored by

Eric Biggers and committed by
Mikulas Patocka
f656fa40 2014c95a

+11 -31
+1
drivers/md/Kconfig
@@ -267,6 +267,7 @@
 	depends on BLK_DEV_DM
 	depends on (ENCRYPTED_KEYS || ENCRYPTED_KEYS=n)
 	depends on (TRUSTED_KEYS || TRUSTED_KEYS=n)
+	select CRC32
 	select CRYPTO
 	select CRYPTO_CBC
 	select CRYPTO_ESSIV
+10 -31
drivers/md/dm-crypt.c
@@ -17,6 +17,7 @@
 #include <linux/bio.h>
 #include <linux/blkdev.h>
 #include <linux/blk-integrity.h>
+#include <linux/crc32.h>
 #include <linux/mempool.h>
 #include <linux/slab.h>
 #include <linux/crypto.h>
@@ -126,7 +125,6 @@
 
 #define TCW_WHITENING_SIZE 16
 struct iv_tcw_private {
-	struct crypto_shash *crc32_tfm;
 	u8 *iv_seed;
 	u8 *whitening;
 };
@@ -607,10 +607,6 @@
 	tcw->iv_seed = NULL;
 	kfree_sensitive(tcw->whitening);
 	tcw->whitening = NULL;
-
-	if (tcw->crc32_tfm && !IS_ERR(tcw->crc32_tfm))
-		crypto_free_shash(tcw->crc32_tfm);
-	tcw->crc32_tfm = NULL;
 }
 
 static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti,
@@ -622,13 +626,6 @@
 	if (cc->key_size <= (cc->iv_size + TCW_WHITENING_SIZE)) {
 		ti->error = "Wrong key size for TCW";
 		return -EINVAL;
-	}
-
-	tcw->crc32_tfm = crypto_alloc_shash("crc32", 0,
-					    CRYPTO_ALG_ALLOCATES_MEMORY);
-	if (IS_ERR(tcw->crc32_tfm)) {
-		ti->error = "Error initializing CRC32 in TCW";
-		return PTR_ERR(tcw->crc32_tfm);
 	}
 
 	tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL);
@@ -657,34 +668,26 @@
 	return 0;
 }
 
-static int crypt_iv_tcw_whitening(struct crypt_config *cc,
-				  struct dm_crypt_request *dmreq,
-				  u8 *data)
+static void crypt_iv_tcw_whitening(struct crypt_config *cc,
+				   struct dm_crypt_request *dmreq, u8 *data)
 {
 	struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
 	__le64 sector = cpu_to_le64(dmreq->iv_sector);
 	u8 buf[TCW_WHITENING_SIZE];
-	SHASH_DESC_ON_STACK(desc, tcw->crc32_tfm);
-	int i, r;
+	int i;
 
 	/* xor whitening with sector number */
 	crypto_xor_cpy(buf, tcw->whitening, (u8 *)&sector, 8);
 	crypto_xor_cpy(&buf[8], tcw->whitening + 8, (u8 *)&sector, 8);
 
 	/* calculate crc32 for every 32bit part and xor it */
-	desc->tfm = tcw->crc32_tfm;
-	for (i = 0; i < 4; i++) {
-		r = crypto_shash_digest(desc, &buf[i * 4], 4, &buf[i * 4]);
-		if (r)
-			goto out;
-	}
+	for (i = 0; i < 4; i++)
+		put_unaligned_le32(crc32(0, &buf[i * 4], 4), &buf[i * 4]);
	crypto_xor(&buf[0], &buf[12], 4);
 	crypto_xor(&buf[4], &buf[8], 4);
 
 	/* apply whitening (8 bytes) to whole sector */
 	for (i = 0; i < ((1 << SECTOR_SHIFT) / 8); i++)
 		crypto_xor(data + i * 8, buf, 8);
-out:
 	memzero_explicit(buf, sizeof(buf));
-	return r;
 }
 
 static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
@@ -688,13 +707,12 @@
 	struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
 	__le64 sector = cpu_to_le64(dmreq->iv_sector);
 	u8 *src;
-	int r = 0;
 
 	/* Remove whitening from ciphertext */
 	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
 		sg = crypt_get_sg_data(cc, dmreq->sg_in);
 		src = kmap_local_page(sg_page(sg));
-		r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
+		crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
 		kunmap_local(src);
 	}
 
@@ -703,7 +723,7 @@
 	crypto_xor_cpy(&iv[8], tcw->iv_seed + 8, (u8 *)&sector,
 		       cc->iv_size - 8);
 
-	return r;
+	return 0;
 }
 
 static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv,
@@ -711,7 +731,6 @@
 {
 	struct scatterlist *sg;
 	u8 *dst;
-	int r;
 
 	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
 		return 0;
@@ -718,10 +739,10 @@
 	/* Apply whitening on ciphertext */
 	sg = crypt_get_sg_data(cc, dmreq->sg_out);
 	dst = kmap_local_page(sg_page(sg));
-	r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset);
+	crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset);
 	kunmap_local(dst);
 
-	return r;
+	return 0;
 }
 
 static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv,