Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: arm64/aes - Use API partial block handling

Use the Crypto API partial block handling.

Also remove the unnecessary SIMD fallback path.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Herbert Xu 4dc3c40c 566ec9ad

+41 -81
arch/arm64/crypto/aes-glue.c
··· 5 5 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org> 6 6 */ 7 7 8 - #include <asm/neon.h> 9 8 #include <asm/hwcap.h> 10 - #include <asm/simd.h> 9 + #include <asm/neon.h> 11 10 #include <crypto/aes.h> 12 11 #include <crypto/ctr.h> 13 - #include <crypto/sha2.h> 14 12 #include <crypto/internal/hash.h> 15 - #include <crypto/internal/simd.h> 16 13 #include <crypto/internal/skcipher.h> 17 14 #include <crypto/scatterwalk.h> 18 - #include <linux/module.h> 19 - #include <linux/cpufeature.h> 15 + #include <crypto/sha2.h> 16 + #include <crypto/utils.h> 20 17 #include <crypto/xts.h> 18 + #include <linux/cpufeature.h> 19 + #include <linux/kernel.h> 20 + #include <linux/module.h> 21 + #include <linux/string.h> 21 22 22 23 #include "aes-ce-setkey.h" 23 24 ··· 131 130 }; 132 131 133 132 struct mac_desc_ctx { 134 - unsigned int len; 135 133 u8 dg[AES_BLOCK_SIZE]; 136 134 }; 137 135 ··· 869 869 struct mac_desc_ctx *ctx = shash_desc_ctx(desc); 870 870 871 871 memset(ctx->dg, 0, AES_BLOCK_SIZE); 872 - ctx->len = 0; 873 - 874 872 return 0; 875 873 } 876 874 877 875 static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks, 878 - u8 dg[], int enc_before, int enc_after) 876 + u8 dg[], int enc_before) 879 877 { 880 878 int rounds = 6 + ctx->key_length / 4; 879 + int rem; 881 880 882 - if (crypto_simd_usable()) { 883 - int rem; 884 - 885 - do { 886 - kernel_neon_begin(); 887 - rem = aes_mac_update(in, ctx->key_enc, rounds, blocks, 888 - dg, enc_before, enc_after); 889 - kernel_neon_end(); 890 - in += (blocks - rem) * AES_BLOCK_SIZE; 891 - blocks = rem; 892 - enc_before = 0; 893 - } while (blocks); 894 - } else { 895 - if (enc_before) 896 - aes_encrypt(ctx, dg, dg); 897 - 898 - while (blocks--) { 899 - crypto_xor(dg, in, AES_BLOCK_SIZE); 900 - in += AES_BLOCK_SIZE; 901 - 902 - if (blocks || enc_after) 903 - aes_encrypt(ctx, dg, dg); 904 - } 905 - } 881 + do { 882 + kernel_neon_begin(); 883 + rem = aes_mac_update(in, ctx->key_enc, rounds, blocks, 
884 + dg, enc_before, !enc_before); 885 + kernel_neon_end(); 886 + in += (blocks - rem) * AES_BLOCK_SIZE; 887 + blocks = rem; 888 + } while (blocks); 906 889 } 907 890 908 891 static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len) 909 892 { 910 893 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); 911 894 struct mac_desc_ctx *ctx = shash_desc_ctx(desc); 895 + int blocks = len / AES_BLOCK_SIZE; 912 896 913 - while (len > 0) { 914 - unsigned int l; 915 - 916 - if ((ctx->len % AES_BLOCK_SIZE) == 0 && 917 - (ctx->len + len) > AES_BLOCK_SIZE) { 918 - 919 - int blocks = len / AES_BLOCK_SIZE; 920 - 921 - len %= AES_BLOCK_SIZE; 922 - 923 - mac_do_update(&tctx->key, p, blocks, ctx->dg, 924 - (ctx->len != 0), (len != 0)); 925 - 926 - p += blocks * AES_BLOCK_SIZE; 927 - 928 - if (!len) { 929 - ctx->len = AES_BLOCK_SIZE; 930 - break; 931 - } 932 - ctx->len = 0; 933 - } 934 - 935 - l = min(len, AES_BLOCK_SIZE - ctx->len); 936 - 937 - if (l <= AES_BLOCK_SIZE) { 938 - crypto_xor(ctx->dg + ctx->len, p, l); 939 - ctx->len += l; 940 - len -= l; 941 - p += l; 942 - } 943 - } 944 - 945 - return 0; 897 + len %= AES_BLOCK_SIZE; 898 + mac_do_update(&tctx->key, p, blocks, ctx->dg, 0); 899 + return len; 946 900 } 947 901 948 - static int cbcmac_final(struct shash_desc *desc, u8 *out) 902 + static int cbcmac_finup(struct shash_desc *desc, const u8 *src, 903 + unsigned int len, u8 *out) 949 904 { 950 905 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); 951 906 struct mac_desc_ctx *ctx = shash_desc_ctx(desc); 952 907 953 - mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0); 954 - 908 + if (len) { 909 + crypto_xor(ctx->dg, src, len); 910 + mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1); 911 + } 955 912 memcpy(out, ctx->dg, AES_BLOCK_SIZE); 956 - 957 913 return 0; 958 914 } 959 915 960 - static int cmac_final(struct shash_desc *desc, u8 *out) 916 + static int cmac_finup(struct shash_desc *desc, const u8 *src, unsigned int len, 917 + u8 *out) 961 
918 { 962 919 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); 963 920 struct mac_desc_ctx *ctx = shash_desc_ctx(desc); 964 921 u8 *consts = tctx->consts; 965 922 966 - if (ctx->len != AES_BLOCK_SIZE) { 967 - ctx->dg[ctx->len] ^= 0x80; 923 + crypto_xor(ctx->dg, src, len); 924 + if (len != AES_BLOCK_SIZE) { 925 + ctx->dg[len] ^= 0x80; 968 926 consts += AES_BLOCK_SIZE; 969 927 } 970 - 971 - mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1); 972 - 928 + mac_do_update(&tctx->key, consts, 1, ctx->dg, 0); 973 929 memcpy(out, ctx->dg, AES_BLOCK_SIZE); 974 - 975 930 return 0; 976 931 } 977 932 ··· 934 979 .base.cra_name = "cmac(aes)", 935 980 .base.cra_driver_name = "cmac-aes-" MODE, 936 981 .base.cra_priority = PRIO, 982 + .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY | 983 + CRYPTO_AHASH_ALG_FINAL_NONZERO, 937 984 .base.cra_blocksize = AES_BLOCK_SIZE, 938 985 .base.cra_ctxsize = sizeof(struct mac_tfm_ctx) + 939 986 2 * AES_BLOCK_SIZE, ··· 944 987 .digestsize = AES_BLOCK_SIZE, 945 988 .init = mac_init, 946 989 .update = mac_update, 947 - .final = cmac_final, 990 + .finup = cmac_finup, 948 991 .setkey = cmac_setkey, 949 992 .descsize = sizeof(struct mac_desc_ctx), 950 993 }, { 951 994 .base.cra_name = "xcbc(aes)", 952 995 .base.cra_driver_name = "xcbc-aes-" MODE, 953 996 .base.cra_priority = PRIO, 997 + .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY | 998 + CRYPTO_AHASH_ALG_FINAL_NONZERO, 954 999 .base.cra_blocksize = AES_BLOCK_SIZE, 955 1000 .base.cra_ctxsize = sizeof(struct mac_tfm_ctx) + 956 1001 2 * AES_BLOCK_SIZE, ··· 961 1002 .digestsize = AES_BLOCK_SIZE, 962 1003 .init = mac_init, 963 1004 .update = mac_update, 964 - .final = cmac_final, 1005 + .finup = cmac_finup, 965 1006 .setkey = xcbc_setkey, 966 1007 .descsize = sizeof(struct mac_desc_ctx), 967 1008 }, { 968 1009 .base.cra_name = "cbcmac(aes)", 969 1010 .base.cra_driver_name = "cbcmac-aes-" MODE, 970 1011 .base.cra_priority = PRIO, 1012 + .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY, 971 1013 
.base.cra_blocksize = AES_BLOCK_SIZE, 972 1014 .base.cra_ctxsize = sizeof(struct mac_tfm_ctx), 973 1015 .base.cra_module = THIS_MODULE, ··· 976 1016 .digestsize = AES_BLOCK_SIZE, 977 1017 .init = mac_init, 978 1018 .update = mac_update, 979 - .final = cbcmac_final, 1019 + .finup = cbcmac_finup, 980 1020 .setkey = cbcmac_setkey, 981 1021 .descsize = sizeof(struct mac_desc_ctx), 982 1022 } };