Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: x86/sm4 - stop using the SIMD helper

Stop wrapping skcipher and aead algorithms with the crypto SIMD helper
(crypto/simd.c). The only purpose of doing so was to work around x86
not always supporting kernel-mode FPU in softirqs. Specifically, if a
hardirq interrupted a task context kernel-mode FPU section and then
softirqs were run at the end of that hardirq, those softirqs could not
use kernel-mode FPU. This has now been fixed. In combination with the
fact that the skcipher and aead APIs only support task and softirq
contexts, these can now just use kernel-mode FPU unconditionally on x86.

This simplifies the code and improves performance.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Eric Biggers and committed by
Herbert Xu
982b72cd cc01d284

+22 -42
-2
arch/x86/crypto/Kconfig
··· 192 192 tristate "Ciphers: SM4 with modes: ECB, CBC, CTR (AES-NI/AVX)" 193 193 depends on X86 && 64BIT 194 194 select CRYPTO_SKCIPHER 195 - select CRYPTO_SIMD 196 195 select CRYPTO_ALGAPI 197 196 select CRYPTO_SM4 198 197 help ··· 212 213 tristate "Ciphers: SM4 with modes: ECB, CBC, CTR (AES-NI/AVX2)" 213 214 depends on X86 && 64BIT 214 215 select CRYPTO_SKCIPHER 215 - select CRYPTO_SIMD 216 216 select CRYPTO_ALGAPI 217 217 select CRYPTO_SM4 218 218 select CRYPTO_SM4_AESNI_AVX_X86_64
+11 -20
arch/x86/crypto/sm4_aesni_avx2_glue.c
··· 8 8 * Copyright (c) 2021 Tianjia Zhang <tianjia.zhang@linux.alibaba.com> 9 9 */ 10 10 11 + #include <asm/fpu/api.h> 11 12 #include <linux/module.h> 12 13 #include <linux/crypto.h> 13 14 #include <linux/kernel.h> 14 - #include <asm/simd.h> 15 - #include <crypto/internal/simd.h> 16 15 #include <crypto/internal/skcipher.h> 17 16 #include <crypto/sm4.h> 18 17 #include "sm4-avx.h" ··· 47 48 static struct skcipher_alg sm4_aesni_avx2_skciphers[] = { 48 49 { 49 50 .base = { 50 - .cra_name = "__ecb(sm4)", 51 - .cra_driver_name = "__ecb-sm4-aesni-avx2", 51 + .cra_name = "ecb(sm4)", 52 + .cra_driver_name = "ecb-sm4-aesni-avx2", 52 53 .cra_priority = 500, 53 - .cra_flags = CRYPTO_ALG_INTERNAL, 54 54 .cra_blocksize = SM4_BLOCK_SIZE, 55 55 .cra_ctxsize = sizeof(struct sm4_ctx), 56 56 .cra_module = THIS_MODULE, ··· 62 64 .decrypt = sm4_avx_ecb_decrypt, 63 65 }, { 64 66 .base = { 65 - .cra_name = "__cbc(sm4)", 66 - .cra_driver_name = "__cbc-sm4-aesni-avx2", 67 + .cra_name = "cbc(sm4)", 68 + .cra_driver_name = "cbc-sm4-aesni-avx2", 67 69 .cra_priority = 500, 68 - .cra_flags = CRYPTO_ALG_INTERNAL, 69 70 .cra_blocksize = SM4_BLOCK_SIZE, 70 71 .cra_ctxsize = sizeof(struct sm4_ctx), 71 72 .cra_module = THIS_MODULE, ··· 78 81 .decrypt = cbc_decrypt, 79 82 }, { 80 83 .base = { 81 - .cra_name = "__ctr(sm4)", 82 - .cra_driver_name = "__ctr-sm4-aesni-avx2", 84 + .cra_name = "ctr(sm4)", 85 + .cra_driver_name = "ctr-sm4-aesni-avx2", 83 86 .cra_priority = 500, 84 - .cra_flags = CRYPTO_ALG_INTERNAL, 85 87 .cra_blocksize = 1, 86 88 .cra_ctxsize = sizeof(struct sm4_ctx), 87 89 .cra_module = THIS_MODULE, ··· 95 99 .decrypt = ctr_crypt, 96 100 } 97 101 }; 98 - 99 - static struct simd_skcipher_alg * 100 - simd_sm4_aesni_avx2_skciphers[ARRAY_SIZE(sm4_aesni_avx2_skciphers)]; 101 102 102 103 static int __init sm4_init(void) 103 104 { ··· 114 121 return -ENODEV; 115 122 } 116 123 117 - return simd_register_skciphers_compat(sm4_aesni_avx2_skciphers, 118 - ARRAY_SIZE(sm4_aesni_avx2_skciphers), 119 - 
simd_sm4_aesni_avx2_skciphers); 124 + return crypto_register_skciphers(sm4_aesni_avx2_skciphers, 125 + ARRAY_SIZE(sm4_aesni_avx2_skciphers)); 120 126 } 121 127 122 128 static void __exit sm4_exit(void) 123 129 { 124 - simd_unregister_skciphers(sm4_aesni_avx2_skciphers, 125 - ARRAY_SIZE(sm4_aesni_avx2_skciphers), 126 - simd_sm4_aesni_avx2_skciphers); 130 + crypto_unregister_skciphers(sm4_aesni_avx2_skciphers, 131 + ARRAY_SIZE(sm4_aesni_avx2_skciphers)); 127 132 } 128 133 129 134 module_init(sm4_init);
+11 -20
arch/x86/crypto/sm4_aesni_avx_glue.c
··· 8 8 * Copyright (c) 2021 Tianjia Zhang <tianjia.zhang@linux.alibaba.com> 9 9 */ 10 10 11 + #include <asm/fpu/api.h> 11 12 #include <linux/module.h> 12 13 #include <linux/crypto.h> 13 14 #include <linux/kernel.h> 14 - #include <asm/simd.h> 15 - #include <crypto/internal/simd.h> 16 15 #include <crypto/internal/skcipher.h> 17 16 #include <crypto/sm4.h> 18 17 #include "sm4-avx.h" ··· 262 263 static struct skcipher_alg sm4_aesni_avx_skciphers[] = { 263 264 { 264 265 .base = { 265 - .cra_name = "__ecb(sm4)", 266 - .cra_driver_name = "__ecb-sm4-aesni-avx", 266 + .cra_name = "ecb(sm4)", 267 + .cra_driver_name = "ecb-sm4-aesni-avx", 267 268 .cra_priority = 400, 268 - .cra_flags = CRYPTO_ALG_INTERNAL, 269 269 .cra_blocksize = SM4_BLOCK_SIZE, 270 270 .cra_ctxsize = sizeof(struct sm4_ctx), 271 271 .cra_module = THIS_MODULE, ··· 277 279 .decrypt = sm4_avx_ecb_decrypt, 278 280 }, { 279 281 .base = { 280 - .cra_name = "__cbc(sm4)", 281 - .cra_driver_name = "__cbc-sm4-aesni-avx", 282 + .cra_name = "cbc(sm4)", 283 + .cra_driver_name = "cbc-sm4-aesni-avx", 282 284 .cra_priority = 400, 283 - .cra_flags = CRYPTO_ALG_INTERNAL, 284 285 .cra_blocksize = SM4_BLOCK_SIZE, 285 286 .cra_ctxsize = sizeof(struct sm4_ctx), 286 287 .cra_module = THIS_MODULE, ··· 293 296 .decrypt = cbc_decrypt, 294 297 }, { 295 298 .base = { 296 - .cra_name = "__ctr(sm4)", 297 - .cra_driver_name = "__ctr-sm4-aesni-avx", 299 + .cra_name = "ctr(sm4)", 300 + .cra_driver_name = "ctr-sm4-aesni-avx", 298 301 .cra_priority = 400, 299 - .cra_flags = CRYPTO_ALG_INTERNAL, 300 302 .cra_blocksize = 1, 301 303 .cra_ctxsize = sizeof(struct sm4_ctx), 302 304 .cra_module = THIS_MODULE, ··· 310 314 .decrypt = ctr_crypt, 311 315 } 312 316 }; 313 - 314 - static struct simd_skcipher_alg * 315 - simd_sm4_aesni_avx_skciphers[ARRAY_SIZE(sm4_aesni_avx_skciphers)]; 316 317 317 318 static int __init sm4_init(void) 318 319 { ··· 328 335 return -ENODEV; 329 336 } 330 337 331 - return simd_register_skciphers_compat(sm4_aesni_avx_skciphers, 332 - ARRAY_SIZE(sm4_aesni_avx_skciphers), 333 - simd_sm4_aesni_avx_skciphers); 338 + return crypto_register_skciphers(sm4_aesni_avx_skciphers, 339 + ARRAY_SIZE(sm4_aesni_avx_skciphers)); 334 340 } 335 341 336 342 static void __exit sm4_exit(void) 337 343 { 338 - simd_unregister_skciphers(sm4_aesni_avx_skciphers, 339 - ARRAY_SIZE(sm4_aesni_avx_skciphers), 340 - simd_sm4_aesni_avx_skciphers); 344 + crypto_unregister_skciphers(sm4_aesni_avx_skciphers, 345 + ARRAY_SIZE(sm4_aesni_avx_skciphers)); 341 346 } 342 347 343 348 module_init(sm4_init);