Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: arch - conditionalize crypto api in arch glue for lib code

For glue code that's used by Zinc, the actual Crypto API functions might
not necessarily exist, and don't need to exist either. Before this
patch, there are valid build configurations that lead to an unbuildable
kernel. This fixes it to conditionalize those symbols on the existence
of the proper config entry.

Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Acked-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Jason A. Donenfeld and committed by
Herbert Xu
8394bfec 4ee812f6

+53 -32
+16 -10
arch/arm/crypto/chacha-glue.c
··· 286 286 287 287 static int __init chacha_simd_mod_init(void) 288 288 { 289 - int err; 289 + int err = 0; 290 290 291 - err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs)); 292 - if (err) 293 - return err; 291 + if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) { 292 + err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs)); 293 + if (err) 294 + return err; 295 + } 294 296 295 297 if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) { 296 298 int i; ··· 312 310 static_branch_enable(&use_neon); 313 311 } 314 312 315 - err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs)); 316 - if (err) 317 - crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs)); 313 + if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) { 314 + err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs)); 315 + if (err) 316 + crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs)); 317 + } 318 318 } 319 319 return err; 320 320 } 321 321 322 322 static void __exit chacha_simd_mod_fini(void) 323 323 { 324 - crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs)); 325 - if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) 326 - crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs)); 324 + if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) { 325 + crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs)); 326 + if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) 327 + crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs)); 328 + } 327 329 } 328 330 329 331 module_init(chacha_simd_mod_init);
+3 -2
arch/arm/crypto/curve25519-glue.c
··· 108 108 { 109 109 if (elf_hwcap & HWCAP_NEON) { 110 110 static_branch_enable(&have_neon); 111 - return crypto_register_kpp(&curve25519_alg); 111 + return IS_REACHABLE(CONFIG_CRYPTO_KPP) ? 112 + crypto_register_kpp(&curve25519_alg) : 0; 112 113 } 113 114 return 0; 114 115 } 115 116 116 117 static void __exit mod_exit(void) 117 118 { 118 - if (elf_hwcap & HWCAP_NEON) 119 + if (IS_REACHABLE(CONFIG_CRYPTO_KPP) && elf_hwcap & HWCAP_NEON) 119 120 crypto_unregister_kpp(&curve25519_alg); 120 121 } 121 122
+6 -3
arch/arm/crypto/poly1305-glue.c
··· 249 249 if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && 250 250 (elf_hwcap & HWCAP_NEON)) 251 251 static_branch_enable(&have_neon); 252 - else 252 + else if (IS_REACHABLE(CONFIG_CRYPTO_HASH)) 253 253 /* register only the first entry */ 254 254 return crypto_register_shash(&arm_poly1305_algs[0]); 255 255 256 - return crypto_register_shashes(arm_poly1305_algs, 257 - ARRAY_SIZE(arm_poly1305_algs)); 256 + return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? 257 + crypto_register_shashes(arm_poly1305_algs, 258 + ARRAY_SIZE(arm_poly1305_algs)) : 0; 258 259 } 259 260 260 261 static void __exit arm_poly1305_mod_exit(void) 261 262 { 263 + if (!IS_REACHABLE(CONFIG_CRYPTO_HASH)) 264 + return; 262 265 if (!static_branch_likely(&have_neon)) { 263 266 crypto_unregister_shash(&arm_poly1305_algs[0]); 264 267 return;
+3 -2
arch/arm64/crypto/chacha-neon-glue.c
··· 211 211 212 212 static_branch_enable(&have_neon); 213 213 214 - return crypto_register_skciphers(algs, ARRAY_SIZE(algs)); 214 + return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ? 215 + crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0; 215 216 } 216 217 217 218 static void __exit chacha_simd_mod_fini(void) 218 219 { 219 - if (cpu_have_named_feature(ASIMD)) 220 + if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) && cpu_have_named_feature(ASIMD)) 220 221 crypto_unregister_skciphers(algs, ARRAY_SIZE(algs)); 221 222 } 222 223
+3 -2
arch/arm64/crypto/poly1305-glue.c
··· 220 220 221 221 static_branch_enable(&have_neon); 222 222 223 - return crypto_register_shash(&neon_poly1305_alg); 223 + return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? 224 + crypto_register_shash(&neon_poly1305_alg) : 0; 224 225 } 225 226 226 227 static void __exit neon_poly1305_mod_exit(void) 227 228 { 228 - if (cpu_have_named_feature(ASIMD)) 229 + if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && cpu_have_named_feature(ASIMD)) 229 230 crypto_unregister_shash(&neon_poly1305_alg); 230 231 } 231 232
+4 -2
arch/mips/crypto/chacha-glue.c
··· 128 128 129 129 static int __init chacha_simd_mod_init(void) 130 130 { 131 - return crypto_register_skciphers(algs, ARRAY_SIZE(algs)); 131 + return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ? 132 + crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0; 132 133 } 133 134 134 135 static void __exit chacha_simd_mod_fini(void) 135 136 { 136 - crypto_unregister_skciphers(algs, ARRAY_SIZE(algs)); 137 + if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) 138 + crypto_unregister_skciphers(algs, ARRAY_SIZE(algs)); 137 139 } 138 140 139 141 module_init(chacha_simd_mod_init);
+4 -2
arch/mips/crypto/poly1305-glue.c
··· 187 187 188 188 static int __init mips_poly1305_mod_init(void) 189 189 { 190 - return crypto_register_shash(&mips_poly1305_alg); 190 + return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? 191 + crypto_register_shash(&mips_poly1305_alg) : 0; 191 192 } 192 193 193 194 static void __exit mips_poly1305_mod_exit(void) 194 195 { 195 - crypto_unregister_shash(&mips_poly1305_alg); 196 + if (IS_REACHABLE(CONFIG_CRYPTO_HASH)) 197 + crypto_unregister_shash(&mips_poly1305_alg); 196 198 } 197 199 198 200 module_init(mips_poly1305_mod_init);
+4 -2
arch/x86/crypto/blake2s-glue.c
··· 210 210 XFEATURE_MASK_AVX512, NULL)) 211 211 static_branch_enable(&blake2s_use_avx512); 212 212 213 - return crypto_register_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs)); 213 + return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? 214 + crypto_register_shashes(blake2s_algs, 215 + ARRAY_SIZE(blake2s_algs)) : 0; 214 216 } 215 217 216 218 static void __exit blake2s_mod_exit(void) 217 219 { 218 - if (boot_cpu_has(X86_FEATURE_SSSE3)) 220 + if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && boot_cpu_has(X86_FEATURE_SSSE3)) 219 221 crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs)); 220 222 } 221 223
+3 -2
arch/x86/crypto/chacha_glue.c
··· 299 299 boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */ 300 300 static_branch_enable(&chacha_use_avx512vl); 301 301 } 302 - return crypto_register_skciphers(algs, ARRAY_SIZE(algs)); 302 + return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ? 303 + crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0; 303 304 } 304 305 305 306 static void __exit chacha_simd_mod_fini(void) 306 307 { 307 - if (boot_cpu_has(X86_FEATURE_SSSE3)) 308 + if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3)) 308 309 crypto_unregister_skciphers(algs, ARRAY_SIZE(algs)); 309 310 } 310 311
+4 -3
arch/x86/crypto/curve25519-x86_64.c
··· 2457 2457 static_branch_enable(&curve25519_use_adx); 2458 2458 else 2459 2459 return 0; 2460 - return crypto_register_kpp(&curve25519_alg); 2460 + return IS_REACHABLE(CONFIG_CRYPTO_KPP) ? 2461 + crypto_register_kpp(&curve25519_alg) : 0; 2461 2462 } 2462 2463 2463 2464 static void __exit curve25519_mod_exit(void) 2464 2465 { 2465 - if (boot_cpu_has(X86_FEATURE_BMI2) || 2466 - boot_cpu_has(X86_FEATURE_ADX)) 2466 + if (IS_REACHABLE(CONFIG_CRYPTO_KPP) && 2467 + (boot_cpu_has(X86_FEATURE_BMI2) || boot_cpu_has(X86_FEATURE_ADX))) 2467 2468 crypto_unregister_kpp(&curve25519_alg); 2468 2469 } 2469 2470
+3 -2
arch/x86/crypto/poly1305_glue.c
··· 224 224 cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) 225 225 static_branch_enable(&poly1305_use_avx2); 226 226 227 - return crypto_register_shash(&alg); 227 + return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0; 228 228 } 229 229 230 230 static void __exit poly1305_simd_mod_exit(void) 231 231 { 232 - crypto_unregister_shash(&alg); 232 + if (IS_REACHABLE(CONFIG_CRYPTO_HASH)) 233 + crypto_unregister_shash(&alg); 233 234 } 234 235 235 236 module_init(poly1305_simd_mod_init);