Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: aesni-intel - Fixed build error on x86-32

Exclude AES-GCM code for x86-32 due to heavy usage of 64-bit registers
not available on x86-32.

While at it, fixed unregister order in aesni_exit().

Signed-off-by: Mathias Krause <minipli@googlemail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Mathias Krause and committed by Herbert Xu.

commit 559ad0ff (parent c762be63)

Total: +17 -14
+4 -1
arch/x86/crypto/aesni-intel_asm.S
··· 32 32 #include <linux/linkage.h> 33 33 #include <asm/inst.h> 34 34 35 + #ifdef __x86_64__ 35 36 .data 36 37 POLY: .octa 0xC2000000000000000000000000000001 37 38 TWOONE: .octa 0x00000001000000000000000000000001 ··· 85 84 #define arg8 STACK_OFFSET+16(%r14) 86 85 #define arg9 STACK_OFFSET+24(%r14) 87 86 #define arg10 STACK_OFFSET+32(%r14) 87 + #endif 88 88 89 89 90 90 #define STATE1 %xmm0 ··· 132 130 #endif 133 131 134 132 133 + #ifdef __x86_64__ 135 134 /* GHASH_MUL MACRO to implement: Data*HashKey mod (128,127,126,121,0) 136 135 * 137 136 * ··· 1258 1255 pop %r13 1259 1256 pop %r12 1260 1257 ret 1261 - 1258 + #endif 1262 1259 1263 1260 1264 1261 _key_expansion_128:
+13 -13
arch/x86/crypto/aesni-intel_glue.c
··· 97 97 #ifdef CONFIG_X86_64 98 98 asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out, 99 99 const u8 *in, unsigned int len, u8 *iv); 100 - #endif 101 100 102 101 /* asmlinkage void aesni_gcm_enc() 103 102 * void *ctx, AES Key schedule. Starts on a 16 byte boundary. ··· 148 149 PTR_ALIGN((u8 *) 149 150 crypto_tfm_ctx(crypto_aead_tfm(tfm)), AESNI_ALIGN); 150 151 } 152 + #endif 151 153 152 154 static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx) 153 155 { ··· 822 822 }; 823 823 #endif 824 824 825 + #ifdef CONFIG_X86_64 825 826 static int rfc4106_init(struct crypto_tfm *tfm) 826 827 { 827 828 struct cryptd_aead *cryptd_tfm; ··· 1238 1237 }, 1239 1238 }, 1240 1239 }; 1240 + #endif 1241 1241 1242 1242 static int __init aesni_init(void) 1243 1243 { ··· 1266 1264 goto blk_ctr_err; 1267 1265 if ((err = crypto_register_alg(&ablk_ctr_alg))) 1268 1266 goto ablk_ctr_err; 1267 + if ((err = crypto_register_alg(&__rfc4106_alg))) 1268 + goto __aead_gcm_err; 1269 + if ((err = crypto_register_alg(&rfc4106_alg))) 1270 + goto aead_gcm_err; 1269 1271 #ifdef HAS_CTR 1270 1272 if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg))) 1271 1273 goto ablk_rfc3686_ctr_err; ··· 1287 1281 if ((err = crypto_register_alg(&ablk_xts_alg))) 1288 1282 goto ablk_xts_err; 1289 1283 #endif 1290 - err = crypto_register_alg(&__rfc4106_alg); 1291 - if (err) 1292 - goto __aead_gcm_err; 1293 - err = crypto_register_alg(&rfc4106_alg); 1294 - if (err) 1295 - goto aead_gcm_err; 1296 1284 return err; 1297 1285 1298 - aead_gcm_err: 1299 - crypto_unregister_alg(&__rfc4106_alg); 1300 - __aead_gcm_err: 1301 1286 #ifdef HAS_XTS 1302 - crypto_unregister_alg(&ablk_xts_alg); 1303 1287 ablk_xts_err: 1304 1288 #endif 1305 1289 #ifdef HAS_PCBC ··· 1305 1309 crypto_unregister_alg(&ablk_rfc3686_ctr_alg); 1306 1310 ablk_rfc3686_ctr_err: 1307 1311 #endif 1312 + crypto_unregister_alg(&rfc4106_alg); 1313 + aead_gcm_err: 1314 + crypto_unregister_alg(&__rfc4106_alg); 1315 + __aead_gcm_err: 1308 1316 
crypto_unregister_alg(&ablk_ctr_alg); 1309 1317 ablk_ctr_err: 1310 1318 crypto_unregister_alg(&blk_ctr_alg); ··· 1331 1331 1332 1332 static void __exit aesni_exit(void) 1333 1333 { 1334 - crypto_unregister_alg(&__rfc4106_alg); 1335 - crypto_unregister_alg(&rfc4106_alg); 1336 1334 #ifdef HAS_XTS 1337 1335 crypto_unregister_alg(&ablk_xts_alg); 1338 1336 #endif ··· 1344 1346 #ifdef HAS_CTR 1345 1347 crypto_unregister_alg(&ablk_rfc3686_ctr_alg); 1346 1348 #endif 1349 + crypto_unregister_alg(&rfc4106_alg); 1350 + crypto_unregister_alg(&__rfc4106_alg); 1347 1351 crypto_unregister_alg(&ablk_ctr_alg); 1348 1352 crypto_unregister_alg(&blk_ctr_alg); 1349 1353 #endif