Linux kernel mirror (for testing): https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

Merge tag 'v5.18-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Pull crypto fixes from Herbert Xu:

- Missing Kconfig dependency on arm that leads to boot failure

- x86 SLS fixes

- Reference leak in the stm32 driver

* tag 'v5.18-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
crypto: x86/sm3 - Fixup SLS
crypto: x86/poly1305 - Fixup SLS
crypto: x86/chacha20 - Avoid spurious jumps to other functions
crypto: stm32 - fix reference leak in stm32_crc_remove
crypto: arm/aes-neonbs-cbc - Select generic cbc and aes

+27 -23
+2
arch/arm/crypto/Kconfig
@@ -102,6 +102,8 @@
 	depends on KERNEL_MODE_NEON
 	select CRYPTO_SKCIPHER
 	select CRYPTO_LIB_AES
+	select CRYPTO_AES
+	select CRYPTO_CBC
 	select CRYPTO_SIMD
 	help
 	  Use a faster and more secure NEON based implementation of AES in CBC,
+2 -2
arch/x86/crypto/chacha-avx512vl-x86_64.S
@@ -172,7 +172,7 @@
 	# xor remaining bytes from partial register into output
 	mov		%rcx,%rax
 	and		$0xf,%rcx
-	jz		.Ldone8
+	jz		.Ldone2
 	mov		%rax,%r9
 	and		$~0xf,%r9
 
@@ -438,7 +438,7 @@
 	# xor remaining bytes from partial register into output
 	mov		%rcx,%rax
 	and		$0xf,%rcx
-	jz		.Ldone8
+	jz		.Ldone4
 	mov		%rax,%r9
 	and		$~0xf,%r9
 
+19 -19
arch/x86/crypto/poly1305-x86_64-cryptogams.pl
@@ -297,7 +297,7 @@
 $code.=<<___;
 	mov	\$1,%eax
 .Lno_key:
-	ret
+	RET
 ___
 &end_function("poly1305_init_x86_64");
 
@@ -373,7 +373,7 @@
 .cfi_adjust_cfa_offset	-48
 .Lno_data:
 .Lblocks_epilogue:
-	ret
+	RET
 .cfi_endproc
 ___
 &end_function("poly1305_blocks_x86_64");
@@ -399,7 +399,7 @@
 	mov	%rax,0($mac)	# write result
 	mov	%rcx,8($mac)
 
-	ret
+	RET
 ___
 &end_function("poly1305_emit_x86_64");
 if ($avx) {
@@ -429,7 +429,7 @@
 &poly1305_iteration();
 $code.=<<___;
 	pop $ctx
-	ret
+	RET
 .size	__poly1305_block,.-__poly1305_block
 
 .type	__poly1305_init_avx,\@abi-omnipotent
@@ -594,7 +594,7 @@
 
 	lea	-48-64($ctx),$ctx	# size [de-]optimization
 	pop %rbp
-	ret
+	RET
 .size	__poly1305_init_avx,.-__poly1305_init_avx
 ___
 
@@ -747,7 +747,7 @@
 .cfi_restore	%rbp
 .Lno_data_avx:
 .Lblocks_avx_epilogue:
-	ret
+	RET
 .cfi_endproc
 
 .align	32
@@ -1452,7 +1452,7 @@
 ___
 $code.=<<___;
 	vzeroupper
-	ret
+	RET
 .cfi_endproc
 ___
 &end_function("poly1305_blocks_avx");
@@ -1508,7 +1508,7 @@
 	mov	%rax,0($mac)	# write result
 	mov	%rcx,8($mac)
 
-	ret
+	RET
 ___
 &end_function("poly1305_emit_avx");
 
@@ -1675,7 +1675,7 @@
 .cfi_restore	%rbp
 .Lno_data_avx2$suffix:
 .Lblocks_avx2_epilogue$suffix:
-	ret
+	RET
 .cfi_endproc
 
 .align	32
@@ -2201,7 +2201,7 @@
 ___
 $code.=<<___;
 	vzeroupper
-	ret
+	RET
 .cfi_endproc
 ___
 if($avx > 2 && $avx512) {
@@ -2792,7 +2792,7 @@
 .cfi_def_cfa_register	%rsp
 ___
 $code.=<<___;
-	ret
+	RET
 .cfi_endproc
 ___
 
@@ -2893,7 +2893,7 @@
 ___
 $code.=<<___;
 	mov	\$1,%eax
-	ret
+	RET
 .size	poly1305_init_base2_44,.-poly1305_init_base2_44
 ___
 {
@@ -3010,7 +3010,7 @@
 	jnz		.Lblocks_vpmadd52_4x
 
 .Lno_data_vpmadd52:
-	ret
+	RET
 .size	poly1305_blocks_vpmadd52,.-poly1305_blocks_vpmadd52
 ___
 }
@@ -3451,7 +3451,7 @@
 	vzeroall
 
 .Lno_data_vpmadd52_4x:
-	ret
+	RET
 .size	poly1305_blocks_vpmadd52_4x,.-poly1305_blocks_vpmadd52_4x
 ___
 }
@@ -3824,7 +3824,7 @@
 	vzeroall
 
 .Lno_data_vpmadd52_8x:
-	ret
+	RET
 .size	poly1305_blocks_vpmadd52_8x,.-poly1305_blocks_vpmadd52_8x
 ___
 }
@@ -3861,7 +3861,7 @@
 	mov	%rax,0($mac)	# write result
 	mov	%rcx,8($mac)
 
-	ret
+	RET
 .size	poly1305_emit_base2_44,.-poly1305_emit_base2_44
 ___
 } } }
@@ -3916,7 +3916,7 @@
 
 .Ldone_enc:
 	mov	$otp,%rax
-	ret
+	RET
 .size	xor128_encrypt_n_pad,.-xor128_encrypt_n_pad
 
 .globl	xor128_decrypt_n_pad
@@ -3967,7 +3967,7 @@
 
 .Ldone_dec:
 	mov	$otp,%rax
-	ret
+	RET
 .size	xor128_decrypt_n_pad,.-xor128_decrypt_n_pad
 ___
 }
@@ -4109,7 +4109,7 @@
 	pop	%rbx
 	pop	%rdi
 	pop	%rsi
-	ret
+	RET
 .size	avx_handler,.-avx_handler
 
 .section	.pdata
+1 -1
arch/x86/crypto/sm3-avx-asm_64.S
@@ -513,5 +513,5 @@
 
 	movq %rbp, %rsp;
 	popq %rbp;
-	ret;
+	RET;
 SYM_FUNC_END(sm3_transform_avx)
+3 -1
drivers/crypto/stm32/stm32-crc32.c
@@ -384,8 +384,10 @@
 	struct stm32_crc *crc = platform_get_drvdata(pdev);
 	int ret = pm_runtime_get_sync(crc->dev);
 
-	if (ret < 0)
+	if (ret < 0) {
+		pm_runtime_put_noidle(crc->dev);
 		return ret;
+	}
 
 	spin_lock(&crc_list.lock);
 	list_del(&crc->list);