x86/paravirt: Use normal calling sequences for irq enable/disable

Bastian Blank reported a boot crash with stackprotector enabled,
and debugged it back to edx register corruption.

For historical reasons irq enable/disable/save/restore had special
calling sequences to make them more efficient. With the more
recent introduction of higher-level and more general optimisations
this is no longer necessary so we can just use the normal PVOP_
macros.

This fixes some residual bugs in the old implementations which left
edx liable to inadvertent clobbering. Also, fix some bugs in
__PVOP_VCALLEESAVE which were revealed by actual use.

Reported-by: Bastian Blank <bastian@waldi.eu.org>
Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>
Cc: Stable Kernel <stable@kernel.org>
Cc: Xen-devel <xen-devel@lists.xensource.com>
LKML-Reference: <4AD3BC9B.7040501@goop.org>
Signed-off-by: Ingo Molnar <mingo@elte.hu>

Authored by Jeremy Fitzhardinge; committed by Ingo Molnar. Commit 71999d98, parent d1705c55.

2 files changed, 10 insertions(+), 28 deletions(-)

arch/x86/include/asm/paravirt.h  (+4 -24)
@@ -840,42 +840,22 @@

 static inline unsigned long __raw_local_save_flags(void)
 {
-	unsigned long f;
-
-	asm volatile(paravirt_alt(PARAVIRT_CALL)
-		     : "=a"(f)
-		     : paravirt_type(pv_irq_ops.save_fl),
-		       paravirt_clobber(CLBR_EAX)
-		     : "memory", "cc");
-	return f;
+	return PVOP_CALLEE0(unsigned long, pv_irq_ops.save_fl);
 }

 static inline void raw_local_irq_restore(unsigned long f)
 {
-	asm volatile(paravirt_alt(PARAVIRT_CALL)
-		     : "=a"(f)
-		     : PV_FLAGS_ARG(f),
-		       paravirt_type(pv_irq_ops.restore_fl),
-		       paravirt_clobber(CLBR_EAX)
-		     : "memory", "cc");
+	PVOP_VCALLEE1(pv_irq_ops.restore_fl, f);
 }

 static inline void raw_local_irq_disable(void)
 {
-	asm volatile(paravirt_alt(PARAVIRT_CALL)
-		     :
-		     : paravirt_type(pv_irq_ops.irq_disable),
-		       paravirt_clobber(CLBR_EAX)
-		     : "memory", "eax", "cc");
+	PVOP_VCALLEE0(pv_irq_ops.irq_disable);
 }

 static inline void raw_local_irq_enable(void)
 {
-	asm volatile(paravirt_alt(PARAVIRT_CALL)
-		     :
-		     : paravirt_type(pv_irq_ops.irq_enable),
-		       paravirt_clobber(CLBR_EAX)
-		     : "memory", "eax", "cc");
+	PVOP_VCALLEE0(pv_irq_ops.irq_enable);
 }

 static inline unsigned long __raw_local_irq_save(void)
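To illustrate the edx corruption described in the commit message, here is a minimal, hypothetical user-space sketch (not kernel code; the function name and constants are invented). An asm statement that modifies a register must name it in the clobber list, or the compiler is free to keep live values there across the statement; omitting "edx" below reproduces exactly the residual bug this patch removes.

#include <stdio.h>

/*
 * Hypothetical demo: the first instruction scribbles on %edx as a
 * stand-in for a patched paravirt call site. Because "edx" appears
 * in the clobber list, the compiler will not cache anything there
 * across the asm. Drop "edx" from the list and any value the
 * compiler had in that register is silently corrupted.
 */
static unsigned long save_flags_demo(void)
{
	unsigned long f;

	asm volatile("movl $0xdead, %%edx\n\t"	/* trashes %edx */
		     "movl $0x200, %k0"		/* pretend IF was set */
		     : "=a" (f)
		     :
		     : "edx", "memory", "cc");
	return f;
}

int main(void)
{
	printf("flags = %#lx\n", save_flags_demo());
	return 0;
}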
arch/x86/include/asm/paravirt_types.h  (+6 -4)
@@ -494,10 +494,11 @@
 #define EXTRA_CLOBBERS
 #define VEXTRA_CLOBBERS
 #else  /* CONFIG_X86_64 */
+/* [re]ax isn't an arg, but the return val */
 #define PVOP_VCALL_ARGS					\
 	unsigned long __edi = __edi, __esi = __esi,	\
-		__edx = __edx, __ecx = __ecx
-#define PVOP_CALL_ARGS		PVOP_VCALL_ARGS, __eax
+		__edx = __edx, __ecx = __ecx, __eax = __eax
+#define PVOP_CALL_ARGS		PVOP_VCALL_ARGS

 #define PVOP_CALL_ARG1(x)		"D" ((unsigned long)(x))
 #define PVOP_CALL_ARG2(x)		"S" ((unsigned long)(x))
@@ -509,6 +510,7 @@
 		"=c" (__ecx)
 #define PVOP_CALL_CLOBBERS	PVOP_VCALL_CLOBBERS, "=a" (__eax)

+/* void functions are still allowed [re]ax for scratch */
 #define PVOP_VCALLEE_CLOBBERS	"=a" (__eax)
 #define PVOP_CALLEE_CLOBBERS	PVOP_VCALLEE_CLOBBERS

@@ -583,8 +585,8 @@
 		      VEXTRA_CLOBBERS,				\
 		      pre, post, ##__VA_ARGS__)

-#define __PVOP_VCALLEESAVE(rettype, op, pre, post, ...)		\
-	____PVOP_CALL(rettype, op.func, CLBR_RET_REG,		\
+#define __PVOP_VCALLEESAVE(op, pre, post, ...)			\
+	____PVOP_VCALL(op.func, CLBR_RET_REG,			\
 		      PVOP_VCALLEE_CLOBBERS, ,			\
 		      pre, post, ##__VA_ARGS__)
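For context, the callee-save convention these CALLEE macros encode can be modelled outside the kernel: the call target promises to preserve every register except the return register, so the call site lists only "=a" as an output and declares no general-register clobbers. Below is a stand-alone sketch for x86-64 Linux with GCC; toy_save_fl and my_pvop_callee0 are invented names, and the real macros additionally handle runtime patching, clobber masks, and argument registers.

#include <stdio.h>

/*
 * Toy "callee-save" target, hand-written in asm so it genuinely
 * preserves every register except %rax -- the contract that lets
 * the call site below declare only the return register.
 */
asm(".pushsection .text\n"
    ".globl toy_save_fl\n"
    "toy_save_fl:\n\t"
    "movq $0x200, %rax\n\t"	/* pretend IF is set */
    "ret\n"
    ".popsection\n");

extern unsigned long toy_save_fl(void);

static unsigned long (*save_fl)(void) = toy_save_fl;

/*
 * Simplified model of a PVOP_CALLEE0-style call site: an indirect
 * call through a function pointer with only "=a" in the output list
 * and no general-register clobbers, so the compiler may keep values
 * live in %rcx/%rdx/etc. across the call.
 */
static unsigned long my_pvop_callee0(void)
{
	unsigned long ret;

	asm volatile("call *%1"
		     : "=a" (ret)
		     : "m" (save_fl)
		     : "memory", "cc");
	return ret;
}

int main(void)
{
	printf("flags = %#lx\n", my_pvop_callee0());
	return 0;
}

The design payoff mirrors the hunks above: the fewer registers a call site clobbers, the less spilling the compiler must emit around irq enable/disable, which is why the callee-save variants exist at all.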