x86/paravirt: Use normal calling sequences for irq enable/disable
author    Jeremy Fitzhardinge <jeremy@goop.org>
          Mon, 12 Oct 2009 23:32:43 +0000 (16:32 -0700)
committer Ingo Molnar <mingo@elte.hu>
          Tue, 13 Oct 2009 07:22:01 +0000 (09:22 +0200)
Bastian Blank reported a boot crash with stackprotector enabled,
and debugged it back to edx register corruption.

For historical reasons irq enable/disable/save/restore had special
calling sequences to make them more efficient.  With the more
recent introduction of higher-level and more general optimisations
this is no longer necessary, so we can just use the normal PVOP_
macros.

This fixes some residual bugs in the old implementations which left
edx liable to inadvertent clobbering, and also fixes some bugs in
__PVOP_VCALLEESAVE that were revealed by actual use.
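
For illustration (not part of the patch), a minimal sketch of a caller
of the converted accessors; example_irq_save() is a hypothetical name,
modelled on the in-tree __raw_local_irq_save() shown in the diff below:

    /* Hypothetical caller, mirroring __raw_local_irq_save(): since the
     * PVOP_CALLEE* forms clobber only [re]ax, the compiler is free to
     * keep live values in edx/ecx across these calls. */
    static inline unsigned long example_irq_save(void)
    {
            unsigned long f = __raw_local_save_flags(); /* read flags */

            raw_local_irq_disable();                    /* mask irqs */
            return f;
    }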

Reported-by: Bastian Blank <bastian@waldi.eu.org>
Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>
Cc: Stable Kernel <stable@kernel.org>
Cc: Xen-devel <xen-devel@lists.xensource.com>
LKML-Reference: <4AD3BC9B.7040501@goop.org>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
arch/x86/include/asm/paravirt.h
arch/x86/include/asm/paravirt_types.h

diff --git a/arch/x86/include/asm/paravirt.h b/arch/x86/include/asm/paravirt.h
index 8aebcc41041d7cbc15a0b3c1023cdccf7010e407..efb38994859c6c961c68a0cf301106098e0b86cc 100644
--- a/arch/x86/include/asm/paravirt.h
+++ b/arch/x86/include/asm/paravirt.h
@@ -840,42 +840,22 @@ static __always_inline void __raw_spin_unlock(struct raw_spinlock *lock)
 
 static inline unsigned long __raw_local_save_flags(void)
 {
-       unsigned long f;
-
-       asm volatile(paravirt_alt(PARAVIRT_CALL)
-                    : "=a"(f)
-                    : paravirt_type(pv_irq_ops.save_fl),
-                      paravirt_clobber(CLBR_EAX)
-                    : "memory", "cc");
-       return f;
+       return PVOP_CALLEE0(unsigned long, pv_irq_ops.save_fl);
 }
 
 static inline void raw_local_irq_restore(unsigned long f)
 {
-       asm volatile(paravirt_alt(PARAVIRT_CALL)
-                    : "=a"(f)
-                    : PV_FLAGS_ARG(f),
-                      paravirt_type(pv_irq_ops.restore_fl),
-                      paravirt_clobber(CLBR_EAX)
-                    : "memory", "cc");
+       PVOP_VCALLEE1(pv_irq_ops.restore_fl, f);
 }
 
 static inline void raw_local_irq_disable(void)
 {
-       asm volatile(paravirt_alt(PARAVIRT_CALL)
-                    :
-                    : paravirt_type(pv_irq_ops.irq_disable),
-                      paravirt_clobber(CLBR_EAX)
-                    : "memory", "eax", "cc");
+       PVOP_VCALLEE0(pv_irq_ops.irq_disable);
 }
 
 static inline void raw_local_irq_enable(void)
 {
-       asm volatile(paravirt_alt(PARAVIRT_CALL)
-                    :
-                    : paravirt_type(pv_irq_ops.irq_enable),
-                      paravirt_clobber(CLBR_EAX)
-                    : "memory", "eax", "cc");
+       PVOP_VCALLEE0(pv_irq_ops.irq_enable);
 }
 
 static inline unsigned long __raw_local_irq_save(void)
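
For reference, a hedged sketch of roughly what the new
PVOP_CALLEE0(unsigned long, pv_irq_ops.save_fl) expands to on 32-bit,
following the ____PVOP_CALL plumbing in paravirt_types.h (the exact
output varies by configuration; sketch_save_flags is a made-up name):

    /* Approximate 32-bit expansion: only %eax is declared clobbered
     * and CLBR_RET_REG is recorded for the patching machinery; the
     * callee-save thunk behind PARAVIRT_CALL preserves the rest. */
    static inline unsigned long sketch_save_flags(void)
    {
            unsigned long __eax = __eax;

            asm volatile(paravirt_alt(PARAVIRT_CALL)
                         : "=a" (__eax)
                         : paravirt_type(pv_irq_ops.save_fl),
                           paravirt_clobber(CLBR_RET_REG)
                         : "memory", "cc");
            return __eax;
    }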
diff --git a/arch/x86/include/asm/paravirt_types.h b/arch/x86/include/asm/paravirt_types.h
index dd0f5b32489dfec730849202b474d453b773dbb5..9357473c8da06b7d37f76fa22ea610942cbc90e7 100644
--- a/arch/x86/include/asm/paravirt_types.h
+++ b/arch/x86/include/asm/paravirt_types.h
@@ -494,10 +494,11 @@ int paravirt_disable_iospace(void);
 #define EXTRA_CLOBBERS
 #define VEXTRA_CLOBBERS
 #else  /* CONFIG_X86_64 */
+/* [re]ax isn't an arg, but the return val */
 #define PVOP_VCALL_ARGS                                        \
        unsigned long __edi = __edi, __esi = __esi,     \
-               __edx = __edx, __ecx = __ecx
-#define PVOP_CALL_ARGS         PVOP_VCALL_ARGS, __eax
+               __edx = __edx, __ecx = __ecx, __eax = __eax
+#define PVOP_CALL_ARGS         PVOP_VCALL_ARGS
 
 #define PVOP_CALL_ARG1(x)              "D" ((unsigned long)(x))
 #define PVOP_CALL_ARG2(x)              "S" ((unsigned long)(x))
@@ -509,6 +510,7 @@ int paravirt_disable_iospace(void);
                                "=c" (__ecx)
 #define PVOP_CALL_CLOBBERS     PVOP_VCALL_CLOBBERS, "=a" (__eax)
 
+/* void functions are still allowed [re]ax for scratch */
 #define PVOP_VCALLEE_CLOBBERS  "=a" (__eax)
 #define PVOP_CALLEE_CLOBBERS   PVOP_VCALLEE_CLOBBERS
 
@@ -583,8 +585,8 @@ int paravirt_disable_iospace(void);
                       VEXTRA_CLOBBERS,                                 \
                       pre, post, ##__VA_ARGS__)
 
-#define __PVOP_VCALLEESAVE(rettype, op, pre, post, ...)                        \
-       ____PVOP_CALL(rettype, op.func, CLBR_RET_REG,                   \
+#define __PVOP_VCALLEESAVE(op, pre, post, ...)                         \
+       ____PVOP_VCALL(op.func, CLBR_RET_REG,                           \
                      PVOP_VCALLEE_CLOBBERS, ,                          \
                      pre, post, ##__VA_ARGS__)
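
Finally, a sketch of how the fixed macro is reached from the paravirt.h
call sites above; the PVOP_VCALLEE* wrappers shown here are assumed to
match their in-tree definitions elsewhere in paravirt_types.h:

    /* Assumed wrappers: the void callee-save entry points funnel into
     * the fixed __PVOP_VCALLEESAVE, which now expands via
     * ____PVOP_VCALL with only the return register clobbered. */
    #define PVOP_VCALLEE0(op)                                       \
            __PVOP_VCALLEESAVE(op, "", "")
    #define PVOP_VCALLEE1(op, arg1)                                 \
            __PVOP_VCALLEESAVE(op, "", "", PVOP_CALL_ARG1(arg1))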