* means that all uses must be wrapped in inline functions. This also
* makes sure the incoming and outgoing types are always correct.
*/
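/*
 * Illustrative sketch only (not part of this patch): assuming a
 * hypothetical paravirt_ops member such as "read_foo" or "write_foo",
 * a typical wrapper built on the consolidated macros below would look
 * roughly like this, with the inline function supplying the typed
 * prototype while the PVOP_* macro handles the register-level calling
 * convention:
 *
 *	static inline unsigned long read_foo(void)
 *	{
 *		return PVOP_CALL0(unsigned long, read_foo);
 *	}
 *
 *	static inline void write_foo(unsigned long val)
 *	{
 *		PVOP_VCALL1(write_foo, val);
 *	}
 */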
-#define PVOP_CALL0(__rettype, __op) \
- ({ \
- __rettype __ret; \
- if (sizeof(__rettype) > sizeof(unsigned long)) { \
- unsigned long long __tmp; \
- unsigned long __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=A" (__tmp), "=c" (__ecx) \
- : paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } else { \
- unsigned long __tmp, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__tmp), "=d" (__edx), \
- "=c" (__ecx) \
- : paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } \
- __ret; \
- })
-#define PVOP_VCALL0(__op) \
+#define __PVOP_CALL(rettype, op, pre, post, ...) \
({ \
+ rettype __ret; \
unsigned long __eax, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__eax), "=d" (__edx), "=c" (__ecx) \
- : paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- })
-
-#define PVOP_CALL1(__rettype, __op, arg1) \
- ({ \
- __rettype __ret; \
- if (sizeof(__rettype) > sizeof(unsigned long)) { \
- unsigned long long __tmp; \
- unsigned long __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=A" (__tmp), "=c" (__ecx) \
- : "a" ((u32)(arg1)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } else { \
- unsigned long __tmp, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__tmp), "=d" (__edx), \
- "=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } \
- __ret; \
- })
-#define PVOP_VCALL1(__op, arg1) \
- ({ \
- unsigned long __eax, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__eax), "=d" (__edx), "=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- })
-
-#define PVOP_CALL2(__rettype, __op, arg1, arg2) \
- ({ \
- __rettype __ret; \
- if (sizeof(__rettype) > sizeof(unsigned long)) { \
- unsigned long long __tmp; \
- unsigned long __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=A" (__tmp), "=c" (__ecx) \
- : "a" ((u32)(arg1)), \
- "d" ((u32)(arg2)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } else { \
- unsigned long __tmp, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__tmp), "=d" (__edx), \
- "=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- "1" ((u32)(arg2)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } \
- __ret; \
- })
-#define PVOP_VCALL2(__op, arg1, arg2) \
- ({ \
- unsigned long __eax, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__eax), "=d" (__edx), "=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- "1" ((u32)(arg2)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- })
-
-#define PVOP_CALL3(__rettype, __op, arg1, arg2, arg3) \
- ({ \
- __rettype __ret; \
- if (sizeof(__rettype) > sizeof(unsigned long)) { \
- unsigned long long __tmp; \
- unsigned long __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=A" (__tmp), "=c" (__ecx) \
- : "a" ((u32)(arg1)), \
- "d" ((u32)(arg2)), \
- "1" ((u32)(arg3)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } else { \
- unsigned long __tmp, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__tmp), "=d" (__edx), \
+ if (sizeof(rettype) > sizeof(unsigned long)) { \
+ asm volatile(pre \
+ paravirt_alt(PARAVIRT_CALL) \
+ post \
+ : "=a" (__eax), "=d" (__edx), \
"=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- "1" ((u32)(arg2)), \
- "2" ((u32)(arg3)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
+ : paravirt_type(op), \
+ paravirt_clobber(CLBR_ANY), \
+ ##__VA_ARGS__ \
: "memory", "cc"); \
- __ret = (__rettype)__tmp; \
- } \
- __ret; \
- })
-#define PVOP_VCALL3(__op, arg1, arg2, arg3) \
- ({ \
- unsigned long __eax, __edx, __ecx; \
- asm volatile(paravirt_alt(PARAVIRT_CALL) \
- : "=a" (__eax), "=d" (__edx), "=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- "1" ((u32)(arg2)), \
- "2" ((u32)(arg3)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
- : "memory", "cc"); \
- })
-
-#define PVOP_CALL4(__rettype, __op, arg1, arg2, arg3, arg4) \
- ({ \
- __rettype __ret; \
- if (sizeof(__rettype) > sizeof(unsigned long)) { \
- unsigned long long __tmp; \
- unsigned long __ecx; \
- asm volatile("push %[_arg4]; " \
- paravirt_alt(PARAVIRT_CALL) \
- "lea 4(%%esp),%%esp" \
- : "=A" (__tmp), "=c" (__ecx) \
- : "a" ((u32)(arg1)), \
- "d" ((u32)(arg2)), \
- "1" ((u32)(arg3)), \
- [_arg4] "mr" ((u32)(arg4)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
-				     : "memory", "cc");			\
- __ret = (__rettype)__tmp; \
+ __ret = (rettype)((((u64)__edx) << 32) | __eax); \
} else { \
- unsigned long __tmp, __edx, __ecx; \
- asm volatile("push %[_arg4]; " \
+ asm volatile(pre \
paravirt_alt(PARAVIRT_CALL) \
- "lea 4(%%esp),%%esp" \
- : "=a" (__tmp), "=d" (__edx), "=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- "1" ((u32)(arg2)), \
- "2" ((u32)(arg3)), \
- [_arg4]"mr" ((u32)(arg4)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
+ post \
+ : "=a" (__eax), "=d" (__edx), \
+ "=c" (__ecx) \
+ : paravirt_type(op), \
+ paravirt_clobber(CLBR_ANY), \
+ ##__VA_ARGS__ \
: "memory", "cc"); \
- __ret = (__rettype)__tmp; \
+ __ret = (rettype)__eax; \
} \
__ret; \
})
-#define PVOP_VCALL4(__op, arg1, arg2, arg3, arg4) \
+#define __PVOP_VCALL(op, pre, post, ...) \
({ \
unsigned long __eax, __edx, __ecx; \
- asm volatile("push %[_arg4]; " \
+ asm volatile(pre \
paravirt_alt(PARAVIRT_CALL) \
- "lea 4(%%esp),%%esp" \
+ post \
: "=a" (__eax), "=d" (__edx), "=c" (__ecx) \
- : "0" ((u32)(arg1)), \
- "1" ((u32)(arg2)), \
- "2" ((u32)(arg3)), \
- [_arg4]"mr" ((u32)(arg4)), \
- paravirt_type(__op), \
- paravirt_clobber(CLBR_ANY) \
+ : paravirt_type(op), \
+ paravirt_clobber(CLBR_ANY), \
+ ##__VA_ARGS__ \
: "memory", "cc"); \
})
+#define PVOP_CALL0(rettype, op) \
+ __PVOP_CALL(rettype, op, "", "")
+#define PVOP_VCALL0(op) \
+ __PVOP_VCALL(op, "", "")
+
+#define PVOP_CALL1(rettype, op, arg1) \
+ __PVOP_CALL(rettype, op, "", "", "0" ((u32)(arg1)))
+#define PVOP_VCALL1(op, arg1) \
+ __PVOP_VCALL(op, "", "", "0" ((u32)(arg1)))
+
+#define PVOP_CALL2(rettype, op, arg1, arg2) \
+ __PVOP_CALL(rettype, op, "", "", "0" ((u32)(arg1)), "1" ((u32)(arg2)))
+#define PVOP_VCALL2(op, arg1, arg2) \
+ __PVOP_VCALL(op, "", "", "0" ((u32)(arg1)), "1" ((u32)(arg2)))
+
+#define PVOP_CALL3(rettype, op, arg1, arg2, arg3) \
+	__PVOP_CALL(rettype, op, "", "", "0" ((u32)(arg1)),		\
+		    "1" ((u32)(arg2)), "2" ((u32)(arg3)))
+#define PVOP_VCALL3(op, arg1, arg2, arg3) \
+	__PVOP_VCALL(op, "", "", "0" ((u32)(arg1)), "1" ((u32)(arg2)),	\
+		     "2" ((u32)(arg3)))
+
+#define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4) \
+ __PVOP_CALL(rettype, op, \
+ "push %[_arg4];", "lea 4(%%esp),%%esp;", \
+ "0" ((u32)(arg1)), "1" ((u32)(arg2)), \
+ "2" ((u32)(arg3)), [_arg4] "mr" ((u32)(arg4)))
+#define PVOP_VCALL4(op, arg1, arg2, arg3, arg4) \
+ __PVOP_VCALL(op, \
+ "push %[_arg4];", "lea 4(%%esp),%%esp;", \
+ "0" ((u32)(arg1)), "1" ((u32)(arg2)), \
+ "2" ((u32)(arg3)), [_arg4] "mr" ((u32)(arg4)))
+
static inline int paravirt_enabled(void)
{
return paravirt_ops.paravirt_enabled;
/* Make sure as little as possible of this mess escapes. */
#undef PARAVIRT_CALL
+#undef __PVOP_CALL
+#undef __PVOP_VCALL
#undef PVOP_VCALL0
#undef PVOP_CALL0
#undef PVOP_VCALL1