diff --git a/arch/x86/include/asm/paravirt_types.h b/arch/x86/include/asm/paravirt_types.h
index de87087..ae692c3 100644
--- a/arch/x86/include/asm/paravirt_types.h
+++ b/arch/x86/include/asm/paravirt_types.h
@@ -3,7 +3,6 @@
 #define _ASM_X86_PARAVIRT_TYPES_H
 
 /* Bitmask of what can be clobbered: usually at least eax. */
-#define CLBR_NONE 0
 #define CLBR_EAX  (1 << 0)
 #define CLBR_ECX  (1 << 1)
 #define CLBR_EDX  (1 << 2)
@@ -15,7 +14,6 @@
 
 #define CLBR_ARG_REGS  (CLBR_EAX | CLBR_EDX | CLBR_ECX)
 #define CLBR_RET_REG   (CLBR_EAX | CLBR_EDX)
-#define CLBR_SCRATCH   (0)
 #else
 #define CLBR_RAX  CLBR_EAX
 #define CLBR_RCX  CLBR_ECX
 #define CLBR_ARG_REGS  (CLBR_RDI | CLBR_RSI | CLBR_RDX | \
                         CLBR_RCX | CLBR_R8 | CLBR_R9)
 #define CLBR_RET_REG   (CLBR_RAX)
-#define CLBR_SCRATCH   (CLBR_R10 | CLBR_R11)
 
 #endif /* X86_64 */
 
-#define CLBR_CALLEE_SAVE ((CLBR_ARG_REGS | CLBR_SCRATCH) & ~CLBR_RET_REG)
-
 #ifndef __ASSEMBLY__
 
 #include <asm/desc_defs.h>
@@ -73,19 +68,6 @@ struct pv_info {
        const char *name;
 };
 
-struct pv_init_ops {
-       /*
-        * Patch may replace one of the defined code sequences with
-        * arbitrary code, subject to the same register constraints.
-        * This generally means the code is not free to clobber any
-        * registers other than EAX.  The patch function should return
-        * the number of bytes of code generated, as we nop pad the
-        * rest in generic code.
-        */
-       unsigned (*patch)(u8 type, void *insn_buff,
-                         unsigned long addr, unsigned len);
-} __no_randomize_layout;
-
 #ifdef CONFIG_PARAVIRT_XXL
 struct pv_lazy_ops {
        /* Set deferred update mode, used for batching operations. */
@@ -95,11 +77,6 @@ struct pv_lazy_ops {
 } __no_randomize_layout;
 #endif
 
-struct pv_time_ops {
-       unsigned long long (*sched_clock)(void);
-       unsigned long long (*steal_clock)(int cpu);
-} __no_randomize_layout;
-
 struct pv_cpu_ops {
        /* hooks for various privileged instructions */
        void (*io_delay)(void);
@@ -156,10 +133,6 @@ struct pv_cpu_ops {
 
        u64 (*read_pmc)(int counter);
 
-       /* Normal iret.  Jump to this with the standard iret stack
-          frame set up. */
-       void (*iret)(void);
-
        void (*start_context_switch)(struct task_struct *prev);
        void (*end_context_switch)(struct task_struct *next);
 #endif
@@ -290,8 +263,6 @@ struct pv_lock_ops {
  * number for each function using the offset which we use to indicate
  * what to patch. */
 struct paravirt_patch_template {
-       struct pv_init_ops      init;
-       struct pv_time_ops      time;
        struct pv_cpu_ops       cpu;
        struct pv_irq_ops       irq;
        struct pv_mmu_ops       mmu;
@@ -300,6 +271,7 @@ struct paravirt_patch_template {
 
 extern struct pv_info pv_info;
 extern struct paravirt_patch_template pv_ops;
+extern void (*paravirt_iret)(void);
 
 #define PARAVIRT_PATCH(x)                                      \
        (offsetof(struct paravirt_patch_template, x) / sizeof(void *))
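The iret hook no longer lives in pv_ops.cpu; a guest that needs a non-native iret path now assigns the standalone paravirt_iret pointer instead, and PARAVIRT_PATCH() keeps indexing only the remaining pv_ops slots. A minimal sketch of such an override, assuming an illustrative xen_iret-style routine and a made-up setup hook:

/* Sketch only: installing a guest-specific iret after this change. */
extern void xen_iret(void);			/* guest-provided routine (assumed) */

static void __init example_guest_init(void)	/* hypothetical setup hook */
{
	/* Formerly pv_ops.cpu.iret = xen_iret; the pointer is now global. */
	paravirt_iret = xen_iret;
}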
@@ -331,11 +303,7 @@ extern struct paravirt_patch_template pv_ops;
 /* Simple instruction patching code. */
 #define NATIVE_LABEL(a,x,b) "\n\t.globl " a #x "_" #b "\n" a #x "_" #b ":\n\t"
 
-unsigned paravirt_patch_ident_64(void *insn_buff, unsigned len);
-unsigned paravirt_patch_default(u8 type, void *insn_buff, unsigned long addr, unsigned len);
-unsigned paravirt_patch_insns(void *insn_buff, unsigned len, const char *start, const char *end);
-
-unsigned native_patch(u8 type, void *insn_buff, unsigned long addr, unsigned len);
+unsigned int paravirt_patch(u8 type, void *insn_buff, unsigned long addr, unsigned int len);
 
 int paravirt_disable_iospace(void);
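paravirt_patch() is now the single patching entry point, replacing paravirt_patch_default() and native_patch(). A rough sketch of a caller loop, assuming a paravirt_patch_site record with instr/type/len fields and a text_poke_early()-style copy helper; the loop shape and buffer size are illustrative, not taken from this hunk:

/* Sketch only: driving paravirt_patch() once per recorded call site. */
static void example_apply_paravirt(struct paravirt_patch_site *start,
				   struct paravirt_patch_site *end)
{
	char insn_buff[64];	/* scratch space, size chosen arbitrarily */
	struct paravirt_patch_site *p;

	for (p = start; p < end; p++) {
		unsigned int used;

		/* Emit the replacement (typically a direct call into the
		 * pv_ops target) for this site into the scratch buffer. */
		used = paravirt_patch(p->type, insn_buff,
				      (unsigned long)p->instr, p->len);

		/* Copy the generated bytes back over the original site. */
		text_poke_early(p->instr, insn_buff, used);
	}
}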
 
@@ -371,7 +339,7 @@ int paravirt_disable_iospace(void);
  * on the stack.  All caller-save registers (eax,edx,ecx) are expected
  * to be modified (either clobbered or used for return values).
  * X86_64, on the other hand, already specifies a register-based calling
- * conventions, returning at %rax, with parameteres going on %rdi, %rsi,
+ * conventions, returning at %rax, with parameters going on %rdi, %rsi,
  * %rdx, and %rcx. Note that for this reason, x86_64 does not need any
  * special handling for dealing with 4 arguments, unlike i386.
  * However, x86_64 also have to clobber all caller saved registers, which
@@ -414,11 +382,9 @@ int paravirt_disable_iospace(void);
  * makes sure the incoming and outgoing types are always correct.
  */
 #ifdef CONFIG_X86_32
-#define PVOP_VCALL_ARGS                                                        \
+#define PVOP_CALL_ARGS                                                 \
        unsigned long __eax = __eax, __edx = __edx, __ecx = __ecx;
 
-#define PVOP_CALL_ARGS                 PVOP_VCALL_ARGS
-
 #define PVOP_CALL_ARG1(x)              "a" ((unsigned long)(x))
 #define PVOP_CALL_ARG2(x)              "d" ((unsigned long)(x))
 #define PVOP_CALL_ARG3(x)              "c" ((unsigned long)(x))
@@ -434,12 +400,10 @@ int paravirt_disable_iospace(void);
 #define VEXTRA_CLOBBERS
 #else  /* CONFIG_X86_64 */
 /* [re]ax isn't an arg, but the return val */
-#define PVOP_VCALL_ARGS                                                \
+#define PVOP_CALL_ARGS                                         \
        unsigned long __edi = __edi, __esi = __esi,             \
                __edx = __edx, __ecx = __ecx, __eax = __eax;
 
-#define PVOP_CALL_ARGS         PVOP_VCALL_ARGS
-
 #define PVOP_CALL_ARG1(x)              "D" ((unsigned long)(x))
 #define PVOP_CALL_ARG2(x)              "S" ((unsigned long)(x))
 #define PVOP_CALL_ARG3(x)              "d" ((unsigned long)(x))
@@ -464,152 +428,138 @@ int paravirt_disable_iospace(void);
 #define PVOP_TEST_NULL(op)     ((void)pv_ops.op)
 #endif
 
-#define PVOP_RETMASK(rettype)                                          \
+#define PVOP_RETVAL(rettype)                                           \
        ({      unsigned long __mask = ~0UL;                            \
+               BUILD_BUG_ON(sizeof(rettype) > sizeof(unsigned long));  \
                switch (sizeof(rettype)) {                              \
                case 1: __mask =       0xffUL; break;                   \
                case 2: __mask =     0xffffUL; break;                   \
                case 4: __mask = 0xffffffffUL; break;                   \
                default: break;                                         \
                }                                                       \
-               __mask;                                                 \
+               __mask & __eax;                                         \
        })
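PVOP_RETMASK only computed the mask; PVOP_RETVAL also applies it to the return register, so the call macros below can simply cast the statement-expression result. For a 2-byte return type the expression reduces to roughly the following, where __eax is the register variable declared by PVOP_CALL_ARGS (a sketch of the expansion, not compiler output):

/* Sketch: PVOP_RETVAL(u16) keeps only the low 16 bits of [e|r]ax. */
({
	unsigned long __mask = ~0UL;
	/* BUILD_BUG_ON(sizeof(u16) > sizeof(unsigned long)) compiles away */
	__mask = 0xffffUL;	/* the "case 2" branch */
	__mask & __eax;		/* value handed back through __PVOP_CALL() */
})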
 
 
-#define ____PVOP_CALL(rettype, op, clbr, call_clbr, extra_clbr,                \
-                     pre, post, ...)                                   \
+#define ____PVOP_CALL(ret, op, clbr, call_clbr, extra_clbr, ...)       \
        ({                                                              \
-               rettype __ret;                                          \
                PVOP_CALL_ARGS;                                         \
                PVOP_TEST_NULL(op);                                     \
-               /* This is 32-bit specific, but is okay in 64-bit */    \
-               /* since this condition will never hold */              \
-               if (sizeof(rettype) > sizeof(unsigned long)) {          \
-                       asm volatile(pre                                \
-                                    paravirt_alt(PARAVIRT_CALL)        \
-                                    post                               \
-                                    : call_clbr, ASM_CALL_CONSTRAINT   \
-                                    : paravirt_type(op),               \
-                                      paravirt_clobber(clbr),          \
-                                      ##__VA_ARGS__                    \
-                                    : "memory", "cc" extra_clbr);      \
-                       __ret = (rettype)((((u64)__edx) << 32) | __eax); \
-               } else {                                                \
-                       asm volatile(pre                                \
-                                    paravirt_alt(PARAVIRT_CALL)        \
-                                    post                               \
-                                    : call_clbr, ASM_CALL_CONSTRAINT   \
-                                    : paravirt_type(op),               \
-                                      paravirt_clobber(clbr),          \
-                                      ##__VA_ARGS__                    \
-                                    : "memory", "cc" extra_clbr);      \
-                       __ret = (rettype)(__eax & PVOP_RETMASK(rettype));       \
-               }                                                       \
-               __ret;                                                  \
+               asm volatile(paravirt_alt(PARAVIRT_CALL)                \
+                            : call_clbr, ASM_CALL_CONSTRAINT           \
+                            : paravirt_type(op),                       \
+                              paravirt_clobber(clbr),                  \
+                              ##__VA_ARGS__                            \
+                            : "memory", "cc" extra_clbr);              \
+               ret;                                                    \
        })
 
-#define __PVOP_CALL(rettype, op, pre, post, ...)                       \
-       ____PVOP_CALL(rettype, op, CLBR_ANY, PVOP_CALL_CLOBBERS,        \
-                     EXTRA_CLOBBERS, pre, post, ##__VA_ARGS__)
-
-#define __PVOP_CALLEESAVE(rettype, op, pre, post, ...)                 \
-       ____PVOP_CALL(rettype, op.func, CLBR_RET_REG,                   \
-                     PVOP_CALLEE_CLOBBERS, ,                           \
-                     pre, post, ##__VA_ARGS__)
-
-
-#define ____PVOP_VCALL(op, clbr, call_clbr, extra_clbr, pre, post, ...)        \
+#define ____PVOP_ALT_CALL(ret, op, alt, cond, clbr, call_clbr,         \
+                         extra_clbr, ...)                              \
        ({                                                              \
-               PVOP_VCALL_ARGS;                                        \
+               PVOP_CALL_ARGS;                                         \
                PVOP_TEST_NULL(op);                                     \
-               asm volatile(pre                                        \
-                            paravirt_alt(PARAVIRT_CALL)                \
-                            post                                       \
+               asm volatile(ALTERNATIVE(paravirt_alt(PARAVIRT_CALL),   \
+                                        alt, cond)                     \
                             : call_clbr, ASM_CALL_CONSTRAINT           \
                             : paravirt_type(op),                       \
                               paravirt_clobber(clbr),                  \
                               ##__VA_ARGS__                            \
                             : "memory", "cc" extra_clbr);              \
+               ret;                                                    \
        })
 
-#define __PVOP_VCALL(op, pre, post, ...)                               \
-       ____PVOP_VCALL(op, CLBR_ANY, PVOP_VCALL_CLOBBERS,               \
-                      VEXTRA_CLOBBERS,                                 \
-                      pre, post, ##__VA_ARGS__)
+#define __PVOP_CALL(rettype, op, ...)                                  \
+       ____PVOP_CALL(PVOP_RETVAL(rettype), op, CLBR_ANY,               \
+                     PVOP_CALL_CLOBBERS, EXTRA_CLOBBERS, ##__VA_ARGS__)
+
+#define __PVOP_ALT_CALL(rettype, op, alt, cond, ...)                   \
+       ____PVOP_ALT_CALL(PVOP_RETVAL(rettype), op, alt, cond, CLBR_ANY,\
+                         PVOP_CALL_CLOBBERS, EXTRA_CLOBBERS,           \
+                         ##__VA_ARGS__)
+
+#define __PVOP_CALLEESAVE(rettype, op, ...)                            \
+       ____PVOP_CALL(PVOP_RETVAL(rettype), op.func, CLBR_RET_REG,      \
+                     PVOP_CALLEE_CLOBBERS, , ##__VA_ARGS__)
+
+#define __PVOP_ALT_CALLEESAVE(rettype, op, alt, cond, ...)             \
+       ____PVOP_ALT_CALL(PVOP_RETVAL(rettype), op.func, alt, cond,     \
+                         CLBR_RET_REG, PVOP_CALLEE_CLOBBERS, , ##__VA_ARGS__)
+
+
+#define __PVOP_VCALL(op, ...)                                          \
+       (void)____PVOP_CALL(, op, CLBR_ANY, PVOP_VCALL_CLOBBERS,        \
+                      VEXTRA_CLOBBERS, ##__VA_ARGS__)
+
+#define __PVOP_ALT_VCALL(op, alt, cond, ...)                           \
+       (void)____PVOP_ALT_CALL(, op, alt, cond, CLBR_ANY,              \
+                               PVOP_VCALL_CLOBBERS, VEXTRA_CLOBBERS,   \
+                               ##__VA_ARGS__)
 
-#define __PVOP_VCALLEESAVE(op, pre, post, ...)                         \
-       ____PVOP_VCALL(op.func, CLBR_RET_REG,                           \
-                     PVOP_VCALLEE_CLOBBERS, ,                          \
-                     pre, post, ##__VA_ARGS__)
+#define __PVOP_VCALLEESAVE(op, ...)                                    \
+       (void)____PVOP_CALL(, op.func, CLBR_RET_REG,                    \
+                           PVOP_VCALLEE_CLOBBERS, , ##__VA_ARGS__)
 
+#define __PVOP_ALT_VCALLEESAVE(op, alt, cond, ...)                     \
+       (void)____PVOP_ALT_CALL(, op.func, alt, cond, CLBR_RET_REG,     \
+                               PVOP_VCALLEE_CLOBBERS, , ##__VA_ARGS__)
 
 
 #define PVOP_CALL0(rettype, op)                                                \
-       __PVOP_CALL(rettype, op, "", "")
+       __PVOP_CALL(rettype, op)
 #define PVOP_VCALL0(op)                                                        \
-       __PVOP_VCALL(op, "", "")
+       __PVOP_VCALL(op)
+#define PVOP_ALT_CALL0(rettype, op, alt, cond)                         \
+       __PVOP_ALT_CALL(rettype, op, alt, cond)
+#define PVOP_ALT_VCALL0(op, alt, cond)                                 \
+       __PVOP_ALT_VCALL(op, alt, cond)
 
 #define PVOP_CALLEE0(rettype, op)                                      \
-       __PVOP_CALLEESAVE(rettype, op, "", "")
+       __PVOP_CALLEESAVE(rettype, op)
 #define PVOP_VCALLEE0(op)                                              \
-       __PVOP_VCALLEESAVE(op, "", "")
+       __PVOP_VCALLEESAVE(op)
+#define PVOP_ALT_CALLEE0(rettype, op, alt, cond)                       \
+       __PVOP_ALT_CALLEESAVE(rettype, op, alt, cond)
+#define PVOP_ALT_VCALLEE0(op, alt, cond)                               \
+       __PVOP_ALT_VCALLEESAVE(op, alt, cond)
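The ALT_CALLEE variants let a value-returning pvop collapse to a couple of native instructions when paravirtualization is not in effect. A usage sketch modeled on how CR2 reads are wired up in this series; the alt string and feature bit are quoted from memory and should be treated as illustrative:

/* Sketch: go through pv_ops.mmu.read_cr2, unless the kernel is not a
 * Xen PV guest, in which case the site is patched to the raw mov. */
static __always_inline unsigned long example_read_cr2(void)
{
	return PVOP_ALT_CALLEE0(unsigned long, mmu.read_cr2,
				"mov %%cr2, %%rax;",
				ALT_NOT(X86_FEATURE_XENPV));
}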
 
 
 #define PVOP_CALL1(rettype, op, arg1)                                  \
-       __PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1))
+       __PVOP_CALL(rettype, op, PVOP_CALL_ARG1(arg1))
 #define PVOP_VCALL1(op, arg1)                                          \
-       __PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1))
+       __PVOP_VCALL(op, PVOP_CALL_ARG1(arg1))
+#define PVOP_ALT_VCALL1(op, arg1, alt, cond)                           \
+       __PVOP_ALT_VCALL(op, alt, cond, PVOP_CALL_ARG1(arg1))
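And the void flavour with one argument, modeled on a CR3 write that can be patched down to a single mov when running natively; again the replacement string and feature flag are illustrative assumptions:

/* Sketch: write CR3 via pv_ops, or directly when not a Xen PV guest. */
static inline void example_write_cr3(unsigned long x)
{
	PVOP_ALT_VCALL1(mmu.write_cr3, x,
			"mov %%rdi, %%cr3",
			ALT_NOT(X86_FEATURE_XENPV));
}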
 
 #define PVOP_CALLEE1(rettype, op, arg1)                                        \
-       __PVOP_CALLEESAVE(rettype, op, "", "", PVOP_CALL_ARG1(arg1))
+       __PVOP_CALLEESAVE(rettype, op, PVOP_CALL_ARG1(arg1))
 #define PVOP_VCALLEE1(op, arg1)                                                \
-       __PVOP_VCALLEESAVE(op, "", "", PVOP_CALL_ARG1(arg1))
+       __PVOP_VCALLEESAVE(op, PVOP_CALL_ARG1(arg1))
+#define PVOP_ALT_CALLEE1(rettype, op, arg1, alt, cond)                 \
+       __PVOP_ALT_CALLEESAVE(rettype, op, alt, cond, PVOP_CALL_ARG1(arg1))
+#define PVOP_ALT_VCALLEE1(op, arg1, alt, cond)                         \
+       __PVOP_ALT_VCALLEESAVE(op, alt, cond, PVOP_CALL_ARG1(arg1))
 
 
 #define PVOP_CALL2(rettype, op, arg1, arg2)                            \
-       __PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1),          \
-                   PVOP_CALL_ARG2(arg2))
+       __PVOP_CALL(rettype, op, PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2))
 #define PVOP_VCALL2(op, arg1, arg2)                                    \
-       __PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1),                  \
-                    PVOP_CALL_ARG2(arg2))
-
-#define PVOP_CALLEE2(rettype, op, arg1, arg2)                          \
-       __PVOP_CALLEESAVE(rettype, op, "", "", PVOP_CALL_ARG1(arg1),    \
-                         PVOP_CALL_ARG2(arg2))
-#define PVOP_VCALLEE2(op, arg1, arg2)                                  \
-       __PVOP_VCALLEESAVE(op, "", "", PVOP_CALL_ARG1(arg1),            \
-                          PVOP_CALL_ARG2(arg2))
-
+       __PVOP_VCALL(op, PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2))
 
 #define PVOP_CALL3(rettype, op, arg1, arg2, arg3)                      \
-       __PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1),          \
+       __PVOP_CALL(rettype, op, PVOP_CALL_ARG1(arg1),                  \
                    PVOP_CALL_ARG2(arg2), PVOP_CALL_ARG3(arg3))
 #define PVOP_VCALL3(op, arg1, arg2, arg3)                              \
-       __PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1),                  \
+       __PVOP_VCALL(op, PVOP_CALL_ARG1(arg1),                          \
                     PVOP_CALL_ARG2(arg2), PVOP_CALL_ARG3(arg3))
 
-/* This is the only difference in x86_64. We can make it much simpler */
-#ifdef CONFIG_X86_32
 #define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4)                        \
        __PVOP_CALL(rettype, op,                                        \
-                   "push %[_arg4];", "lea 4(%%esp),%%esp;",            \
-                   PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),         \
-                   PVOP_CALL_ARG3(arg3), [_arg4] "mr" ((u32)(arg4)))
-#define PVOP_VCALL4(op, arg1, arg2, arg3, arg4)                                \
-       __PVOP_VCALL(op,                                                \
-                   "push %[_arg4];", "lea 4(%%esp),%%esp;",            \
-                   "0" ((u32)(arg1)), "1" ((u32)(arg2)),               \
-                   "2" ((u32)(arg3)), [_arg4] "mr" ((u32)(arg4)))
-#else
-#define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4)                        \
-       __PVOP_CALL(rettype, op, "", "",                                \
                    PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),         \
                    PVOP_CALL_ARG3(arg3), PVOP_CALL_ARG4(arg4))
 #define PVOP_VCALL4(op, arg1, arg2, arg3, arg4)                                \
-       __PVOP_VCALL(op, "", "",                                        \
-                    PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),        \
+       __PVOP_VCALL(op, PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),    \
                     PVOP_CALL_ARG3(arg3), PVOP_CALL_ARG4(arg4))
-#endif
 
 /* Lazy mode for batching updates / context switch */
 enum paravirt_lazy_mode {