work around gcc bugs with 'asm goto' with outputs
author Linus Torvalds <torvalds@linux-foundation.org>
Fri, 9 Feb 2024 20:39:31 +0000 (12:39 -0800)
committer Linus Torvalds <torvalds@linux-foundation.org>
Fri, 9 Feb 2024 23:57:48 +0000 (15:57 -0800)
We've had issues with gcc and 'asm goto' before, and we created an
'asm_volatile_goto()' macro for that in the past: see commits
3f0116c3238a ("compiler/gcc4: Add quirk for 'asm goto' miscompilation
bug") and a9f180345f53 ("compiler/gcc4: Make quirk for
asm_volatile_goto() unconditional").

Then, much later, we ended up removing the workaround in commit
43c249ea0b1e ("compiler-gcc.h: remove ancient workaround for gcc PR
58670") because we no longer supported building the kernel with the
affected gcc versions, but we left the macro uses around.

Now, Sean Christopherson reports a new version of a very similar
problem, which is fixed by re-applying that ancient workaround.  This
time, though, the problem is limited to the 'asm goto with outputs'
case, so instead of re-introducing the old workaround as-is, let's
rename it and limit it to just that much less common case.

It looks like there are at least two separate issues that all hit in
this area:

 (a) some versions of gcc don't mark the asm goto as 'volatile' when it
     has outputs:

        https://gcc.gnu.org/bugzilla/show_bug.cgi?id=98619
        https://gcc.gnu.org/bugzilla/show_bug.cgi?id=110420

     which is easy to work around by just adding the 'volatile' by hand.

 (b) Internal compiler errors:

        https://gcc.gnu.org/bugzilla/show_bug.cgi?id=110422

     which are worked around by adding the extra empty 'asm' as a
     barrier, as in the original workaround.

But the problem Sean sees may be a third issue, since it involves bad
code generation (not an ICE) even with the manually added 'volatile'.

The same old workaround works for this case too, even if it feels a bit
like voodoo programming and may only be hiding the issue.
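
As an illustration only (this snippet is not part of the patch, and the
function in it is made up), an 'asm goto' with an output now goes
through the renamed wrapper, which supplies both the explicit
'volatile' and the trailing empty asm barrier:

	/*
	 * Hypothetical sketch: x86-64, built with a gcc that supports
	 * 'asm goto' with outputs.  The macro body matches the patch.
	 */
	#define asm_goto_output(x...) \
		do { asm volatile goto(x); asm (""); } while (0)

	/* Load *p into *val, branching to 'bad' if the value is negative. */
	static inline int read_nonneg(const int *p, int *val)
	{
		asm_goto_output("movl (%1), %0\n\t"
				"testl %0, %0\n\t"
				"js %l[bad]"
				: "=r" (*val)	/* the output operand is what needs the quirk */
				: "r" (p)
				: "cc"
				: bad);
		return 0;
	bad:
		return -1;
	}

Uses without outputs simply become plain 'asm goto', which is what most
of the conversions below are.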

Reported-and-tested-by: Sean Christopherson <seanjc@google.com>
Link: https://lore.kernel.org/all/20240208220604.140859-1-seanjc@google.com/
Cc: Nick Desaulniers <ndesaulniers@google.com>
Cc: Uros Bizjak <ubizjak@gmail.com>
Cc: Jakub Jelinek <jakub@redhat.com>
Cc: Andrew Pinski <quic_apinski@quicinc.com>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
35 files changed:
arch/arc/include/asm/jump_label.h
arch/arm/include/asm/jump_label.h
arch/arm64/include/asm/alternative-macros.h
arch/arm64/include/asm/jump_label.h
arch/csky/include/asm/jump_label.h
arch/loongarch/include/asm/jump_label.h
arch/mips/include/asm/jump_label.h
arch/parisc/include/asm/jump_label.h
arch/powerpc/include/asm/jump_label.h
arch/powerpc/include/asm/uaccess.h
arch/powerpc/kernel/irq_64.c
arch/riscv/include/asm/arch_hweight.h
arch/riscv/include/asm/bitops.h
arch/riscv/include/asm/checksum.h
arch/riscv/include/asm/cpufeature.h
arch/riscv/include/asm/jump_label.h
arch/riscv/lib/csum.c
arch/s390/include/asm/jump_label.h
arch/sparc/include/asm/jump_label.h
arch/um/include/asm/cpufeature.h
arch/x86/include/asm/cpufeature.h
arch/x86/include/asm/jump_label.h
arch/x86/include/asm/rmwcc.h
arch/x86/include/asm/special_insns.h
arch/x86/include/asm/uaccess.h
arch/x86/kvm/svm/svm_ops.h
arch/x86/kvm/vmx/vmx.c
arch/x86/kvm/vmx/vmx_ops.h
arch/xtensa/include/asm/jump_label.h
include/linux/compiler-gcc.h
include/linux/compiler_types.h
net/netfilter/nft_set_pipapo_avx2.c
samples/bpf/asm_goto_workaround.h
tools/arch/x86/include/asm/rmwcc.h
tools/include/linux/compiler_types.h

diff --git a/arch/arc/include/asm/jump_label.h b/arch/arc/include/asm/jump_label.h
index 9d96180..a339223 100644
@@ -31,7 +31,7 @@
 static __always_inline bool arch_static_branch(struct static_key *key,
                                               bool branch)
 {
-       asm_volatile_goto(".balign "__stringify(JUMP_LABEL_NOP_SIZE)"   \n"
+       asm goto(".balign "__stringify(JUMP_LABEL_NOP_SIZE)"            \n"
                 "1:                                                    \n"
                 "nop                                                   \n"
                 ".pushsection __jump_table, \"aw\"                     \n"
@@ -47,7 +47,7 @@ l_yes:
 static __always_inline bool arch_static_branch_jump(struct static_key *key,
                                                    bool branch)
 {
-       asm_volatile_goto(".balign "__stringify(JUMP_LABEL_NOP_SIZE)"   \n"
+       asm goto(".balign "__stringify(JUMP_LABEL_NOP_SIZE)"            \n"
                 "1:                                                    \n"
                 "b %l[l_yes]                                           \n"
                 ".pushsection __jump_table, \"aw\"                     \n"
diff --git a/arch/arm/include/asm/jump_label.h b/arch/arm/include/asm/jump_label.h
index e12d7d0..e4eb54f 100644
@@ -11,7 +11,7 @@
 
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 WASM(nop) "\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
                 ".word 1b, %l[l_yes], %c0\n\t"
@@ -25,7 +25,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 WASM(b) " %l[l_yes]\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
                 ".word 1b, %l[l_yes], %c0\n\t"
diff --git a/arch/arm64/include/asm/alternative-macros.h b/arch/arm64/include/asm/alternative-macros.h
index 210bb43..d328f54 100644
@@ -229,7 +229,7 @@ alternative_has_cap_likely(const unsigned long cpucap)
        if (!cpucap_is_possible(cpucap))
                return false;
 
-       asm_volatile_goto(
+       asm goto(
        ALTERNATIVE_CB("b       %l[l_no]", %[cpucap], alt_cb_patch_nops)
        :
        : [cpucap] "i" (cpucap)
@@ -247,7 +247,7 @@ alternative_has_cap_unlikely(const unsigned long cpucap)
        if (!cpucap_is_possible(cpucap))
                return false;
 
-       asm_volatile_goto(
+       asm goto(
        ALTERNATIVE("nop", "b   %l[l_yes]", %[cpucap])
        :
        : [cpucap] "i" (cpucap)
diff --git a/arch/arm64/include/asm/jump_label.h b/arch/arm64/include/asm/jump_label.h
index 48ddc0f..6aafbb7 100644
@@ -18,7 +18,7 @@
 static __always_inline bool arch_static_branch(struct static_key * const key,
                                               const bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "1:     nop                                     \n\t"
                 "      .pushsection    __jump_table, \"aw\"    \n\t"
                 "      .align          3                       \n\t"
@@ -35,7 +35,7 @@ l_yes:
 static __always_inline bool arch_static_branch_jump(struct static_key * const key,
                                                    const bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "1:     b               %l[l_yes]               \n\t"
                 "      .pushsection    __jump_table, \"aw\"    \n\t"
                 "      .align          3                       \n\t"
diff --git a/arch/csky/include/asm/jump_label.h b/arch/csky/include/asm/jump_label.h
index 98a3f4b..ef2e37a 100644
@@ -12,7 +12,7 @@
 static __always_inline bool arch_static_branch(struct static_key *key,
                                               bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "1:     nop32                                   \n"
                "       .pushsection    __jump_table, \"aw\"    \n"
                "       .align          2                       \n"
@@ -29,7 +29,7 @@ label:
 static __always_inline bool arch_static_branch_jump(struct static_key *key,
                                                    bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "1:     bsr32           %l[label]               \n"
                "       .pushsection    __jump_table, \"aw\"    \n"
                "       .align          2                       \n"
diff --git a/arch/loongarch/include/asm/jump_label.h b/arch/loongarch/include/asm/jump_label.h
index 3cea299..29acfe3 100644
@@ -22,7 +22,7 @@
 
 static __always_inline bool arch_static_branch(struct static_key * const key, const bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "1:     nop                     \n\t"
                JUMP_TABLE_ENTRY
                :  :  "i"(&((char *)key)[branch]) :  : l_yes);
@@ -35,7 +35,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key * const key, const bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "1:     b       %l[l_yes]       \n\t"
                JUMP_TABLE_ENTRY
                :  :  "i"(&((char *)key)[branch]) :  : l_yes);
diff --git a/arch/mips/include/asm/jump_label.h b/arch/mips/include/asm/jump_label.h
index 081be98..ff5d388 100644
@@ -39,7 +39,7 @@ extern void jump_label_apply_nops(struct module *mod);
 
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\t" B_INSN " 2f\n\t"
+       asm goto("1:\t" B_INSN " 2f\n\t"
                "2:\t.insn\n\t"
                ".pushsection __jump_table,  \"aw\"\n\t"
                WORD_INSN " 1b, %l[l_yes], %0\n\t"
@@ -53,7 +53,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\t" J_INSN " %l[l_yes]\n\t"
+       asm goto("1:\t" J_INSN " %l[l_yes]\n\t"
                ".pushsection __jump_table,  \"aw\"\n\t"
                WORD_INSN " 1b, %l[l_yes], %0\n\t"
                ".popsection\n\t"
diff --git a/arch/parisc/include/asm/jump_label.h b/arch/parisc/include/asm/jump_label.h
index 9442879..317ebc5 100644
@@ -12,7 +12,7 @@
 
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 "nop\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
                 ".align %1\n\t"
@@ -29,7 +29,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 "b,n %l[l_yes]\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
                 ".align %1\n\t"
diff --git a/arch/powerpc/include/asm/jump_label.h b/arch/powerpc/include/asm/jump_label.h
index 93ce3ec..2f2a86e 100644
@@ -17,7 +17,7 @@
 
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 "nop # arch_static_branch\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
                 ".long 1b - ., %l[l_yes] - .\n\t"
@@ -32,7 +32,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 "b %l[l_yes] # arch_static_branch_jump\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
                 ".long 1b - ., %l[l_yes] - .\n\t"
diff --git a/arch/powerpc/include/asm/uaccess.h b/arch/powerpc/include/asm/uaccess.h
index f1f9890..de10437 100644
@@ -74,7 +74,7 @@ __pu_failed:                                                  \
 /* -mprefixed can generate offsets beyond range, fall back hack */
 #ifdef CONFIG_PPC_KERNEL_PREFIXED
 #define __put_user_asm_goto(x, addr, label, op)                        \
-       asm_volatile_goto(                                      \
+       asm goto(                                       \
                "1:     " op " %0,0(%1) # put_user\n"           \
                EX_TABLE(1b, %l2)                               \
                :                                               \
@@ -83,7 +83,7 @@ __pu_failed:                                                  \
                : label)
 #else
 #define __put_user_asm_goto(x, addr, label, op)                        \
-       asm_volatile_goto(                                      \
+       asm goto(                                       \
                "1:     " op "%U1%X1 %0,%1      # put_user\n"   \
                EX_TABLE(1b, %l2)                               \
                :                                               \
@@ -97,7 +97,7 @@ __pu_failed:                                                  \
        __put_user_asm_goto(x, ptr, label, "std")
 #else /* __powerpc64__ */
 #define __put_user_asm2_goto(x, addr, label)                   \
-       asm_volatile_goto(                                      \
+       asm goto(                                       \
                "1:     stw%X1 %0, %1\n"                        \
                "2:     stw%X1 %L0, %L1\n"                      \
                EX_TABLE(1b, %l2)                               \
@@ -146,7 +146,7 @@ do {                                                                \
 /* -mprefixed can generate offsets beyond range, fall back hack */
 #ifdef CONFIG_PPC_KERNEL_PREFIXED
 #define __get_user_asm_goto(x, addr, label, op)                        \
-       asm_volatile_goto(                                      \
+       asm_goto_output(                                        \
                "1:     "op" %0,0(%1)   # get_user\n"           \
                EX_TABLE(1b, %l2)                               \
                : "=r" (x)                                      \
@@ -155,7 +155,7 @@ do {                                                                \
                : label)
 #else
 #define __get_user_asm_goto(x, addr, label, op)                        \
-       asm_volatile_goto(                                      \
+       asm_goto_output(                                        \
                "1:     "op"%U1%X1 %0, %1       # get_user\n"   \
                EX_TABLE(1b, %l2)                               \
                : "=r" (x)                                      \
@@ -169,7 +169,7 @@ do {                                                                \
        __get_user_asm_goto(x, addr, label, "ld")
 #else /* __powerpc64__ */
 #define __get_user_asm2_goto(x, addr, label)                   \
-       asm_volatile_goto(                                      \
+       asm_goto_output(                                        \
                "1:     lwz%X1 %0, %1\n"                        \
                "2:     lwz%X1 %L0, %L1\n"                      \
                EX_TABLE(1b, %l2)                               \
diff --git a/arch/powerpc/kernel/irq_64.c b/arch/powerpc/kernel/irq_64.c
index 938e668..d5c48d1 100644
@@ -230,7 +230,7 @@ again:
         * This allows interrupts to be unmasked without hard disabling, and
         * also without new hard interrupts coming in ahead of pending ones.
         */
-       asm_volatile_goto(
+       asm goto(
 "1:                                    \n"
 "              lbz     9,%0(13)        \n"
 "              cmpwi   9,0             \n"
diff --git a/arch/riscv/include/asm/arch_hweight.h b/arch/riscv/include/asm/arch_hweight.h
index c20236a..85b2c44 100644
@@ -20,7 +20,7 @@
 static __always_inline unsigned int __arch_hweight32(unsigned int w)
 {
 #ifdef CONFIG_RISCV_ISA_ZBB
-       asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
+       asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
                                      RISCV_ISA_EXT_ZBB, 1)
                          : : : : legacy);
 
@@ -51,7 +51,7 @@ static inline unsigned int __arch_hweight8(unsigned int w)
 static __always_inline unsigned long __arch_hweight64(__u64 w)
 {
 # ifdef CONFIG_RISCV_ISA_ZBB
-       asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
+       asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
                                      RISCV_ISA_EXT_ZBB, 1)
                          : : : : legacy);
 
diff --git a/arch/riscv/include/asm/bitops.h b/arch/riscv/include/asm/bitops.h
index 9ffc355..329d824 100644
@@ -39,7 +39,7 @@ static __always_inline unsigned long variable__ffs(unsigned long word)
 {
        int num;
 
-       asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
+       asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
                                      RISCV_ISA_EXT_ZBB, 1)
                          : : : : legacy);
 
@@ -95,7 +95,7 @@ static __always_inline unsigned long variable__fls(unsigned long word)
 {
        int num;
 
-       asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
+       asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
                                      RISCV_ISA_EXT_ZBB, 1)
                          : : : : legacy);
 
@@ -154,7 +154,7 @@ static __always_inline int variable_ffs(int x)
        if (!x)
                return 0;
 
-       asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
+       asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
                                      RISCV_ISA_EXT_ZBB, 1)
                          : : : : legacy);
 
@@ -209,7 +209,7 @@ static __always_inline int variable_fls(unsigned int x)
        if (!x)
                return 0;
 
-       asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
+       asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
                                      RISCV_ISA_EXT_ZBB, 1)
                          : : : : legacy);
 
diff --git a/arch/riscv/include/asm/checksum.h b/arch/riscv/include/asm/checksum.h
index a5b60b5..88e6f14 100644
@@ -53,7 +53,7 @@ static inline __sum16 ip_fast_csum(const void *iph, unsigned int ihl)
            IS_ENABLED(CONFIG_RISCV_ALTERNATIVE)) {
                unsigned long fold_temp;
 
-               asm_volatile_goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
+               asm goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
                                              RISCV_ISA_EXT_ZBB, 1)
                    :
                    :
diff --git a/arch/riscv/include/asm/cpufeature.h b/arch/riscv/include/asm/cpufeature.h
index 5a626ed..0bd1186 100644
@@ -80,7 +80,7 @@ riscv_has_extension_likely(const unsigned long ext)
                           "ext must be < RISCV_ISA_EXT_MAX");
 
        if (IS_ENABLED(CONFIG_RISCV_ALTERNATIVE)) {
-               asm_volatile_goto(
+               asm goto(
                ALTERNATIVE("j  %l[l_no]", "nop", 0, %[ext], 1)
                :
                : [ext] "i" (ext)
@@ -103,7 +103,7 @@ riscv_has_extension_unlikely(const unsigned long ext)
                           "ext must be < RISCV_ISA_EXT_MAX");
 
        if (IS_ENABLED(CONFIG_RISCV_ALTERNATIVE)) {
-               asm_volatile_goto(
+               asm goto(
                ALTERNATIVE("nop", "j   %l[l_yes]", 0, %[ext], 1)
                :
                : [ext] "i" (ext)
diff --git a/arch/riscv/include/asm/jump_label.h b/arch/riscv/include/asm/jump_label.h
index 14a5ea8..4a35d78 100644
@@ -17,7 +17,7 @@
 static __always_inline bool arch_static_branch(struct static_key * const key,
                                               const bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "       .align          2                       \n\t"
                "       .option push                            \n\t"
                "       .option norelax                         \n\t"
@@ -39,7 +39,7 @@ label:
 static __always_inline bool arch_static_branch_jump(struct static_key * const key,
                                                    const bool branch)
 {
-       asm_volatile_goto(
+       asm goto(
                "       .align          2                       \n\t"
                "       .option push                            \n\t"
                "       .option norelax                         \n\t"
diff --git a/arch/riscv/lib/csum.c b/arch/riscv/lib/csum.c
index af3df52..74af3ab 100644
@@ -53,7 +53,7 @@ __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
                 * support, so nop when Zbb is available and jump when Zbb is
                 * not available.
                 */
-               asm_volatile_goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
+               asm goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
                                              RISCV_ISA_EXT_ZBB, 1)
                                  :
                                  :
@@ -170,7 +170,7 @@ do_csum_with_alignment(const unsigned char *buff, int len)
                 * support, so nop when Zbb is available and jump when Zbb is
                 * not available.
                 */
-               asm_volatile_goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
+               asm goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
                                              RISCV_ISA_EXT_ZBB, 1)
                                  :
                                  :
@@ -178,7 +178,7 @@ do_csum_with_alignment(const unsigned char *buff, int len)
                                  : no_zbb);
 
 #ifdef CONFIG_32BIT
-               asm_volatile_goto(".option push                 \n\
+               asm_goto_output(".option push                   \n\
                .option arch,+zbb                               \n\
                        rori    %[fold_temp], %[csum], 16       \n\
                        andi    %[offset], %[offset], 1         \n\
@@ -193,7 +193,7 @@ do_csum_with_alignment(const unsigned char *buff, int len)
 
                return (unsigned short)csum;
 #else /* !CONFIG_32BIT */
-               asm_volatile_goto(".option push                 \n\
+               asm_goto_output(".option push                   \n\
                .option arch,+zbb                               \n\
                        rori    %[fold_temp], %[csum], 32       \n\
                        add     %[csum], %[fold_temp], %[csum]  \n\
@@ -257,7 +257,7 @@ do_csum_no_alignment(const unsigned char *buff, int len)
                 * support, so nop when Zbb is available and jump when Zbb is
                 * not available.
                 */
-               asm_volatile_goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
+               asm goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
                                              RISCV_ISA_EXT_ZBB, 1)
                                  :
                                  :
diff --git a/arch/s390/include/asm/jump_label.h b/arch/s390/include/asm/jump_label.h
index 895f774..bf78cf3 100644
@@ -25,7 +25,7 @@
  */
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("0:   brcl 0,%l[label]\n"
+       asm goto("0:    brcl 0,%l[label]\n"
                          ".pushsection __jump_table,\"aw\"\n"
                          ".balign      8\n"
                          ".long        0b-.,%l[label]-.\n"
@@ -39,7 +39,7 @@ label:
 
 static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("0:   brcl 15,%l[label]\n"
+       asm goto("0:    brcl 15,%l[label]\n"
                          ".pushsection __jump_table,\"aw\"\n"
                          ".balign      8\n"
                          ".long        0b-.,%l[label]-.\n"
diff --git a/arch/sparc/include/asm/jump_label.h b/arch/sparc/include/asm/jump_label.h
index 94eb529..2718cbe 100644
@@ -10,7 +10,7 @@
 
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 "nop\n\t"
                 "nop\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
@@ -26,7 +26,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                 "b %l[l_yes]\n\t"
                 "nop\n\t"
                 ".pushsection __jump_table,  \"aw\"\n\t"
diff --git a/arch/um/include/asm/cpufeature.h b/arch/um/include/asm/cpufeature.h
index 4b6d1b5..66fe06d 100644
@@ -75,7 +75,7 @@ extern void setup_clear_cpu_cap(unsigned int bit);
  */
 static __always_inline bool _static_cpu_has(u16 bit)
 {
-       asm_volatile_goto("1: jmp 6f\n"
+       asm goto("1: jmp 6f\n"
                 "2:\n"
                 ".skip -(((5f-4f) - (2b-1b)) > 0) * "
                         "((5f-4f) - (2b-1b)),0x90\n"
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index a26bebb..a127369 100644
@@ -168,7 +168,7 @@ extern void clear_cpu_cap(struct cpuinfo_x86 *c, unsigned int bit);
  */
 static __always_inline bool _static_cpu_has(u16 bit)
 {
-       asm_volatile_goto(
+       asm goto(
                ALTERNATIVE_TERNARY("jmp 6f", %P[feature], "", "jmp %l[t_no]")
                ".pushsection .altinstr_aux,\"ax\"\n"
                "6:\n"
diff --git a/arch/x86/include/asm/jump_label.h b/arch/x86/include/asm/jump_label.h
index 071572e..cbbef32 100644
@@ -24,7 +24,7 @@
 
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:"
+       asm goto("1:"
                "jmp %l[l_yes] # objtool NOPs this \n\t"
                JUMP_TABLE_ENTRY
                : :  "i" (key), "i" (2 | branch) : : l_yes);
@@ -38,7 +38,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch(struct static_key * const key, const bool branch)
 {
-       asm_volatile_goto("1:"
+       asm goto("1:"
                ".byte " __stringify(BYTES_NOP5) "\n\t"
                JUMP_TABLE_ENTRY
                : :  "i" (key), "i" (branch) : : l_yes);
@@ -52,7 +52,7 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key * const key, const bool branch)
 {
-       asm_volatile_goto("1:"
+       asm goto("1:"
                "jmp %l[l_yes]\n\t"
                JUMP_TABLE_ENTRY
                : :  "i" (key), "i" (branch) : : l_yes);
diff --git a/arch/x86/include/asm/rmwcc.h b/arch/x86/include/asm/rmwcc.h
index 4b081e0..363266c 100644
@@ -13,7 +13,7 @@
 #define __GEN_RMWcc(fullop, _var, cc, clobbers, ...)                   \
 ({                                                                     \
        bool c = false;                                                 \
-       asm_volatile_goto (fullop "; j" #cc " %l[cc_label]"             \
+       asm goto (fullop "; j" #cc " %l[cc_label]"              \
                        : : [var] "m" (_var), ## __VA_ARGS__            \
                        : clobbers : cc_label);                         \
        if (0) {                                                        \
diff --git a/arch/x86/include/asm/special_insns.h b/arch/x86/include/asm/special_insns.h
index d6cd934..48f8dd4 100644
@@ -205,7 +205,7 @@ static inline void clwb(volatile void *__p)
 #ifdef CONFIG_X86_USER_SHADOW_STACK
 static inline int write_user_shstk_64(u64 __user *addr, u64 val)
 {
-       asm_volatile_goto("1: wrussq %[val], (%[addr])\n"
+       asm goto("1: wrussq %[val], (%[addr])\n"
                          _ASM_EXTABLE(1b, %l[fail])
                          :: [addr] "r" (addr), [val] "r" (val)
                          :: fail);
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index 5c367c1..237dc8c 100644
@@ -133,7 +133,7 @@ extern int __get_user_bad(void);
 
 #ifdef CONFIG_X86_32
 #define __put_user_goto_u64(x, addr, label)                    \
-       asm_volatile_goto("\n"                                  \
+       asm goto("\n"                                   \
                     "1:        movl %%eax,0(%1)\n"             \
                     "2:        movl %%edx,4(%1)\n"             \
                     _ASM_EXTABLE_UA(1b, %l2)                   \
@@ -295,7 +295,7 @@ do {                                                                        \
 } while (0)
 
 #define __get_user_asm(x, addr, itype, ltype, label)                   \
-       asm_volatile_goto("\n"                                          \
+       asm_goto_output("\n"                                            \
                     "1:        mov"itype" %[umem],%[output]\n"         \
                     _ASM_EXTABLE_UA(1b, %l2)                           \
                     : [output] ltype(x)                                \
@@ -375,7 +375,7 @@ do {                                                                        \
        __typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);              \
        __typeof__(*(_ptr)) __old = *_old;                              \
        __typeof__(*(_ptr)) __new = (_new);                             \
-       asm_volatile_goto("\n"                                          \
+       asm_goto_output("\n"                                            \
                     "1: " LOCK_PREFIX "cmpxchg"itype" %[new], %[ptr]\n"\
                     _ASM_EXTABLE_UA(1b, %l[label])                     \
                     : CC_OUT(z) (success),                             \
@@ -394,7 +394,7 @@ do {                                                                        \
        __typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);              \
        __typeof__(*(_ptr)) __old = *_old;                              \
        __typeof__(*(_ptr)) __new = (_new);                             \
-       asm_volatile_goto("\n"                                          \
+       asm_goto_output("\n"                                            \
                     "1: " LOCK_PREFIX "cmpxchg8b %[ptr]\n"             \
                     _ASM_EXTABLE_UA(1b, %l[label])                     \
                     : CC_OUT(z) (success),                             \
@@ -477,7 +477,7 @@ struct __large_struct { unsigned long buf[100]; };
  * aliasing issues.
  */
 #define __put_user_goto(x, addr, itype, ltype, label)                  \
-       asm_volatile_goto("\n"                                          \
+       asm goto("\n"                                                   \
                "1:     mov"itype" %0,%1\n"                             \
                _ASM_EXTABLE_UA(1b, %l2)                                \
                : : ltype(x), "m" (__m(addr))                           \
diff --git a/arch/x86/kvm/svm/svm_ops.h b/arch/x86/kvm/svm/svm_ops.h
index 36c8af8..4e72585 100644
@@ -8,7 +8,7 @@
 
 #define svm_asm(insn, clobber...)                              \
 do {                                                           \
-       asm_volatile_goto("1: " __stringify(insn) "\n\t"        \
+       asm goto("1: " __stringify(insn) "\n\t" \
                          _ASM_EXTABLE(1b, %l[fault])           \
                          ::: clobber : fault);                 \
        return;                                                 \
@@ -18,7 +18,7 @@ fault:                                                                \
 
 #define svm_asm1(insn, op1, clobber...)                                \
 do {                                                           \
-       asm_volatile_goto("1: "  __stringify(insn) " %0\n\t"    \
+       asm goto("1: "  __stringify(insn) " %0\n\t"     \
                          _ASM_EXTABLE(1b, %l[fault])           \
                          :: op1 : clobber : fault);            \
        return;                                                 \
@@ -28,7 +28,7 @@ fault:                                                                \
 
 #define svm_asm2(insn, op1, op2, clobber...)                           \
 do {                                                                   \
-       asm_volatile_goto("1: "  __stringify(insn) " %1, %0\n\t"        \
+       asm goto("1: "  __stringify(insn) " %1, %0\n\t" \
                          _ASM_EXTABLE(1b, %l[fault])                   \
                          :: op1, op2 : clobber : fault);               \
        return;                                                         \
diff --git a/arch/x86/kvm/vmx/vmx.c b/arch/x86/kvm/vmx/vmx.c
index e262bc2..1111d9d 100644
@@ -738,7 +738,7 @@ static int vmx_set_guest_uret_msr(struct vcpu_vmx *vmx,
  */
 static int kvm_cpu_vmxoff(void)
 {
-       asm_volatile_goto("1: vmxoff\n\t"
+       asm goto("1: vmxoff\n\t"
                          _ASM_EXTABLE(1b, %l[fault])
                          ::: "cc", "memory" : fault);
 
@@ -2784,7 +2784,7 @@ static int kvm_cpu_vmxon(u64 vmxon_pointer)
 
        cr4_set_bits(X86_CR4_VMXE);
 
-       asm_volatile_goto("1: vmxon %[vmxon_pointer]\n\t"
+       asm goto("1: vmxon %[vmxon_pointer]\n\t"
                          _ASM_EXTABLE(1b, %l[fault])
                          : : [vmxon_pointer] "m"(vmxon_pointer)
                          : : fault);
diff --git a/arch/x86/kvm/vmx/vmx_ops.h b/arch/x86/kvm/vmx/vmx_ops.h
index f41ce3c..8060e5f 100644
@@ -94,7 +94,7 @@ static __always_inline unsigned long __vmcs_readl(unsigned long field)
 
 #ifdef CONFIG_CC_HAS_ASM_GOTO_OUTPUT
 
-       asm_volatile_goto("1: vmread %[field], %[output]\n\t"
+       asm_goto_output("1: vmread %[field], %[output]\n\t"
                          "jna %l[do_fail]\n\t"
 
                          _ASM_EXTABLE(1b, %l[do_exception])
@@ -188,7 +188,7 @@ static __always_inline unsigned long vmcs_readl(unsigned long field)
 
 #define vmx_asm1(insn, op1, error_args...)                             \
 do {                                                                   \
-       asm_volatile_goto("1: " __stringify(insn) " %0\n\t"             \
+       asm goto("1: " __stringify(insn) " %0\n\t"                      \
                          ".byte 0x2e\n\t" /* branch not taken hint */  \
                          "jna %l[error]\n\t"                           \
                          _ASM_EXTABLE(1b, %l[fault])                   \
@@ -205,7 +205,7 @@ fault:                                                                      \
 
 #define vmx_asm2(insn, op1, op2, error_args...)                                \
 do {                                                                   \
-       asm_volatile_goto("1: "  __stringify(insn) " %1, %0\n\t"        \
+       asm goto("1: "  __stringify(insn) " %1, %0\n\t"                 \
                          ".byte 0x2e\n\t" /* branch not taken hint */  \
                          "jna %l[error]\n\t"                           \
                          _ASM_EXTABLE(1b, %l[fault])                   \
diff --git a/arch/xtensa/include/asm/jump_label.h b/arch/xtensa/include/asm/jump_label.h
index c812bf8..46c8596 100644
@@ -13,7 +13,7 @@
 static __always_inline bool arch_static_branch(struct static_key *key,
                                               bool branch)
 {
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                          "_nop\n\t"
                          ".pushsection __jump_table,  \"aw\"\n\t"
                          ".word 1b, %l[l_yes], %c0\n\t"
@@ -38,7 +38,7 @@ static __always_inline bool arch_static_branch_jump(struct static_key *key,
         * make it reachable and wrap both into a no-transform block
         * to avoid any assembler interference with this.
         */
-       asm_volatile_goto("1:\n\t"
+       asm goto("1:\n\t"
                          ".begin no-transform\n\t"
                          "_j %l[l_yes]\n\t"
                          "2:\n\t"
diff --git a/include/linux/compiler-gcc.h b/include/linux/compiler-gcc.h
index aebb65b..c1a963b 100644
                __builtin_unreachable();        \
        } while (0)
 
+/*
+ * GCC 'asm goto' with outputs miscompiles certain code sequences:
+ *
+ *   https://gcc.gnu.org/bugzilla/show_bug.cgi?id=110420
+ *   https://gcc.gnu.org/bugzilla/show_bug.cgi?id=110422
+ *
+ * Work it around via the same compiler barrier quirk that we used
+ * to use for the old 'asm goto' workaround.
+ *
+ * Also, always mark such 'asm goto' statements as volatile: all
+ * asm goto statements are supposed to be volatile as per the
+ * documentation, but some versions of gcc didn't actually do
+ * that for asms with outputs:
+ *
+ *    https://gcc.gnu.org/bugzilla/show_bug.cgi?id=98619
+ */
+#define asm_goto_output(x...) \
+       do { asm volatile goto(x); asm (""); } while (0)
+
 #if defined(CONFIG_ARCH_USE_BUILTIN_BSWAP)
 #define __HAVE_BUILTIN_BSWAP32__
 #define __HAVE_BUILTIN_BSWAP64__
diff --git a/include/linux/compiler_types.h b/include/linux/compiler_types.h
index 6f1ca49..663d879 100644
@@ -362,8 +362,8 @@ struct ftrace_likely_data {
 #define __member_size(p)       __builtin_object_size(p, 1)
 #endif
 
-#ifndef asm_volatile_goto
-#define asm_volatile_goto(x...) asm goto(x)
+#ifndef asm_goto_output
+#define asm_goto_output(x...) asm goto(x)
 #endif
 
 #ifdef CONFIG_CC_HAS_ASM_INLINE
diff --git a/net/netfilter/nft_set_pipapo_avx2.c b/net/netfilter/nft_set_pipapo_avx2.c
index 90e275b..a3a8ddc 100644
@@ -57,7 +57,7 @@
 
 /* Jump to label if @reg is zero */
 #define NFT_PIPAPO_AVX2_NOMATCH_GOTO(reg, label)                       \
-       asm_volatile_goto("vptest %%ymm" #reg ", %%ymm" #reg ";"        \
+       asm goto("vptest %%ymm" #reg ", %%ymm" #reg ";" \
                          "je %l[" #label "]" : : : : label)
 
 /* Store 256 bits from YMM register into memory. Contrary to bucket load
diff --git a/samples/bpf/asm_goto_workaround.h b/samples/bpf/asm_goto_workaround.h
index 7048bb3..634e81d 100644
@@ -4,14 +4,14 @@
 #define __ASM_GOTO_WORKAROUND_H
 
 /*
- * This will bring in asm_volatile_goto and asm_inline macro definitions
+ * This will bring in asm_goto_output and asm_inline macro definitions
  * if enabled by compiler and config options.
  */
 #include <linux/types.h>
 
-#ifdef asm_volatile_goto
-#undef asm_volatile_goto
-#define asm_volatile_goto(x...) asm volatile("invalid use of asm_volatile_goto")
+#ifdef asm_goto_output
+#undef asm_goto_output
+#define asm_goto_output(x...) asm volatile("invalid use of asm_goto_output")
 #endif
 
 /*
diff --git a/tools/arch/x86/include/asm/rmwcc.h b/tools/arch/x86/include/asm/rmwcc.h
index 11ff975..e2ff22b 100644
@@ -4,7 +4,7 @@
 
 #define __GEN_RMWcc(fullop, var, cc, ...)                              \
 do {                                                                   \
-       asm_volatile_goto (fullop "; j" cc " %l[cc_label]"              \
+       asm goto (fullop "; j" cc " %l[cc_label]"               \
                        : : "m" (var), ## __VA_ARGS__                   \
                        : "memory" : cc_label);                         \
        return 0;                                                       \
diff --git a/tools/include/linux/compiler_types.h b/tools/include/linux/compiler_types.h
index 1bdd834..d09f9dc 100644
@@ -36,8 +36,8 @@
 #include <linux/compiler-gcc.h>
 #endif
 
-#ifndef asm_volatile_goto
-#define asm_volatile_goto(x...) asm goto(x)
+#ifndef asm_goto_output
+#define asm_goto_output(x...) asm goto(x)
 #endif
 
 #endif /* __LINUX_COMPILER_TYPES_H */