Revert "PCI/ASPM: Do not initialize link state when aspm_disabled is set"
[linux-2.6-microblaze.git] / tools / arch / arm64 / include / asm / barrier.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _TOOLS_LINUX_ASM_AARCH64_BARRIER_H
#define _TOOLS_LINUX_ASM_AARCH64_BARRIER_H

/*
 * From tools/perf/perf-sys.h, last modified in:
 * f428ebd184c82a7914b2aa7e9f868918aaf7ea78 perf tools: Fix AAAAARGH64 memory barriers
 *
 * XXX: arch/arm64/include/asm/barrier.h in the kernel sources uses dsb; is
 * this a case like arm32, where we do things differently in userspace?
 */

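/*
 * All of these are inner-shareable "dmb" barriers: "ish" orders loads and
 * stores, "ishst" orders only stores against later stores, and "ishld"
 * orders prior loads against later loads and stores.
 */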
#define mb()            asm volatile("dmb ish" ::: "memory")
#define wmb()           asm volatile("dmb ishst" ::: "memory")
#define rmb()           asm volatile("dmb ishld" ::: "memory")

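/*
 * Store with release semantics: stlr{b,h,} orders all earlier loads and
 * stores before the store itself. The union feeds an arbitrary scalar
 * type to the asm through a byte buffer without violating aliasing rules.
 */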
#define smp_store_release(p, v)                                 \
do {                                                            \
        union { typeof(*p) __val; char __c[1]; } __u =          \
                { .__val = (__force typeof(*p)) (v) };          \
                                                                \
        switch (sizeof(*p)) {                                   \
        case 1:                                                 \
                asm volatile ("stlrb %w1, %0"                   \
                                : "=Q" (*p)                     \
                                : "r" (*(__u8 *)__u.__c)        \
                                : "memory");                    \
                break;                                          \
        case 2:                                                 \
                asm volatile ("stlrh %w1, %0"                   \
                                : "=Q" (*p)                     \
                                : "r" (*(__u16 *)__u.__c)       \
                                : "memory");                    \
                break;                                          \
        case 4:                                                 \
                asm volatile ("stlr %w1, %0"                    \
                                : "=Q" (*p)                     \
                                : "r" (*(__u32 *)__u.__c)       \
                                : "memory");                    \
                break;                                          \
        case 8:                                                 \
                asm volatile ("stlr %1, %0"                     \
                                : "=Q" (*p)                     \
                                : "r" (*(__u64 *)__u.__c)       \
                                : "memory");                    \
                break;                                          \
        default:                                                \
                /* Only to shut up gcc ... */                   \
                mb();                                           \
                break;                                          \
        }                                                       \
} while (0)

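/*
 * Load with acquire semantics: ldar{b,h,} orders the load before all
 * later loads and stores. The loaded bytes are read back out of the
 * union with the caller's type.
 */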
#define smp_load_acquire(p)                                     \
({                                                              \
        union { typeof(*p) __val; char __c[1]; } __u;           \
                                                                \
        switch (sizeof(*p)) {                                   \
        case 1:                                                 \
                asm volatile ("ldarb %w0, %1"                   \
                        : "=r" (*(__u8 *)__u.__c)               \
                        : "Q" (*p) : "memory");                 \
                break;                                          \
        case 2:                                                 \
                asm volatile ("ldarh %w0, %1"                   \
                        : "=r" (*(__u16 *)__u.__c)              \
                        : "Q" (*p) : "memory");                 \
                break;                                          \
        case 4:                                                 \
                asm volatile ("ldar %w0, %1"                    \
                        : "=r" (*(__u32 *)__u.__c)              \
                        : "Q" (*p) : "memory");                 \
                break;                                          \
        case 8:                                                 \
                asm volatile ("ldar %0, %1"                     \
                        : "=r" (*(__u64 *)__u.__c)              \
                        : "Q" (*p) : "memory");                 \
                break;                                          \
        default:                                                \
                /* Only to shut up gcc ... */                   \
                mb();                                           \
                break;                                          \
        }                                                       \
        __u.__val;                                              \
})

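/*
 * Usage sketch (illustrative, not part of the original header): the usual
 * release/acquire pairing for a single-producer/single-consumer ring,
 * similar in spirit to how the perf mmap ring is consumed. "buf", "head",
 * "tail" and "mask" are hypothetical names.
 *
 *      producer:
 *              buf[head & mask] = item;
 *              smp_store_release(&head, head + 1);     // publish the slot
 *
 *      consumer:
 *              u64 h = smp_load_acquire(&head);        // see producer's stores
 *              while (tail != h)
 *                      consume(buf[tail++ & mask]);
 */
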
#endif /* _TOOLS_LINUX_ASM_AARCH64_BARRIER_H */