/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

#define __nops(n)       ".rept  " #n "\nnop\n.endr\n"
#define nops(n)         asm volatile(__nops(n))
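/*
 * __nops(n) expands to an assembler .rept block emitting n NOP
 * instructions; nops(n) wraps that in an asm statement. Illustrative
 * use: nops(3) emits three consecutive NOPs, e.g. as alternative
 * padding.
 */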

#define sev()           asm volatile("sev" : : : "memory")
#define wfe()           asm volatile("wfe" : : : "memory")
#define wfi()           asm volatile("wfi" : : : "memory")
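/*
 * sev() signals an event to all cores, wfe() stalls until an event or
 * pending interrupt arrives, and wfi() stalls until an interrupt is
 * pending. Illustrative pairing: one CPU spins with wfe() in its wait
 * loop while another wakes it with sev().
 */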

#define isb()           asm volatile("isb" : : : "memory")
#define dmb(opt)        asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)        asm volatile("dsb " #opt : : : "memory")
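/*
 * isb() synchronizes the instruction stream; dmb() and dsb() take a
 * shareability/access-type option, e.g. dmb(ish) orders memory accesses
 * within the inner-shareable domain, while dsb(sy) additionally waits
 * for all outstanding accesses to complete system-wide.
 */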

#define psb_csync()     asm volatile("hint #17" : : : "memory")
#define csdb()          asm volatile("hint #20" : : : "memory")
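/*
 * "hint #17" is the encoding of PSB CSYNC (Statistical Profiling
 * synchronization barrier) and "hint #20" is CSDB (Consumption of
 * Speculative Data Barrier). Spelling them as HINTs keeps older
 * assemblers happy; HINT-space instructions should execute as NOPs on
 * CPUs that do not implement the corresponding feature.
 */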

#define spec_bar()      asm volatile(ALTERNATIVE("dsb nsh\nisb\n",              \
                                                 SB_BARRIER_INSN"nop\n",        \
                                                 ARM64_HAS_SB))
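/*
 * spec_bar() is a full speculation barrier: on CPUs with the
 * ARM64_HAS_SB capability the alternative patches in the single SB
 * instruction (padded with a NOP); otherwise the "dsb nsh; isb"
 * sequence provides equivalent behaviour.
 */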

#define mb()            dsb(sy)
#define rmb()           dsb(ld)
#define wmb()           dsb(st)
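/*
 * The mandatory barriers map to full-system data synchronization
 * barriers; rmb()/wmb() use the load-only/store-only variants.
 */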

#define dma_rmb()       dmb(oshld)
#define dma_wmb()       dmb(oshst)
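/*
 * The DMA barriers only need to order accesses against a coherent DMA
 * master, so the cheaper outer-shareable DMB variants suffice.
 */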

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
                                                    unsigned long sz)
{
        unsigned long mask;

        asm volatile(
        "       cmp     %1, %2\n"
        "       sbc     %0, xzr, xzr\n"
        : "=r" (mask)
        : "r" (idx), "Ir" (sz)
        : "cc");

        csdb();
        return mask;
}
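/*
 * CMP sets the carry flag iff idx >= sz (unsigned), so SBC against xzr
 * yields all ones when the index is in range (carry clear) and zero
 * otherwise; the trailing csdb() stops speculative execution from
 * consuming the mask early. Usage sketch (hypothetical caller,
 * mirroring array_index_nospec() from <linux/nospec.h>):
 *
 *	idx &= array_index_mask_nospec(idx, ARRAY_SIZE(array));
 *	val = array[idx];
 */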

#define __smp_mb()      dmb(ish)
#define __smp_rmb()     dmb(ishld)
#define __smp_wmb()     dmb(ishst)
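/*
 * SMP barriers only need to order accesses against other CPUs, which
 * all sit in the inner-shareable domain, hence the cheaper DMB ISH
 * variants.
 */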

#define __smp_store_release(p, v)                                       \
do {                                                                    \
        typeof(p) __p = (p);                                            \
        union { typeof(*p) __val; char __c[1]; } __u =                  \
                { .__val = (__force typeof(*p)) (v) };                  \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_write(__p, sizeof(*p));                             \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("stlrb %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u8 *)__u.__c)                \
                                : "memory");                            \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("stlrh %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u16 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u32 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("stlr %1, %0"                             \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u64 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        }                                                               \
} while (0)
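/*
 * STLR[BH] is a one-way release barrier, so no separate DMB is needed.
 * Usage sketch (hypothetical producer, via the smp_store_release()
 * wrapper from asm-generic/barrier.h): publish data before a flag so
 * that an acquiring consumer observes the data:
 *
 *	data = 42;
 *	smp_store_release(&ready, 1);
 */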

#define __smp_load_acquire(p)                                           \
({                                                                      \
        union { typeof(*p) __val; char __c[1]; } __u;                   \
        typeof(p) __p = (p);                                            \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_read(__p, sizeof(*p));                              \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("ldarb %w0, %1"                           \
                        : "=r" (*(__u8 *)__u.__c)                       \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("ldarh %w0, %1"                           \
                        : "=r" (*(__u16 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (*(__u32 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("ldar %0, %1"                             \
                        : "=r" (*(__u64 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        }                                                               \
        __u.__val;                                                      \
})
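/*
 * LDAR[BH] is a one-way acquire barrier: no later memory access on this
 * CPU can be reordered before the load. Usage sketch (hypothetical
 * consumer, pairing with the producer above):
 *
 *	while (!smp_load_acquire(&ready))
 *		cpu_relax();
 *	do_something(data);
 */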

#define smp_cond_load_relaxed(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        typeof(*ptr) VAL;                                               \
        for (;;) {                                                      \
                VAL = READ_ONCE(*__PTR);                                \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        VAL;                                                            \
})
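/*
 * Rather than busy-polling, the loop parks the CPU in
 * __cmpwait_relaxed(), which uses an exclusive load and WFE to sleep
 * until *ptr is observed to change. Illustrative wait on a hypothetical
 * lock word, with cond_expr referencing the magic VAL variable:
 *
 *	smp_cond_load_relaxed(&lock->val, VAL == 0);
 */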

#define smp_cond_load_acquire(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        typeof(*ptr) VAL;                                               \
        for (;;) {                                                      \
                VAL = smp_load_acquire(__PTR);                          \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        VAL;                                                            \
})
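/*
 * The acquire variant additionally orders the final observed value
 * before all later accesses. Illustrative handoff on a hypothetical
 * per-node flag:
 *
 *	smp_cond_load_acquire(&node->locked, VAL != 0);
 */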

#include <asm-generic/barrier.h>

#endif  /* __ASSEMBLY__ */

#endif  /* __ASM_BARRIER_H */