/* arch/s390/include/asm/percpu.h */
1 #ifndef __ARCH_S390_PERCPU__
2 #define __ARCH_S390_PERCPU__
3
4 #include <linux/preempt.h>
5 #include <asm/cmpxchg.h>
6
/*
 * s390 uses its own implementation for per cpu data: the offset of
 * the cpu local data area is cached in the cpu's lowcore memory, so
 * my_cpu_offset reads resolve to a single lowcore field access.
 */
#define __my_cpu_offset S390_lowcore.percpu_offset
12
13 #ifdef CONFIG_64BIT
14
/*
 * For 64 bit module code, the module may be more than 4G above the
 * per cpu area, use weak definitions to force the compiler to
 * generate external references (a relative branch/load would not
 * reach across such a distance).
 */
#if defined(CONFIG_SMP) && defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif
23
24 /*
25  * We use a compare-and-swap loop since that uses less cpu cycles than
26  * disabling and enabling interrupts like the generic variant would do.
27  */
/*
 * We use a compare-and-swap loop since that uses less cpu cycles than
 * disabling and enabling interrupts like the generic variant would do.
 *
 * Generic read-modify-write on a per cpu variable: reloads the old
 * value and retries the cmpxchg until no other update raced in.
 * Evaluates to the new value.  "op" is pasted between the old value
 * and (val), so it must be a binary operator token (+, &, |, ^).
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)                        \
({                                                                      \
        typedef typeof(pcp) pcp_op_T__;                                 \
        pcp_op_T__ old__, new__, prev__;                                \
        pcp_op_T__ *ptr__;                                              \
        preempt_disable();      /* keep ptr__ on this cpu's area */     \
        ptr__ = __this_cpu_ptr(&(pcp));                                 \
        prev__ = *ptr__;                                                \
        do {                                                            \
                old__ = prev__;                                         \
                new__ = old__ op (val);                                 \
                prev__ = cmpxchg(ptr__, old__, new__);                  \
        } while (prev__ != old__);      /* lost a race: retry */        \
        preempt_enable();                                               \
        new__;                                                          \
})
44
/*
 * 1 and 2 byte operations have no interlocked-access instructions,
 * so they always go through the cmpxchg loop above.
 */
#define this_cpu_add_1(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)         arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)         arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_xor_1(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, ^)
#define this_cpu_xor_2(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, ^)
55
56 #ifndef CONFIG_HAVE_MARCH_Z196_FEATURES
57
/*
 * Pre-z196 machines lack the interlocked-access facility, so the
 * 4 and 8 byte operations fall back to the cmpxchg loop as well.
 */
#define this_cpu_add_4(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)         arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)         arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_xor_4(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, ^)
#define this_cpu_xor_8(pcp, val)        arch_this_cpu_to_op_simple(pcp, val, ^)
68
69 #else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
70
/*
 * Atomic add on a per cpu location using the z196 interlocked-access
 * instructions.  For compile-time constants that fit the signed 8 bit
 * immediate of the add-to-storage instruction (op2, e.g. asi/agsi;
 * hence the -129/128 bounds) the immediate form is emitted, otherwise
 * the register form (op1, e.g. laa/laag) is used.  No value is
 * returned.
 *
 * Wrapped in do { } while (0) so the expansion is a single statement:
 * a bare { } block followed by the caller's ';' would break an
 * unbraced if/else around this_cpu_add_4() and friends.
 */
#define arch_this_cpu_add(pcp, val, op1, op2, szcast)                   \
do {                                                                    \
        typedef typeof(pcp) pcp_op_T__;                                 \
        pcp_op_T__ val__ = (val);                                       \
        pcp_op_T__ old__, *ptr__;                                       \
        preempt_disable();                                              \
        ptr__ = __this_cpu_ptr(&(pcp));                                 \
        if (__builtin_constant_p(val__) &&                              \
            ((szcast)val__ > -129) && ((szcast)val__ < 128)) {          \
                asm volatile(                                           \
                        op2 "   %[ptr__],%[val__]\n"                    \
                        : [ptr__] "+Q" (*ptr__)                         \
                        : [val__] "i" ((szcast)val__)                   \
                        : "cc");                                        \
        } else {                                                        \
                asm volatile(                                           \
                        op1 "   %[old__],%[val__],%[ptr__]\n"           \
                        : [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)   \
                        : [val__] "d" (val__)                           \
                        : "cc");                                        \
        }                                                               \
        preempt_enable();                                               \
} while (0)
94
/* 32/64 bit adds: laa/laag register form, asi/agsi for small constants */
#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)
97
/*
 * Atomic add returning the new value.  The load-and-add instruction
 * (op, laa/laag) leaves the previous contents in old__, so the
 * statement expression evaluates to old__ + val__.
 */
#define arch_this_cpu_add_return(pcp, val, op)                          \
({                                                                      \
        typedef typeof(pcp) pcp_op_T__;                                 \
        pcp_op_T__ val__ = (val);                                       \
        pcp_op_T__ old__, *ptr__;                                       \
        preempt_disable();                                              \
        ptr__ = __this_cpu_ptr(&(pcp));                                 \
        asm volatile(                                                   \
                op "    %[old__],%[val__],%[ptr__]\n"                   \
                : [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)           \
                : [val__] "d" (val__)                                   \
                : "cc");                                                \
        preempt_enable();                                               \
        old__ + val__;                                                  \
})

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")
116
/*
 * Atomic bit operation (and/or/xor) on a per cpu location using an
 * interlocked-access instruction (op, e.g. lan/lao/lax).  The
 * previous contents land in old__ but are discarded; no value is
 * returned.
 *
 * Wrapped in do { } while (0) so the expansion is a single statement:
 * a bare { } block followed by the caller's ';' would break an
 * unbraced if/else around this_cpu_and_4() and friends.
 */
#define arch_this_cpu_to_op(pcp, val, op)                               \
do {                                                                    \
        typedef typeof(pcp) pcp_op_T__;                                 \
        pcp_op_T__ val__ = (val);                                       \
        pcp_op_T__ old__, *ptr__;                                       \
        preempt_disable();                                              \
        ptr__ = __this_cpu_ptr(&(pcp));                                 \
        asm volatile(                                                   \
                op "    %[old__],%[val__],%[ptr__]\n"                   \
                : [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)           \
                : [val__] "d" (val__)                                   \
                : "cc");                                                \
        preempt_enable();                                               \
} while (0)
131
/* 32/64 bit and/or/xor via load-and-{and,or,xor} instructions */
#define this_cpu_and_4(pcp, val)        arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)        arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)         arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)         arch_this_cpu_to_op(pcp, val, "laog")
#define this_cpu_xor_4(pcp, val)        arch_this_cpu_to_op(pcp, val, "lax")
#define this_cpu_xor_8(pcp, val)        arch_this_cpu_to_op(pcp, val, "laxg")
138
139 #endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
140
/*
 * Compare-and-swap on a per cpu location, delegating to the generic
 * cmpxchg() from <asm/cmpxchg.h>; evaluates to what cmpxchg()
 * returns.  preempt_disable/enable only pins ptr__ to this cpu's
 * area for the duration of the operation.
 * NOTE(review): oval/nval are passed to the cmpxchg() macro
 * unevaluated, so side-effecting arguments may be evaluated more
 * than once — matches the behavior of cmpxchg() itself.
 */
#define arch_this_cpu_cmpxchg(pcp, oval, nval)                          \
({                                                                      \
        typedef typeof(pcp) pcp_op_T__;                                 \
        pcp_op_T__ ret__;                                               \
        pcp_op_T__ *ptr__;                                              \
        preempt_disable();                                              \
        ptr__ = __this_cpu_ptr(&(pcp));                                 \
        ret__ = cmpxchg(ptr__, oval, nval);                             \
        preempt_enable();                                               \
        ret__;                                                          \
})

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
157
/*
 * Exchange on a per cpu location, delegating to xchg() from
 * <asm/cmpxchg.h>; evaluates to what xchg() returns.
 */
#define arch_this_cpu_xchg(pcp, nval)                                   \
({                                                                      \
        typeof(pcp) *ptr__;                                             \
        typeof(pcp) ret__;                                              \
        preempt_disable();                                              \
        ptr__ = __this_cpu_ptr(&(pcp));                                 \
        ret__ = xchg(ptr__, nval);                                      \
        preempt_enable();                                               \
        ret__;                                                          \
})

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)
173
/*
 * Double-word compare-and-swap on two per cpu locations, delegating
 * to __cmpxchg_double(); evaluates to its int result.
 * NOTE(review): presumably pcp1/pcp2 must be the usual adjacent,
 * suitably aligned pair required by cmpxchg_double — confirm against
 * <asm/cmpxchg.h>.
 */
#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)        \
({                                                                      \
        typeof(pcp1) o1__ = (o1), n1__ = (n1);                          \
        typeof(pcp2) o2__ = (o2), n2__ = (n2);                          \
        typeof(pcp1) *p1__;                                             \
        typeof(pcp2) *p2__;                                             \
        int ret__;                                                      \
        preempt_disable();                                              \
        p1__ = __this_cpu_ptr(&(pcp1));                                 \
        p2__ = __this_cpu_ptr(&(pcp2));                                 \
        ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__);   \
        preempt_enable();                                               \
        ret__;                                                          \
})

#define this_cpu_cmpxchg_double_4 arch_this_cpu_cmpxchg_double
#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double
191
192 #endif /* CONFIG_64BIT */
193
194 #include <asm-generic/percpu.h>
195
196 #endif /* __ARCH_S390_PERCPU__ */