GNU Linux-libre 4.19.264-gnu1
[releases.git] / include / asm-generic / percpu.h
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_GENERIC_PERCPU_H_
3 #define _ASM_GENERIC_PERCPU_H_
4
5 #include <linux/compiler.h>
6 #include <linux/threads.h>
7 #include <linux/percpu-defs.h>
8
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/*
 * Debug build: smp_processor_id() (unlike raw_smp_processor_id())
 * complains if it is used in a preemptible context, so misuse of
 * my_cpu_offset gets caught at runtime.
 */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch hook to set up the per-cpu memory areas during early boot. */
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
52
/*
 * Linker section that holds percpu variables.  On SMP each variable is
 * placed in ".data..percpu" so that it can be replicated once per CPU;
 * on UP there is only a single instance, so plain ".data" suffices.
 */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

/* Arch hook: extra attributes for percpu variable declarations. */
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

/* Arch hook: extra attributes for percpu variable definitions. */
#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
68
/*
 * Generic "raw" percpu accessors.  These provide no protection against
 * preemption or interrupts: the caller must guarantee it cannot migrate
 * to another CPU (or be interrupted by a conflicting user) between the
 * address computation and the access.
 *
 * raw_cpu_generic_read() - read this CPU's instance of @pcp.
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})
73
/*
 * Apply assignment operator @op (e.g. =, +=, &=, |=) with @val to this
 * CPU's instance of @pcp.  Wrapped in do { } while (0) so the macro
 * behaves as a single statement; not preemption/IRQ safe.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
78
/*
 * Add @val to this CPU's instance of @pcp and yield the new value.
 * The local __p caches the pointer so @pcp is only evaluated once;
 * the GNU statement expression returns *__p as the macro's value.
 */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})
86
/*
 * Store @nval into this CPU's instance of @pcp and yield the previous
 * value.  Plain load + store, not an atomic exchange: callers must
 * provide their own protection (the this_cpu_* wrappers disable IRQs).
 */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})
95
/*
 * Compare-and-exchange on this CPU's instance of @pcp: if the current
 * value equals @oval, store @nval.  Always yields the value that was
 * read, so the caller can detect success by comparing against @oval.
 * Not atomic wrt other CPUs or interrupts.
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})
105
/*
 * Double compare-and-exchange: update the pair (@pcp1, @pcp2) to
 * (@nval1, @nval2) only if both currently hold (@oval1, @oval2).
 * Yields 1 when both stores were performed, 0 otherwise.
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2  == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})
118
/*
 * Read @pcp with only preemption disabled.  Used for native-word-sized
 * values, where a single READ_ONCE() load cannot be torn; READ_ONCE()
 * also stops the compiler from re-reading or splitting the access.
 * The _notrace variants keep this usable from tracing/instrumented code.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) __ret;						\
	preempt_disable_notrace();					\
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	__ret;								\
})
127
/*
 * Read @pcp with local interrupts disabled.  Needed when the value is
 * wider than a native word: the multi-access load must not race with
 * an interrupt handler updating the same percpu variable.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
137
/*
 * Safe generic read of @pcp: take the cheap preempt-disable path for
 * native word sizes, fall back to the irq-disable path otherwise.
 * __native_word() is a compile-time predicate, so the untaken branch
 * is eliminated by the compiler.
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
147
/*
 * IRQ-safe variant of raw_cpu_generic_to_op(): disabling local
 * interrupts also prevents preemption, so the read-modify-write on
 * this CPU's @pcp cannot race with migration or local IRQ handlers.
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)
155
156
/* IRQ-safe variant of raw_cpu_generic_add_return(). */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
166
/* IRQ-safe variant of raw_cpu_generic_xchg(). */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
176
/* IRQ-safe variant of raw_cpu_generic_cmpxchg(). */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
186
/* IRQ-safe variant of raw_cpu_generic_cmpxchg_double(). */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
197
/*
 * Per-size raw_cpu_* fallbacks.  An architecture that does not supply
 * an optimized raw_cpu_<op>_<size>() for a given access size (1, 2, 4
 * or 8 bytes) gets the generic, non-IRQ-safe implementation above.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
322
/*
 * Per-size this_cpu_* fallbacks.  Same pattern as the raw_cpu_* group,
 * but the generic implementations used here are the preemption/IRQ-safe
 * this_cpu_generic_*() variants.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
447
448 #endif /* _ASM_GENERIC_PERCPU_H_ */