/* xchg and cmpxchg operation emulation for FR-V
 *
 * For an explanation of how atomic ops work in this arch, see:
 *   Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_CMPXCHG_H
#define _ASM_CMPXCHG_H

#include <linux/types.h>

/*****************************************************************************/
/*
 * exchange value with memory
 */
extern uint64_t __xchg_64(uint64_t i, volatile void *v);

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

#define xchg(ptr, x)                                                    \
({                                                                      \
        __typeof__(ptr) __xg_ptr = (ptr);                               \
        __typeof__(*(ptr)) __xg_orig;                                   \
                                                                        \
        switch (sizeof(__xg_orig)) {                                    \
        case 4:                                                         \
                asm volatile(                                           \
                        "swap%I0 %M0,%1"                                \
                        : "+m"(*__xg_ptr), "=r"(__xg_orig)              \
                        : "1"(x)                                        \
                        : "memory"                                      \
                        );                                              \
                break;                                                  \
                                                                        \
        default:                                                        \
                __xg_orig = (__typeof__(__xg_orig))0;                   \
                asm volatile("break");                                  \
                break;                                                  \
        }                                                               \
                                                                        \
        __xg_orig;                                                      \
})

#else

extern uint32_t __xchg_32(uint32_t i, volatile void *v);

#define xchg(ptr, x)                                                                            \
({                                                                                              \
        __typeof__(ptr) __xg_ptr = (ptr);                                                       \
        __typeof__(*(ptr)) __xg_orig;                                                           \
                                                                                                \
        switch (sizeof(__xg_orig)) {                                                            \
        case 4: __xg_orig = (__typeof__(*(ptr))) __xchg_32((uint32_t) x, __xg_ptr);     break;  \
        default:                                                                                \
                __xg_orig = (__typeof__(__xg_orig))0;                                           \
                asm volatile("break");                                                          \
                break;                                                                          \
        }                                                                                       \
        __xg_orig;                                                                              \
})

#endif

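/*
 * Usage sketch (illustrative only, not part of the original header):
 * xchg() atomically stores a new value into a 32-bit location and returns
 * the previous contents.  The lock word and helper below are hypothetical:
 *
 *	static unsigned long frv_lock_word;
 *
 *	static inline int frv_try_lock(void)
 *	{
 *		return xchg(&frv_lock_word, 1) == 0;	// old value 0 => acquired
 *	}
 */
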
/*****************************************************************************/
/*
 * compare and conditionally exchange value with memory
 * - if (*ptr == test) then orig = *ptr; *ptr = new;
 * - if (*ptr != test) then orig = *ptr;
 */
extern uint64_t __cmpxchg_64(uint64_t test, uint64_t new, volatile uint64_t *v);

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

#define cmpxchg(ptr, test, new)                                                 \
({                                                                              \
        __typeof__(ptr) __xg_ptr = (ptr);                                       \
        __typeof__(*(ptr)) __xg_orig, __xg_tmp;                                 \
        __typeof__(*(ptr)) __xg_test = (test);                                  \
        __typeof__(*(ptr)) __xg_new = (new);                                    \
                                                                                \
        switch (sizeof(__xg_orig)) {                                            \
        case 4:                                                                 \
                asm volatile(                                                   \
                        "0:                                             \n"     \
                        "       orcc            gr0,gr0,gr0,icc3        \n"     \
                        "       ckeq            icc3,cc7                \n"     \
                        "       ld.p            %M0,%1                  \n"     \
                        "       orcr            cc7,cc7,cc3             \n"     \
                        "       sub%I4cc        %1,%4,%2,icc0           \n"     \
                        "       bne             icc0,#0,1f              \n"     \
                        "       cst.p           %3,%M0          ,cc3,#1 \n"     \
                        "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     \
                        "       beq             icc3,#0,0b              \n"     \
                        "1:                                             \n"     \
                        : "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp)    \
                        : "r"(__xg_new), "NPr"(__xg_test)                       \
                        : "memory", "cc7", "cc3", "icc3", "icc0"                \
                        );                                                      \
                break;                                                          \
                                                                                \
        default:                                                                \
                __xg_orig = (__typeof__(__xg_orig))0;                           \
                asm volatile("break");                                          \
                break;                                                          \
        }                                                                       \
                                                                                \
        __xg_orig;                                                              \
})

#else

extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);

#define cmpxchg(ptr, test, new)                                                 \
({                                                                              \
        __typeof__(ptr) __xg_ptr = (ptr);                                       \
        __typeof__(*(ptr)) __xg_orig;                                           \
        __typeof__(*(ptr)) __xg_test = (test);                                  \
        __typeof__(*(ptr)) __xg_new = (new);                                    \
                                                                                \
        switch (sizeof(__xg_orig)) {                                            \
        case 4: __xg_orig = (__force __typeof__(*ptr))                          \
                        __cmpxchg_32((__force uint32_t *)__xg_ptr,              \
                                     (__force uint32_t)__xg_test,               \
                                     (__force uint32_t)__xg_new); break;        \
        default:                                                                \
                __xg_orig = (__typeof__(__xg_orig))0;                           \
                asm volatile("break");                                          \
                break;                                                          \
        }                                                                       \
                                                                                \
        __xg_orig;                                                              \
})

#endif

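/*
 * Usage sketch (illustrative only, not part of the original header):
 * cmpxchg() stores the new value only if the location still holds the
 * expected value, and always returns the prior contents, so callers
 * typically retry until the returned value matches what they last read.
 * The helper below is hypothetical:
 *
 *	static inline void hypothetical_add(int i, int *counter)
 *	{
 *		int old, cur = *counter;
 *
 *		while ((old = cmpxchg(counter, cur, cur + i)) != cur)
 *			cur = old;
 *	}
 */
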
#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        switch (size) {
        case 4:
                return cmpxchg((unsigned long *)ptr, old, new);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                        \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
                        (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
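
/*
 * Usage sketch (illustrative only, not part of the original header):
 * cmpxchg_local() has the same compare-and-swap semantics as cmpxchg()
 * but is only guaranteed atomic with respect to the current CPU, which
 * makes it suitable for per-CPU data.  The per-CPU counter below is
 * hypothetical:
 *
 *	static DEFINE_PER_CPU(unsigned long, hyp_event_count);
 *
 *	unsigned long old, *p = this_cpu_ptr(&hyp_event_count);
 *
 *	do {
 *		old = *p;
 *	} while (cmpxchg_local(p, old, old + 1) != old);
 */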

#endif /* _ASM_CMPXCHG_H */