GNU Linux-libre 4.14.266-gnu1
[releases.git] / arch / m32r / include / asm / cmpxchg.h
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_M32R_CMPXCHG_H
3 #define _ASM_M32R_CMPXCHG_H
4
5 /*
6  *  M32R version:
7  *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
8  *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
9  */
10
11 #include <linux/irqflags.h>
12 #include <asm/assembler.h>
13 #include <asm/dcache_clear.h>
14
15 extern void  __xchg_called_with_bad_pointer(void);
16
/*
 * __xchg - exchange @x with the value at @ptr, returning the old value.
 *
 * @x:    new value to store
 * @ptr:  location to update
 * @size: access width in bytes (1, 2 or 4)
 *
 * An unsupported @size calls the deliberately undeclared-at-link-time
 * __xchg_called_with_bad_pointer(), turning the mistake into a linker
 * error.
 *
 * Atomicity: on !CONFIG_SMP the plain load/store pair is made atomic
 * simply by disabling local interrupts around it.  On CONFIG_SMP only
 * the 4-byte case exists and uses the m32r "lock"/"unlock" instruction
 * pair (NOTE(review): presumed to hold a hardware bus lock between the
 * load and the store -- confirm against the m32r ISA manual).
 */
17 static __always_inline unsigned long
18 __xchg(unsigned long x, volatile void *ptr, int size)
19 {
20         unsigned long flags;
21         unsigned long tmp = 0;
22 
23         local_irq_save(flags);
24 
25         switch (size) {
26 #ifndef CONFIG_SMP
27         case 1:
28                 __asm__ __volatile__ (
29                         "ldb    %0, @%2 \n\t"
30                         "stb    %1, @%2 \n\t"
31                         : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
32                 break;
33         case 2:
34                 __asm__ __volatile__ (
35                         "ldh    %0, @%2 \n\t"
36                         "sth    %1, @%2 \n\t"
37                         : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
38                 break;
39         case 4:
40                 __asm__ __volatile__ (
41                         "ld     %0, @%2 \n\t"
42                         "st     %1, @%2 \n\t"
43                         : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
44                 break;
45 #else  /* CONFIG_SMP */
46         case 4:
                /* DCACHE_CLEAR is a workaround for M32700 TS1 silicon;
                 * it is passed "r4" as scratch, hence the conditional
                 * extra clobber below. */
47                 __asm__ __volatile__ (
48                         DCACHE_CLEAR("%0", "r4", "%2")
49                         "lock   %0, @%2;        \n\t"
50                         "unlock %1, @%2;        \n\t"
51                         : "=&r" (tmp) : "r" (x), "r" (ptr)
52                         : "memory"
53 #ifdef CONFIG_CHIP_M32700_TS1
54                         , "r4"
55 #endif  /* CONFIG_CHIP_M32700_TS1 */
56                 );
57                 break;
58 #endif  /* CONFIG_SMP */
59         default:
60                 __xchg_called_with_bad_pointer();
61         }
62 
63         local_irq_restore(flags);
64 
65         return (tmp);
66 }
67
/*
 * xchg - type-safe wrapper around __xchg(); evaluates to the previous
 * value of *(ptr), cast back to the pointee's type.  The size argument
 * is derived from the pointee so the right access width is selected.
 */
68 #define xchg(ptr, x) ({                                                 \
69         ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr),          \
70                                     sizeof(*(ptr))));                   \
71 })
72
/*
 * __xchg_local - like __xchg() but only atomic w.r.t. the current CPU.
 *
 * Uses plain load/store under local_irq_save(), so it is safe against
 * interrupts on this CPU but provides no cross-CPU atomicity (note the
 * absence of the lock/unlock SMP variant present in __xchg()).
 * Supports 1-, 2- and 4-byte accesses; any other @size triggers a link
 * error via __xchg_called_with_bad_pointer().
 */
73 static __always_inline unsigned long
74 __xchg_local(unsigned long x, volatile void *ptr, int size)
75 {
76         unsigned long flags;
77         unsigned long tmp = 0;
78 
79         local_irq_save(flags);
80 
81         switch (size) {
82         case 1:
83                 __asm__ __volatile__ (
84                         "ldb    %0, @%2 \n\t"
85                         "stb    %1, @%2 \n\t"
86                         : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
87                 break;
88         case 2:
89                 __asm__ __volatile__ (
90                         "ldh    %0, @%2 \n\t"
91                         "sth    %1, @%2 \n\t"
92                         : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
93                 break;
94         case 4:
95                 __asm__ __volatile__ (
96                         "ld     %0, @%2 \n\t"
97                         "st     %1, @%2 \n\t"
98                         : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
99                 break;
100         default:
101                 __xchg_called_with_bad_pointer();
102         }
103 
104         local_irq_restore(flags);
105 
106         return (tmp);
107 }
108
/*
 * xchg_local - like xchg() but only atomic w.r.t. the current CPU;
 * evaluates to the previous value of *(ptr).
 */
109 #define xchg_local(ptr, x)                                              \
110         ((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr),    \
111                         sizeof(*(ptr))))
112
/*
 * __cmpxchg_u32 - 32-bit compare-and-exchange, SMP-safe.
 *
 * Loads *@p under the m32r bus lock (M32R_LOCK); if the loaded value
 * equals @old, stores @new via M32R_UNLOCK (label 1 is skipped).
 * Otherwise it branches to label 1 and stores the *loaded* value back,
 * which leaves memory unchanged while still releasing the bus lock.
 * Returns the loaded value, so callers compare the result against @old
 * to detect success.  Interrupts are disabled around the sequence as
 * well (NOTE(review): presumably needed on UP builds where M32R_LOCK /
 * M32R_UNLOCK may be plain ld/st -- confirm in <asm/assembler.h>).
 *
 * "cbit" is listed as clobbered for the conditional branch; DCACHE_CLEAR
 * and the conditional "r4" clobber are the M32700 TS1 errata workaround.
 */
113 static inline unsigned long
114 __cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
115 {
116         unsigned long flags;
117         unsigned int retval;
118 
119         local_irq_save(flags);
120         __asm__ __volatile__ (
121                         DCACHE_CLEAR("%0", "r4", "%1")
122                         M32R_LOCK" %0, @%1;     \n"
123                 "       bne     %0, %2, 1f;     \n"
124                         M32R_UNLOCK" %3, @%1;   \n"
125                 "       bra     2f;             \n"
126                 "       .fillinsn               \n"
127                 "1:"
128                         M32R_UNLOCK" %0, @%1;   \n"
129                 "       .fillinsn               \n"
130                 "2:"
131                         : "=&r" (retval)
132                         : "r" (p), "r" (old), "r" (new)
133                         : "cbit", "memory"
134 #ifdef CONFIG_CHIP_M32700_TS1
135                         , "r4"
136 #endif  /* CONFIG_CHIP_M32700_TS1 */
137                 );
138         local_irq_restore(flags);
139 
140         return retval;
141 }
142
/*
 * __cmpxchg_local_u32 - 32-bit compare-and-exchange, atomic only w.r.t.
 * the current CPU.
 *
 * Same control flow as __cmpxchg_u32() but with plain ld/st instead of
 * the lock/unlock pair; local_irq_save() alone provides the atomicity.
 * Returns the value loaded from *@p; @new is stored only when that
 * value equals @old.
 *
 * NOTE(review): the mismatch path (label 1) stores the loaded value
 * back even though there is no bus lock to release here -- apparently
 * kept for symmetry with __cmpxchg_u32(); confirm it is intentional.
 */
143 static inline unsigned long
144 __cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
145                         unsigned int new)
146 {
147         unsigned long flags;
148         unsigned int retval;
149 
150         local_irq_save(flags);
151         __asm__ __volatile__ (
152                         DCACHE_CLEAR("%0", "r4", "%1")
153                         "ld %0, @%1;            \n"
154                 "       bne     %0, %2, 1f;     \n"
155                         "st %3, @%1;            \n"
156                 "       bra     2f;             \n"
157                 "       .fillinsn               \n"
158                 "1:"
159                         "st %0, @%1;            \n"
160                 "       .fillinsn               \n"
161                 "2:"
162                         : "=&r" (retval)
163                         : "r" (p), "r" (old), "r" (new)
164                         : "cbit", "memory"
165 #ifdef CONFIG_CHIP_M32700_TS1
166                         , "r4"
167 #endif  /* CONFIG_CHIP_M32700_TS1 */
168                 );
169         local_irq_restore(flags);
170 
171         return retval;
172 }
173
174 /* This function doesn't exist, so you'll get a linker error
175    if something tries to do an invalid cmpxchg().  */
176 extern void __cmpxchg_called_with_bad_pointer(void);
177
/*
 * __cmpxchg - size dispatcher for the SMP-safe compare-and-exchange.
 *
 * @ptr:  location to update
 * @old:  expected current value
 * @new:  value to store if *@ptr == @old
 * @size: access width in bytes; only 4 is implemented on m32r
 *
 * Returns the value previously held at @ptr.  Any size other than 4
 * reaches __cmpxchg_called_with_bad_pointer(), which has no definition
 * anywhere, so an invalid cmpxchg() fails at link time.
 */
static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	if (size == 4)
		return __cmpxchg_u32(ptr, old, new);
#if 0	/* we don't have __cmpxchg_u64 */
	if (size == 8)
		return __cmpxchg_u64(ptr, old, new);
#endif /* 0 */
	/* unsupported width: force the deliberate link error */
	__cmpxchg_called_with_bad_pointer();
	return old;
}
192
/*
 * cmpxchg - if *(ptr) == o, atomically replace it with n.  Evaluates to
 * the value *(ptr) held before the operation (equal to o on success),
 * cast back to the pointee's type.
 */
193 #define cmpxchg(ptr, o, n) ({                           \
194         ((__typeof__(*(ptr)))                           \
195                  __cmpxchg((ptr), (unsigned long)(o),   \
196                            (unsigned long)(n),          \
197                            sizeof(*(ptr))));            \
198 })
199
200 #include <asm-generic/cmpxchg-local.h>
201
/*
 * __cmpxchg_local - size dispatcher for the CPU-local compare-and-
 * exchange (atomic w.r.t. the current CPU only).
 *
 * @ptr:  location to update
 * @old:  expected current value
 * @new:  value to store if *@ptr == @old
 * @size: access width in bytes
 *
 * The 4-byte case uses the m32r-specific __cmpxchg_local_u32(); every
 * other size falls back to the generic IRQ-disabling implementation
 * from <asm-generic/cmpxchg-local.h>.  Returns the previous value.
 *
 * Fix: dropped the trailing "return old;" -- both switch branches
 * return, so that statement was unreachable dead code.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
				      unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_local_u32(ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}
}
215
216 /*
217  * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
218  * them available.
219  */
/* 32-bit-and-under local variant: dispatches through __cmpxchg_local(). */
220 #define cmpxchg_local(ptr, o, n)                                            \
221         ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),     \
222                         (unsigned long)(n), sizeof(*(ptr))))
/* No 64-bit m32r helper exists, so always use the generic version. */
223 #define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
224
225 #endif /* _ASM_M32R_CMPXCHG_H */