/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
/*
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 */
#ifndef _UAPI_ASM_IA64_GCC_INTRIN_H
#define _UAPI_ASM_IA64_GCC_INTRIN_H

#include <linux/types.h>
#include <linux/compiler.h>

/* Define this macro to get some asm statements included in 'C' files. */
#define ASM_SUPPORTED

/* Optimization barrier */
/* The "volatile" is due to gcc bugs */
#define ia64_barrier()  asm volatile ("":::"memory")

#define ia64_stop()     asm volatile (";;"::)

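/*
 * ia64_barrier() emits no instruction at all; it only keeps the compiler
 * from reordering memory accesses across it, while ia64_stop() emits a
 * ";;" stop that ends the current instruction group.  Illustrative
 * sketch ('data' and 'flag' are arbitrary caller variables):
 *
 *	data = 42;
 *	ia64_barrier();		<-- compiler may not move the stores past this
 *	flag = 1;
 */
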
#define ia64_invala_gr(regnum)  asm volatile ("invala.e r%0" :: "i"(regnum))

#define ia64_invala_fr(regnum)  asm volatile ("invala.e f%0" :: "i"(regnum))

#define ia64_flushrs() asm volatile ("flushrs;;":::"memory")

#define ia64_loadrs() asm volatile ("loadrs;;":::"memory")

extern void ia64_bad_param_for_setreg (void);
extern void ia64_bad_param_for_getreg (void);

#define ia64_native_setreg(regnum, val)                                         \
({                                                                              \
        switch (regnum) {                                                       \
            case _IA64_REG_PSR_L:                                               \
                    asm volatile ("mov psr.l=%0" :: "r"(val) : "memory");       \
                    break;                                                      \
            case _IA64_REG_AR_KR0 ... _IA64_REG_AR_EC:                          \
                    asm volatile ("mov ar%0=%1" ::                              \
                                          "i" (regnum - _IA64_REG_AR_KR0),      \
                                          "r"(val): "memory");                  \
                    break;                                                      \
            case _IA64_REG_CR_DCR ... _IA64_REG_CR_LRR1:                        \
                    asm volatile ("mov cr%0=%1" ::                              \
                                          "i" (regnum - _IA64_REG_CR_DCR),      \
                                          "r"(val): "memory");                  \
                    break;                                                      \
            case _IA64_REG_SP:                                                  \
                    asm volatile ("mov r12=%0" ::                               \
                                          "r"(val): "memory");                  \
                    break;                                                      \
            case _IA64_REG_GP:                                                  \
                    asm volatile ("mov gp=%0" :: "r"(val) : "memory");          \
                    break;                                                      \
            default:                                                            \
                    ia64_bad_param_for_setreg();                                \
                    break;                                                      \
        }                                                                       \
})

#define ia64_native_getreg(regnum)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
                                                                                \
        switch (regnum) {                                                       \
        case _IA64_REG_GP:                                                      \
                asm volatile ("mov %0=gp" : "=r"(ia64_intri_res));              \
                break;                                                          \
        case _IA64_REG_IP:                                                      \
                asm volatile ("mov %0=ip" : "=r"(ia64_intri_res));              \
                break;                                                          \
        case _IA64_REG_PSR:                                                     \
                asm volatile ("mov %0=psr" : "=r"(ia64_intri_res));             \
                break;                                                          \
        case _IA64_REG_TP:      /* for current() */                             \
                ia64_intri_res = ia64_r13;                                      \
                break;                                                          \
        case _IA64_REG_AR_KR0 ... _IA64_REG_AR_EC:                              \
                asm volatile ("mov %0=ar%1" : "=r" (ia64_intri_res)             \
                                      : "i"(regnum - _IA64_REG_AR_KR0));        \
                break;                                                          \
        case _IA64_REG_CR_DCR ... _IA64_REG_CR_LRR1:                            \
                asm volatile ("mov %0=cr%1" : "=r" (ia64_intri_res)             \
                                      : "i" (regnum - _IA64_REG_CR_DCR));       \
                break;                                                          \
        case _IA64_REG_SP:                                                      \
                asm volatile ("mov %0=sp" : "=r" (ia64_intri_res));             \
                break;                                                          \
        default:                                                                \
                ia64_bad_param_for_getreg();                                    \
                break;                                                          \
        }                                                                       \
        ia64_intri_res;                                                         \
})
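
/*
 * Example (illustrative sketch; regnum must be a compile-time constant so
 * the "i" constraints above can be satisfied once the switch is folded):
 *
 *	unsigned long sp = ia64_native_getreg(_IA64_REG_SP);
 *	ia64_native_setreg(_IA64_REG_AR_KR0, 0UL);
 *
 * An out-of-range regnum leaves a call to the deliberately undefined
 * ia64_bad_param_for_getreg()/ia64_bad_param_for_setreg() in the object
 * file, turning the mistake into a link-time error.
 */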

#define ia64_hint_pause 0

#define ia64_hint(mode)                                         \
({                                                              \
        switch (mode) {                                         \
        case ia64_hint_pause:                                   \
                asm volatile ("hint @pause" ::: "memory");      \
                break;                                          \
        }                                                       \
})


/* Integer values for mux1 instruction */
#define ia64_mux1_brcst 0
#define ia64_mux1_mix   8
#define ia64_mux1_shuf  9
#define ia64_mux1_alt  10
#define ia64_mux1_rev  11

#define ia64_mux1(x, mode)                                                      \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
                                                                                \
        switch (mode) {                                                         \
        case ia64_mux1_brcst:                                                   \
                asm ("mux1 %0=%1,@brcst" : "=r" (ia64_intri_res) : "r" (x));    \
                break;                                                          \
        case ia64_mux1_mix:                                                     \
                asm ("mux1 %0=%1,@mix" : "=r" (ia64_intri_res) : "r" (x));      \
                break;                                                          \
        case ia64_mux1_shuf:                                                    \
                asm ("mux1 %0=%1,@shuf" : "=r" (ia64_intri_res) : "r" (x));     \
                break;                                                          \
        case ia64_mux1_alt:                                                     \
                asm ("mux1 %0=%1,@alt" : "=r" (ia64_intri_res) : "r" (x));      \
                break;                                                          \
        case ia64_mux1_rev:                                                     \
                asm ("mux1 %0=%1,@rev" : "=r" (ia64_intri_res) : "r" (x));      \
                break;                                                          \
        }                                                                       \
        ia64_intri_res;                                                         \
})
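
/*
 * mux1 permutes the eight bytes of a 64-bit value: @rev reverses the byte
 * order (a full byte swap), @brcst replicates byte 0 into all eight byte
 * positions.  Illustrative sketch:
 *
 *	__u64 swapped = ia64_mux1(0x0102030405060708UL, ia64_mux1_rev);
 *	// swapped == 0x0807060504030201UL
 */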

#if __GNUC__ >= 4 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)
# define ia64_popcnt(x)         __builtin_popcountl(x)
#else
# define ia64_popcnt(x)                                         \
  ({                                                            \
        __u64 ia64_intri_res;                                   \
        asm ("popcnt %0=%1" : "=r" (ia64_intri_res) : "r" (x)); \
                                                                \
        ia64_intri_res;                                         \
  })
#endif

#define ia64_getf_exp(x)                                        \
({                                                              \
        long ia64_intri_res;                                    \
                                                                \
        asm ("getf.exp %0=%1" : "=r"(ia64_intri_res) : "f"(x)); \
                                                                \
        ia64_intri_res;                                         \
})

#define ia64_shrp(a, b, count)                                                          \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm ("shrp %0=%1,%2,%3" : "=r"(ia64_intri_res) : "r"(a), "r"(b), "i"(count));   \
        ia64_intri_res;                                                                 \
})
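
/*
 * shrp concatenates a (high half) and b (low half) into a 128-bit value,
 * shifts it right by the immediate count, and returns the low 64 bits.
 * Passing the same value for both operands therefore rotates it.
 * Illustrative sketch (count must be a compile-time constant for "i"):
 *
 *	__u64 ror8 = ia64_shrp(x, x, 8);	// x rotated right by 8 bits
 */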

#define ia64_ldfs(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldfs %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldfd(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldfd %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldfe(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldfe %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldf8(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldf8 %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldf_fill(regnum, x)                                \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldf.fill %0=[%1]" :"=f"(__f__): "r"(x)); \
})
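
/*
 * The ldf macros above and the stf macros below stringify regnum into a
 * register name ("f"#regnum), so regnum must be a literal number, never a
 * variable.  Illustrative sketch ('p' is an arbitrary pointer to the
 * operand):
 *
 *	ia64_ldfd(6, p);	// load the double at *p into f6
 *	ia64_stfd(p, 6);	// store f6 back to *p
 */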

#define ia64_st4_rel_nta(m, val)                                        \
({                                                                      \
        asm volatile ("st4.rel.nta [%0] = %1\n\t" :: "r"(m), "r"(val)); \
})

#define ia64_stfs(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stfs [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stfd(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stfd [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stfe(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stfe [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stf8(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stf8 [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stf_spill(x, regnum)                                               \
({                                                                              \
        register double __f__ asm ("f"#regnum);                                 \
        asm volatile ("stf.spill [%0]=%1" :: "r"(x), "f"(__f__) : "memory");    \
})

#define ia64_fetchadd4_acq(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd4.acq %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})

#define ia64_fetchadd4_rel(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd4.rel %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})

#define ia64_fetchadd8_acq(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd8.acq %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})

#define ia64_fetchadd8_rel(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd8.rel %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})
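
/*
 * fetchadd atomically adds an immediate to the 4- or 8-byte word at [p]
 * and returns the value the word held before the add.  The hardware only
 * accepts inc values of -16, -8, -4, -1, 1, 4, 8 and 16.  Illustrative
 * sketch ('counter' is an arbitrary naturally aligned __u64):
 *
 *	__u64 old = ia64_fetchadd8_acq(&counter, 1);
 *	// counter is now old + 1, with acquire ordering
 */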

#define ia64_xchg1(ptr,x)                                                       \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("xchg1 %0=[%1],%2"                                        \
                      : "=r" (ia64_intri_res) : "r" (ptr), "r" (x) : "memory"); \
        ia64_intri_res;                                                         \
})

#define ia64_xchg2(ptr,x)                                               \
({                                                                      \
        __u64 ia64_intri_res;                                           \
        asm volatile ("xchg2 %0=[%1],%2" : "=r" (ia64_intri_res)        \
                      : "r" (ptr), "r" (x) : "memory");                 \
        ia64_intri_res;                                                 \
})

#define ia64_xchg4(ptr,x)                                               \
({                                                                      \
        __u64 ia64_intri_res;                                           \
        asm volatile ("xchg4 %0=[%1],%2" : "=r" (ia64_intri_res)        \
                      : "r" (ptr), "r" (x) : "memory");                 \
        ia64_intri_res;                                                 \
})

#define ia64_xchg8(ptr,x)                                               \
({                                                                      \
        __u64 ia64_intri_res;                                           \
        asm volatile ("xchg8 %0=[%1],%2" : "=r" (ia64_intri_res)        \
                      : "r" (ptr), "r" (x) : "memory");                 \
        ia64_intri_res;                                                 \
})
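
/*
 * xchg atomically stores x at [ptr] and returns the previous contents;
 * on ia64 the exchange always carries acquire semantics.  Illustrative
 * sketch ('lock_word' is an arbitrary naturally aligned __u32):
 *
 *	if (ia64_xchg4(&lock_word, 1) == 0) {
 *		// lock acquired
 *	}
 */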

#define ia64_cmpxchg1_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg1.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg1_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg1.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg2_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg2.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg2_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg2.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg4_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg4.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg4_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg4.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg8_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg8.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg8_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg8.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})
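
/*
 * The cmpxchg macros first latch 'old' in the ar.ccv application register;
 * the compare-and-exchange then stores 'new' at [ptr] only if the memory
 * word still equals ar.ccv, and always returns the prior memory value.
 * Typical retry loop (illustrative sketch; 'v' is an arbitrary __u64):
 *
 *	__u64 cur, seen;
 *	do {
 *		cur = v;
 *		seen = ia64_cmpxchg8_acq(&v, cur + 1, cur);
 *	} while (seen != cur);
 */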

#define ia64_mf()       asm volatile ("mf" ::: "memory")
#define ia64_mfa()      asm volatile ("mf.a" ::: "memory")

#define ia64_invala() asm volatile ("invala" ::: "memory")

#define ia64_native_thash(addr)                                                 \
({                                                                              \
        unsigned long ia64_intri_res;                                           \
        asm volatile ("thash %0=%1" : "=r"(ia64_intri_res) : "r" (addr));       \
        ia64_intri_res;                                                         \
})

#define ia64_srlz_i()   asm volatile (";; srlz.i ;;" ::: "memory")
#define ia64_srlz_d()   asm volatile (";; srlz.d" ::: "memory")

#ifdef HAVE_SERIALIZE_DIRECTIVE
# define ia64_dv_serialize_data()               asm volatile (".serialize.data")
# define ia64_dv_serialize_instruction()        asm volatile (".serialize.instruction")
#else
# define ia64_dv_serialize_data()
# define ia64_dv_serialize_instruction()
#endif

#define ia64_nop(x)     asm volatile ("nop %0"::"i"(x))

#define ia64_itci(addr) asm volatile ("itc.i %0;;" :: "r"(addr) : "memory")

#define ia64_itcd(addr) asm volatile ("itc.d %0;;" :: "r"(addr) : "memory")


#define ia64_itri(trnum, addr) asm volatile ("itr.i itr[%0]=%1"                         \
                                             :: "r"(trnum), "r"(addr) : "memory")

#define ia64_itrd(trnum, addr) asm volatile ("itr.d dtr[%0]=%1"                         \
                                             :: "r"(trnum), "r"(addr) : "memory")

#define ia64_tpa(addr)                                                          \
({                                                                              \
        unsigned long ia64_pa;                                                  \
        asm volatile ("tpa %0 = %1" : "=r"(ia64_pa) : "r"(addr) : "memory");    \
        ia64_pa;                                                                \
})

#define __ia64_set_dbr(index, val)                                              \
        asm volatile ("mov dbr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_ibr(index, val)                                                \
        asm volatile ("mov ibr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_pkr(index, val)                                                \
        asm volatile ("mov pkr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_pmc(index, val)                                                \
        asm volatile ("mov pmc[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_pmd(index, val)                                                \
        asm volatile ("mov pmd[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_native_set_rr(index, val)                                                  \
        asm volatile ("mov rr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_native_get_cpuid(index)                                                    \
({                                                                                      \
        unsigned long ia64_intri_res;                                                   \
        asm volatile ("mov %0=cpuid[%r1]" : "=r"(ia64_intri_res) : "rO"(index));        \
        ia64_intri_res;                                                                 \
})

#define __ia64_get_dbr(index)                                                   \
({                                                                              \
        unsigned long ia64_intri_res;                                           \
        asm volatile ("mov %0=dbr[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_ibr(index)                                                     \
({                                                                              \
        unsigned long ia64_intri_res;                                           \
        asm volatile ("mov %0=ibr[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_pkr(index)                                                     \
({                                                                              \
        unsigned long ia64_intri_res;                                           \
        asm volatile ("mov %0=pkr[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_pmc(index)                                                     \
({                                                                              \
        unsigned long ia64_intri_res;                                           \
        asm volatile ("mov %0=pmc[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})


#define ia64_native_get_pmd(index)                                              \
({                                                                              \
        unsigned long ia64_intri_res;                                           \
        asm volatile ("mov %0=pmd[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_native_get_rr(index)                                               \
({                                                                              \
        unsigned long ia64_intri_res;                                           \
        asm volatile ("mov %0=rr[%1]" : "=r"(ia64_intri_res) : "r" (index));    \
        ia64_intri_res;                                                         \
})

#define ia64_native_fc(addr)    asm volatile ("fc %0" :: "r"(addr) : "memory")


#define ia64_sync_i()   asm volatile (";; sync.i" ::: "memory")

#define ia64_native_ssm(mask)   asm volatile ("ssm %0":: "i"((mask)) : "memory")
#define ia64_native_rsm(mask)   asm volatile ("rsm %0":: "i"((mask)) : "memory")
#define ia64_sum(mask)  asm volatile ("sum %0":: "i"((mask)) : "memory")
#define ia64_rum(mask)  asm volatile ("rum %0":: "i"((mask)) : "memory")

#define ia64_ptce(addr) asm volatile ("ptc.e %0" :: "r"(addr))

#define ia64_native_ptcga(addr, size)                                           \
do {                                                                            \
        asm volatile ("ptc.ga %0,%1" :: "r"(addr), "r"(size) : "memory");       \
        ia64_dv_serialize_data();                                               \
} while (0)

#define ia64_ptcl(addr, size)                                                   \
do {                                                                            \
        asm volatile ("ptc.l %0,%1" :: "r"(addr), "r"(size) : "memory");        \
        ia64_dv_serialize_data();                                               \
} while (0)

#define ia64_ptri(addr, size)                                           \
        asm volatile ("ptr.i %0,%1" :: "r"(addr), "r"(size) : "memory")

#define ia64_ptrd(addr, size)                                           \
        asm volatile ("ptr.d %0,%1" :: "r"(addr), "r"(size) : "memory")

#define ia64_ttag(addr)                                                         \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("ttag %0=%1" : "=r"(ia64_intri_res) : "r" (addr));        \
        ia64_intri_res;                                                         \
})


/* Values for lfhint in the ia64_lfetch family of macros below */

#define ia64_lfhint_none   0
#define ia64_lfhint_nt1    1
#define ia64_lfhint_nt2    2
#define ia64_lfhint_nta    3

#define ia64_lfetch(lfhint, y)                                  \
({                                                              \
        switch (lfhint) {                                       \
        case ia64_lfhint_none:                                  \
                asm volatile ("lfetch [%0]" : : "r"(y));        \
                break;                                          \
        case ia64_lfhint_nt1:                                   \
                asm volatile ("lfetch.nt1 [%0]" : : "r"(y));    \
                break;                                          \
        case ia64_lfhint_nt2:                                   \
                asm volatile ("lfetch.nt2 [%0]" : : "r"(y));    \
                break;                                          \
        case ia64_lfhint_nta:                                   \
                asm volatile ("lfetch.nta [%0]" : : "r"(y));    \
                break;                                          \
        }                                                       \
})
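
/*
 * lfetch issues a non-faulting prefetch of the cache line containing [y];
 * the .nt1/.nt2/.nta completers mark the line as non-temporal for the
 * corresponding cache levels, and the .fault forms below prefetch like a
 * real load, taking any translation faults.  Illustrative sketch ('buf'
 * and 'i' are arbitrary caller variables):
 *
 *	ia64_lfetch(ia64_lfhint_nta, &buf[i + 8]);
 */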

#define ia64_lfetch_excl(lfhint, y)                                     \
({                                                                      \
        switch (lfhint) {                                               \
        case ia64_lfhint_none:                                          \
                asm volatile ("lfetch.excl [%0]" :: "r"(y));            \
                break;                                                  \
        case ia64_lfhint_nt1:                                           \
                asm volatile ("lfetch.excl.nt1 [%0]" :: "r"(y));        \
                break;                                                  \
        case ia64_lfhint_nt2:                                           \
                asm volatile ("lfetch.excl.nt2 [%0]" :: "r"(y));        \
                break;                                                  \
        case ia64_lfhint_nta:                                           \
                asm volatile ("lfetch.excl.nta [%0]" :: "r"(y));        \
                break;                                                  \
        }                                                               \
})

#define ia64_lfetch_fault(lfhint, y)                                    \
({                                                                      \
        switch (lfhint) {                                               \
        case ia64_lfhint_none:                                          \
                asm volatile ("lfetch.fault [%0]" : : "r"(y));          \
                break;                                                  \
        case ia64_lfhint_nt1:                                           \
                asm volatile ("lfetch.fault.nt1 [%0]" : : "r"(y));      \
                break;                                                  \
        case ia64_lfhint_nt2:                                           \
                asm volatile ("lfetch.fault.nt2 [%0]" : : "r"(y));      \
                break;                                                  \
        case ia64_lfhint_nta:                                           \
                asm volatile ("lfetch.fault.nta [%0]" : : "r"(y));      \
                break;                                                  \
        }                                                               \
})

#define ia64_lfetch_fault_excl(lfhint, y)                               \
({                                                                      \
        switch (lfhint) {                                               \
        case ia64_lfhint_none:                                          \
                asm volatile ("lfetch.fault.excl [%0]" :: "r"(y));      \
                break;                                                  \
        case ia64_lfhint_nt1:                                           \
                asm volatile ("lfetch.fault.excl.nt1 [%0]" :: "r"(y));  \
                break;                                                  \
        case ia64_lfhint_nt2:                                           \
                asm volatile ("lfetch.fault.excl.nt2 [%0]" :: "r"(y));  \
                break;                                                  \
        case ia64_lfhint_nta:                                           \
                asm volatile ("lfetch.fault.excl.nta [%0]" :: "r"(y));  \
                break;                                                  \
        }                                                               \
})

#define ia64_native_intrin_local_irq_restore(x)                 \
do {                                                            \
        asm volatile (";;   cmp.ne p6,p7=%0,r0;;"               \
                      "(p6) ssm psr.i;"                         \
                      "(p7) rsm psr.i;;"                        \
                      "(p6) srlz.d"                             \
                      :: "r"((x)) : "p6", "p7", "memory");      \
} while (0)
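
/*
 * Restores the interrupt state previously sampled from psr.i: predicate
 * p6 is set when x is non-zero (re-enable interrupts with ssm psr.i and
 * serialize), p7 when x is zero (mask them with rsm psr.i).  Illustrative
 * sketch ('flags' holds a previously saved psr.i value):
 *
 *	ia64_native_intrin_local_irq_restore(flags);
 */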

#endif /* _UAPI_ASM_IA64_GCC_INTRIN_H */