#ifndef _ASM_IA64_GCC_INTRIN_H
#define _ASM_IA64_GCC_INTRIN_H
/*
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 *
 */

/* Define this macro to get asm statements included in 'C' files. */
#define ASM_SUPPORTED

/* Optimization barrier */
/* The "volatile" is due to gcc bugs */
#define ia64_barrier()  asm volatile ("":::"memory")

#define ia64_stop()     asm volatile (";;"::)

#define ia64_invala_gr(regnum)  asm volatile ("invala.e r%0" :: "i"(regnum))

#define ia64_invala_fr(regnum)  asm volatile ("invala.e f%0" :: "i"(regnum))

extern void ia64_bad_param_for_setreg (void);
extern void ia64_bad_param_for_getreg (void);

register unsigned long ia64_r13 asm ("r13");

#define ia64_setreg(regnum, val)                                                \
({                                                                              \
        switch (regnum) {                                                       \
            case _IA64_REG_PSR_L:                                               \
                    asm volatile ("mov psr.l=%0" :: "r"(val) : "memory");       \
                    break;                                                      \
            case _IA64_REG_AR_KR0 ... _IA64_REG_AR_EC:                          \
                    asm volatile ("mov ar%0=%1" ::                              \
                                          "i" (regnum - _IA64_REG_AR_KR0),      \
                                          "r"(val): "memory");                  \
                    break;                                                      \
            case _IA64_REG_CR_DCR ... _IA64_REG_CR_LRR1:                        \
                    asm volatile ("mov cr%0=%1" ::                              \
                                          "i" (regnum - _IA64_REG_CR_DCR),      \
                                          "r"(val): "memory");                  \
                    break;                                                      \
            case _IA64_REG_SP:                                                  \
                    asm volatile ("mov r12=%0" ::                               \
                                          "r"(val): "memory");                  \
                    break;                                                      \
            case _IA64_REG_GP:                                                  \
                    asm volatile ("mov gp=%0" :: "r"(val) : "memory");          \
                    break;                                                      \
            default:                                                            \
                    ia64_bad_param_for_setreg();                                \
                    break;                                                      \
        }                                                                       \
})

#define ia64_getreg(regnum)                                                     \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
                                                                                \
        switch (regnum) {                                                       \
        case _IA64_REG_GP:                                                      \
                asm volatile ("mov %0=gp" : "=r"(ia64_intri_res));              \
                break;                                                          \
        case _IA64_REG_IP:                                                      \
                asm volatile ("mov %0=ip" : "=r"(ia64_intri_res));              \
                break;                                                          \
        case _IA64_REG_PSR:                                                     \
                asm volatile ("mov %0=psr" : "=r"(ia64_intri_res));             \
                break;                                                          \
        case _IA64_REG_TP:      /* for current() */                             \
                ia64_intri_res = ia64_r13;                                      \
                break;                                                          \
        case _IA64_REG_AR_KR0 ... _IA64_REG_AR_EC:                              \
                asm volatile ("mov %0=ar%1" : "=r" (ia64_intri_res)             \
                                      : "i"(regnum - _IA64_REG_AR_KR0));        \
                break;                                                          \
        case _IA64_REG_CR_DCR ... _IA64_REG_CR_LRR1:                            \
                asm volatile ("mov %0=cr%1" : "=r" (ia64_intri_res)             \
                                      : "i" (regnum - _IA64_REG_CR_DCR));       \
                break;                                                          \
        case _IA64_REG_SP:                                                      \
                asm volatile ("mov %0=sp" : "=r" (ia64_intri_res));             \
                break;                                                          \
        default:                                                                \
                ia64_bad_param_for_getreg();                                    \
                break;                                                          \
        }                                                                       \
        ia64_intri_res;                                                         \
})
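
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * ia64_getreg()/ia64_setreg() expect a compile-time register selector so
 * each switch collapses to a single mov.  Reading the thread pointer is,
 * for instance, how current is derived on ia64:
 *
 *	struct task_struct *tsk;
 *	tsk = (struct task_struct *) ia64_getreg(_IA64_REG_TP);
 *
 * A regnum outside the handled ranges leaves a call to the deliberately
 * undefined ia64_bad_param_for_setreg()/..._getreg() in the object file,
 * turning the mistake into a link-time error.
 */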

#define ia64_hint_pause 0

#define ia64_hint(mode)                                         \
({                                                              \
        switch (mode) {                                         \
        case ia64_hint_pause:                                   \
                asm volatile ("hint @pause" ::: "memory");      \
                break;                                          \
        }                                                       \
})
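
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * "hint @pause" tells the CPU it is inside a spin-wait loop; on
 * implementations that do not support the hint it executes as a nop.
 * A typical busy-wait on a shared flag:
 *
 *	while (!*flag)
 *		ia64_hint(ia64_hint_pause);
 */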

/* Integer values for mux1 instruction */
#define ia64_mux1_brcst 0
#define ia64_mux1_mix   8
#define ia64_mux1_shuf  9
#define ia64_mux1_alt  10
#define ia64_mux1_rev  11

#define ia64_mux1(x, mode)                                                      \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
                                                                                \
        switch (mode) {                                                         \
        case ia64_mux1_brcst:                                                   \
                asm ("mux1 %0=%1,@brcst" : "=r" (ia64_intri_res) : "r" (x));    \
                break;                                                          \
        case ia64_mux1_mix:                                                     \
                asm ("mux1 %0=%1,@mix" : "=r" (ia64_intri_res) : "r" (x));      \
                break;                                                          \
        case ia64_mux1_shuf:                                                    \
                asm ("mux1 %0=%1,@shuf" : "=r" (ia64_intri_res) : "r" (x));     \
                break;                                                          \
        case ia64_mux1_alt:                                                     \
                asm ("mux1 %0=%1,@alt" : "=r" (ia64_intri_res) : "r" (x));      \
                break;                                                          \
        case ia64_mux1_rev:                                                     \
                asm ("mux1 %0=%1,@rev" : "=r" (ia64_intri_res) : "r" (x));      \
                break;                                                          \
        }                                                                       \
        ia64_intri_res;                                                         \
})
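
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * mux1 with @rev reverses the eight bytes of a 64-bit value, giving a
 * one-instruction byte swap, while @brcst replicates the low byte into
 * all eight positions:
 *
 *	__u64 swapped = ia64_mux1(x, ia64_mux1_rev);
 *
 * There is no default case: a mode outside the listed values emits no
 * asm and leaves ia64_intri_res uninitialized.
 */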

#define ia64_popcnt(x)                                          \
({                                                              \
        __u64 ia64_intri_res;                                   \
        asm ("popcnt %0=%1" : "=r" (ia64_intri_res) : "r" (x)); \
                                                                \
        ia64_intri_res;                                         \
})
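
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * popcnt returns the number of set bits, so word parity is simply the
 * low bit of the count:
 *
 *	int parity = ia64_popcnt(x) & 1;
 */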

#define ia64_getf_exp(x)                                        \
({                                                              \
        long ia64_intri_res;                                    \
                                                                \
        asm ("getf.exp %0=%1" : "=r"(ia64_intri_res) : "f"(x)); \
                                                                \
        ia64_intri_res;                                         \
})

#define ia64_shrp(a, b, count)                                                          \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm ("shrp %0=%1,%2,%3" : "=r"(ia64_intri_res) : "r"(a), "r"(b), "i"(count));   \
        ia64_intri_res;                                                                 \
})
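
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * shrp extracts 64 bits from the 128-bit concatenation a:b shifted
 * right by count, so passing the same value twice yields a right
 * rotate (the rotr64 name is hypothetical):
 *
 *	#define rotr64(x, n)    ia64_shrp((x), (x), (n))
 *
 * The "i" constraint requires count to be a compile-time constant in
 * the range 0..63.
 */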

#define ia64_ldfs(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldfs %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldfd(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldfd %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldfe(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldfe %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldf8(regnum, x)                                    \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldf8 %0=[%1]" :"=f"(__f__): "r"(x));     \
})

#define ia64_ldf_fill(regnum, x)                                \
({                                                              \
        register double __f__ asm ("f"#regnum);                 \
        asm volatile ("ldf.fill %0=[%1]" :"=f"(__f__): "r"(x)); \
})

#define ia64_stfs(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stfs [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stfd(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stfd [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stfe(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stfe [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stf8(x, regnum)                                            \
({                                                                      \
        register double __f__ asm ("f"#regnum);                         \
        asm volatile ("stf8 [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \
})

#define ia64_stf_spill(x, regnum)                                               \
({                                                                              \
        register double __f__ asm ("f"#regnum);                                 \
        asm volatile ("stf.spill [%0]=%1" :: "r"(x), "f"(__f__) : "memory");    \
})
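
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * stf.spill/ldf.fill save and restore a floating-point register in its
 * full register format.  Assuming the 16-byte aligned struct ia64_fpreg
 * container from <asm/fpu.h>, preserving f6 across a clobbering region
 * might look like:
 *
 *	struct ia64_fpreg save;
 *	ia64_stf_spill(&save, 6);
 *	...code that may modify f6...
 *	ia64_ldf_fill(6, &save);
 *
 * The register number is pasted into the asm template by the
 * preprocessor, so it must be a literal, not a variable.
 */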

#define ia64_fetchadd4_acq(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd4.acq %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})

#define ia64_fetchadd4_rel(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd4.rel %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})

#define ia64_fetchadd8_acq(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd8.acq %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})

#define ia64_fetchadd8_rel(p, inc)                                              \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("fetchadd8.rel %0=[%1],%2"                                \
                                : "=r"(ia64_intri_res) : "r"(p), "i" (inc)      \
                                : "memory");                                    \
                                                                                \
        ia64_intri_res;                                                         \
})
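
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * fetchadd atomically adds inc to *p and returns the old value.  The
 * "i" constraint mirrors the ISA: the increment must be one of the
 * literals -16, -8, -4, -1, 1, 4, 8, 16.  A simple atomic counter bump:
 *
 *	__u32 old = ia64_fetchadd4_acq(&counter, 1);
 *
 * .acq variants give the update acquire semantics, .rel release.
 */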

#define ia64_xchg1(ptr,x)                                               \
({                                                                      \
        __u64 ia64_intri_res;                                           \
        asm volatile ("xchg1 %0=[%1],%2" : "=r" (ia64_intri_res)        \
                            : "r" (ptr), "r" (x) : "memory");           \
        ia64_intri_res;                                                 \
})

#define ia64_xchg2(ptr,x)                                               \
({                                                                      \
        __u64 ia64_intri_res;                                           \
        asm volatile ("xchg2 %0=[%1],%2" : "=r" (ia64_intri_res)        \
                            : "r" (ptr), "r" (x) : "memory");           \
        ia64_intri_res;                                                 \
})

#define ia64_xchg4(ptr,x)                                               \
({                                                                      \
        __u64 ia64_intri_res;                                           \
        asm volatile ("xchg4 %0=[%1],%2" : "=r" (ia64_intri_res)        \
                            : "r" (ptr), "r" (x) : "memory");           \
        ia64_intri_res;                                                 \
})

#define ia64_xchg8(ptr,x)                                               \
({                                                                      \
        __u64 ia64_intri_res;                                           \
        asm volatile ("xchg8 %0=[%1],%2" : "=r" (ia64_intri_res)        \
                            : "r" (ptr), "r" (x) : "memory");           \
        ia64_intri_res;                                                 \
})
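
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * xchg returns the previous memory contents and always has acquire
 * semantics, which is enough for a simple test-and-set lock:
 *
 *	while (ia64_xchg4(&lock, 1) != 0)
 *		ia64_hint(ia64_hint_pause);
 *
 * Unlocking is then an ordinary release store of 0.
 */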

#define ia64_cmpxchg1_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg1.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg1_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg1.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg2_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg2.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg2_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg2.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg4_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg4.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg4_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg4.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg8_acq(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg8.acq %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})

#define ia64_cmpxchg8_rel(ptr, new, old)                                                \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));                                  \
        asm volatile ("cmpxchg8.rel %0=[%1],%2,ar.ccv":                                 \
                              "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");    \
        ia64_intri_res;                                                                 \
})
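
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * cmpxchg compares *ptr against ar.ccv (loaded from old), stores new
 * only on a match, and returns the value actually found.  The canonical
 * update loop retries until no other CPU intervened:
 *
 *	__u64 old, seen;
 *	do {
 *		old = *v;
 *		seen = ia64_cmpxchg8_acq(v, old + 1, old);
 *	} while (seen != old);
 *
 * Only the cmpxchg itself is atomic; the preceding mov to ar.ccv merely
 * stages the comparison value.
 */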

#define ia64_mf()       asm volatile ("mf" ::: "memory")
#define ia64_mfa()      asm volatile ("mf.a" ::: "memory")

#define ia64_invala() asm volatile ("invala" ::: "memory")

#define ia64_thash(addr)                                                        \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("thash %0=%1" : "=r"(ia64_intri_res) : "r" (addr));       \
        ia64_intri_res;                                                         \
})

#define ia64_srlz_i()   asm volatile (";; srlz.i ;;" ::: "memory")

#define ia64_srlz_d()   asm volatile (";; srlz.d" ::: "memory")

#define ia64_nop(x)     asm volatile ("nop %0"::"i"(x))

#define ia64_itci(addr) asm volatile ("itc.i %0;;" :: "r"(addr) : "memory")

#define ia64_itcd(addr) asm volatile ("itc.d %0;;" :: "r"(addr) : "memory")

#define ia64_itri(trnum, addr) asm volatile ("itr.i itr[%0]=%1"                         \
                                             :: "r"(trnum), "r"(addr) : "memory")

#define ia64_itrd(trnum, addr) asm volatile ("itr.d dtr[%0]=%1"                         \
                                             :: "r"(trnum), "r"(addr) : "memory")

#define ia64_tpa(addr)                                                          \
({                                                                              \
        __u64 ia64_pa;                                                          \
        asm volatile ("tpa %0 = %1" : "=r"(ia64_pa) : "r"(addr) : "memory");    \
        ia64_pa;                                                                \
})

#define __ia64_set_dbr(index, val)                                              \
        asm volatile ("mov dbr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_ibr(index, val)                                                \
        asm volatile ("mov ibr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_pkr(index, val)                                                \
        asm volatile ("mov pkr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_pmc(index, val)                                                \
        asm volatile ("mov pmc[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_pmd(index, val)                                                \
        asm volatile ("mov pmd[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_set_rr(index, val)                                                 \
        asm volatile ("mov rr[%0]=%1" :: "r"(index), "r"(val) : "memory")

#define ia64_get_cpuid(index)                                                           \
({                                                                                      \
        __u64 ia64_intri_res;                                                           \
        asm volatile ("mov %0=cpuid[%r1]" : "=r"(ia64_intri_res) : "rO"(index));        \
        ia64_intri_res;                                                                 \
})

#define __ia64_get_dbr(index)                                                   \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("mov %0=dbr[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_ibr(index)                                                     \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("mov %0=ibr[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_pkr(index)                                                     \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("mov %0=pkr[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_pmc(index)                                                     \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("mov %0=pmc[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_pmd(index)                                                     \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("mov %0=pmd[%1]" : "=r"(ia64_intri_res) : "r"(index));    \
        ia64_intri_res;                                                         \
})

#define ia64_get_rr(index)                                                      \
({                                                                              \
        __u64 ia64_intri_res;                                                   \
        asm volatile ("mov %0=rr[%1]" : "=r"(ia64_intri_res) : "r" (index));    \
        ia64_intri_res;                                                         \
})

#define ia64_fc(addr)   asm volatile ("fc %0" :: "r"(addr) : "memory")

#define ia64_sync_i()   asm volatile (";; sync.i" ::: "memory")

#define ia64_ssm(mask)  asm volatile ("ssm %0":: "i"((mask)) : "memory")
#define ia64_rsm(mask)  asm volatile ("rsm %0":: "i"((mask)) : "memory")
#define ia64_sum(mask)  asm volatile ("sum %0":: "i"((mask)) : "memory")
#define ia64_rum(mask)  asm volatile ("rum %0":: "i"((mask)) : "memory")

#define ia64_ptce(addr) asm volatile ("ptc.e %0" :: "r"(addr))

#define ia64_ptcga(addr, size)                                                  \
        asm volatile ("ptc.ga %0,%1" :: "r"(addr), "r"(size) : "memory")

#define ia64_ptcl(addr, size)                                           \
        asm volatile ("ptc.l %0,%1" :: "r"(addr), "r"(size) : "memory")

#define ia64_ptri(addr, size)                                           \
        asm volatile ("ptr.i %0,%1" :: "r"(addr), "r"(size) : "memory")

#define ia64_ptrd(addr, size)                                           \
        asm volatile ("ptr.d %0,%1" :: "r"(addr), "r"(size) : "memory")

/* Values for lfhint in ia64_lfetch and ia64_lfetch_fault */

#define ia64_lfhint_none   0
#define ia64_lfhint_nt1    1
#define ia64_lfhint_nt2    2
#define ia64_lfhint_nta    3

#define ia64_lfetch(lfhint, y)                                  \
({                                                              \
        switch (lfhint) {                                       \
        case ia64_lfhint_none:                                  \
                asm volatile ("lfetch [%0]" :: "r"(y));         \
                break;                                          \
        case ia64_lfhint_nt1:                                   \
                asm volatile ("lfetch.nt1 [%0]" :: "r"(y));     \
                break;                                          \
        case ia64_lfhint_nt2:                                   \
                asm volatile ("lfetch.nt2 [%0]" :: "r"(y));     \
                break;                                          \
        case ia64_lfhint_nta:                                   \
                asm volatile ("lfetch.nta [%0]" :: "r"(y));     \
                break;                                          \
        }                                                       \
})

#define ia64_lfetch_excl(lfhint, y)                                     \
({                                                                      \
        switch (lfhint) {                                               \
        case ia64_lfhint_none:                                          \
                asm volatile ("lfetch.excl [%0]" :: "r"(y));            \
                break;                                                  \
        case ia64_lfhint_nt1:                                           \
                asm volatile ("lfetch.excl.nt1 [%0]" :: "r"(y));        \
                break;                                                  \
        case ia64_lfhint_nt2:                                           \
                asm volatile ("lfetch.excl.nt2 [%0]" :: "r"(y));        \
                break;                                                  \
        case ia64_lfhint_nta:                                           \
                asm volatile ("lfetch.excl.nta [%0]" :: "r"(y));        \
                break;                                                  \
        }                                                               \
})

#define ia64_lfetch_fault(lfhint, y)                                    \
({                                                                      \
        switch (lfhint) {                                               \
        case ia64_lfhint_none:                                          \
                asm volatile ("lfetch.fault [%0]" :: "r"(y));           \
                break;                                                  \
        case ia64_lfhint_nt1:                                           \
                asm volatile ("lfetch.fault.nt1 [%0]" :: "r"(y));       \
                break;                                                  \
        case ia64_lfhint_nt2:                                           \
                asm volatile ("lfetch.fault.nt2 [%0]" :: "r"(y));       \
                break;                                                  \
        case ia64_lfhint_nta:                                           \
                asm volatile ("lfetch.fault.nta [%0]" :: "r"(y));       \
                break;                                                  \
        }                                                               \
})

#define ia64_lfetch_fault_excl(lfhint, y)                               \
({                                                                      \
        switch (lfhint) {                                               \
        case ia64_lfhint_none:                                          \
                asm volatile ("lfetch.fault.excl [%0]" :: "r"(y));      \
                break;                                                  \
        case ia64_lfhint_nt1:                                           \
                asm volatile ("lfetch.fault.excl.nt1 [%0]" :: "r"(y));  \
                break;                                                  \
        case ia64_lfhint_nt2:                                           \
                asm volatile ("lfetch.fault.excl.nt2 [%0]" :: "r"(y));  \
                break;                                                  \
        case ia64_lfhint_nta:                                           \
                asm volatile ("lfetch.fault.excl.nta [%0]" :: "r"(y));  \
                break;                                                  \
        }                                                               \
})
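
/*
 * Usage sketch (editor's illustration, not part of the original header):
 * lfetch prefetches the cache line containing the address; the .nta hint
 * avoids displacing temporally useful data.  Running a few iterations
 * ahead of a loop (the distance of 8 is tuning, not gospel):
 *
 *	for (i = 0; i < n; i++) {
 *		ia64_lfetch(ia64_lfhint_nta, &a[i + 8]);
 *		sum += a[i];
 *	}
 *
 * The plain forms never raise a fault; the .fault variants fault like a
 * real load and should only see addresses known to be valid.
 */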

#define ia64_intrin_local_irq_restore(x)                        \
do {                                                            \
        asm volatile ("     cmp.ne p6,p7=%0,r0;;"               \
                      "(p6) ssm psr.i;"                         \
                      "(p7) rsm psr.i;;"                        \
                      "(p6) srlz.d"                             \
                      :: "r"((x)) : "p6", "p7", "memory");      \
} while (0)

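/*
 * Usage sketch (editor's illustration, not part of the original header):
 * the restore sequence compares x against r0 and then either re-enables
 * (ssm) or re-disables (rsm) interrupts, serializing only when they come
 * back on.  The matching save sequence reads PSR first, e.g. (IA64_PSR_I
 * comes from <asm/kregs.h>):
 *
 *	unsigned long flags = ia64_getreg(_IA64_REG_PSR);
 *	ia64_stop();
 *	ia64_rsm(IA64_PSR_I);
 *	...critical section...
 *	ia64_intrin_local_irq_restore(flags & IA64_PSR_I);
 */
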
#endif /* _ASM_IA64_GCC_INTRIN_H */