include/asm-ia64/intrinsics.h
#ifndef _ASM_IA64_INTRINSICS_H
#define _ASM_IA64_INTRINSICS_H

/*
 * Compiler-dependent intrinsics.
 *
 * Copyright (C) 2002-2003 Hewlett-Packard Co
 *      David Mosberger-Tang <davidm@hpl.hp.com>
 */

#include <linux/config.h>

/*
 * Force an unresolved reference if someone tries to use
 * ia64_fetch_and_add() with a bad value.
 */
extern unsigned long __bad_size_for_ia64_fetch_and_add (void);
extern unsigned long __bad_increment_for_ia64_fetch_and_add (void);

#define IA64_FETCHADD(tmp,v,n,sz)                                               \
({                                                                              \
        switch (sz) {                                                           \
              case 4:                                                           \
                __asm__ __volatile__ ("fetchadd4.rel %0=[%1],%2"                \
                                      : "=r"(tmp) : "r"(v), "i"(n) : "memory"); \
                break;                                                          \
                                                                                \
              case 8:                                                           \
                __asm__ __volatile__ ("fetchadd8.rel %0=[%1],%2"                \
                                      : "=r"(tmp) : "r"(v), "i"(n) : "memory"); \
                break;                                                          \
                                                                                \
              default:                                                          \
                __bad_size_for_ia64_fetch_and_add();                            \
        }                                                                       \
})

#define ia64_fetch_and_add(i,v)                                                         \
({                                                                                      \
        __u64 _tmp;                                                                     \
        volatile __typeof__(*(v)) *_v = (v);                                            \
        /* Can't use a switch () here: gcc isn't always smart enough for that... */     \
        if ((i) == -16)                                                                 \
                IA64_FETCHADD(_tmp, _v, -16, sizeof(*(v)));                             \
        else if ((i) == -8)                                                             \
                IA64_FETCHADD(_tmp, _v, -8, sizeof(*(v)));                              \
        else if ((i) == -4)                                                             \
                IA64_FETCHADD(_tmp, _v, -4, sizeof(*(v)));                              \
        else if ((i) == -1)                                                             \
                IA64_FETCHADD(_tmp, _v, -1, sizeof(*(v)));                              \
        else if ((i) == 1)                                                              \
                IA64_FETCHADD(_tmp, _v, 1, sizeof(*(v)));                               \
        else if ((i) == 4)                                                              \
                IA64_FETCHADD(_tmp, _v, 4, sizeof(*(v)));                               \
        else if ((i) == 8)                                                              \
                IA64_FETCHADD(_tmp, _v, 8, sizeof(*(v)));                               \
        else if ((i) == 16)                                                             \
                IA64_FETCHADD(_tmp, _v, 16, sizeof(*(v)));                              \
        else                                                                            \
                _tmp = __bad_increment_for_ia64_fetch_and_add();                        \
        (__typeof__(*(v))) (_tmp + (i));        /* return new value */                  \
})
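
/*
 * Illustrative sketch (not part of the original header): bumping a
 * hypothetical 64-bit event counter with ia64_fetch_and_add().  Note that
 * the macro returns the *new* value, and that the increment must be one of
 * the compile-time constants handled above; any other value leaves
 * __bad_increment_for_ia64_fetch_and_add() unresolved at link time.
 *
 *      static __inline__ __u64
 *      hypothetical_count_event (volatile __u64 *counter)
 *      {
 *              return ia64_fetch_and_add(1, counter);  // old value + 1
 *      }
 */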

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer (void);

static __inline__ unsigned long
__xchg (unsigned long x, volatile void *ptr, int size)
{
        unsigned long result;

        switch (size) {
              case 1:
                __asm__ __volatile__ ("xchg1 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;

              case 2:
                __asm__ __volatile__ ("xchg2 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;

              case 4:
                __asm__ __volatile__ ("xchg4 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;

              case 8:
                __asm__ __volatile__ ("xchg8 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;
        }
        __xchg_called_with_bad_pointer();
        return x;
}

#define xchg(ptr,x)                                                          \
  ((__typeof__(*(ptr))) __xchg ((unsigned long) (x), (ptr), sizeof(*(ptr))))
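
/*
 * Illustrative sketch (not part of the original header): xchg() atomically
 * stores the new value and returns whatever was in memory beforehand, so a
 * hypothetical "pending" flag can be claimed exactly once.
 *
 *      static __inline__ int
 *      hypothetical_claim_pending (volatile int *pending)
 *      {
 *              // non-zero only for the caller that observed the flag set
 *              return xchg(pending, 0);
 *      }
 */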

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern long __cmpxchg_called_with_bad_pointer(void);

#define ia64_cmpxchg(sem,ptr,old,new,size)                                              \
({                                                                                      \
        __typeof__(ptr) _p_ = (ptr);                                                    \
        __typeof__(new) _n_ = (new);                                                    \
        __u64 _o_, _r_;                                                                 \
                                                                                        \
        switch (size) {                                                                 \
              case 1: _o_ = (__u8 ) (long) (old); break;                                \
              case 2: _o_ = (__u16) (long) (old); break;                                \
              case 4: _o_ = (__u32) (long) (old); break;                                \
              case 8: _o_ = (__u64) (long) (old); break;                                \
              default: break;                                                           \
        }                                                                               \
        __asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(_o_));                          \
        switch (size) {                                                                 \
              case 1:                                                                   \
                __asm__ __volatile__ ("cmpxchg1."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              case 2:                                                                   \
                __asm__ __volatile__ ("cmpxchg2."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              case 4:                                                                   \
                __asm__ __volatile__ ("cmpxchg4."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              case 8:                                                                   \
                __asm__ __volatile__ ("cmpxchg8."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              default:                                                                  \
                _r_ = __cmpxchg_called_with_bad_pointer();                              \
                break;                                                                  \
        }                                                                               \
        (__typeof__(old)) _r_;                                                          \
})

#define cmpxchg_acq(ptr,o,n)    ia64_cmpxchg("acq", (ptr), (o), (n), sizeof(*(ptr)))
#define cmpxchg_rel(ptr,o,n)    ia64_cmpxchg("rel", (ptr), (o), (n), sizeof(*(ptr)))

/* for compatibility with other platforms: */
#define cmpxchg(ptr,o,n)        cmpxchg_acq(ptr,o,n)
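
/*
 * Illustrative sketch (not part of the original header): the usual
 * compare-and-exchange pattern described above.  Success is detected by
 * comparing the returned value with the expected old value; on failure the
 * loop retries with the freshly observed value.
 *
 *      static __inline__ void
 *      hypothetical_atomic_or (volatile __u32 *word, __u32 bits)
 *      {
 *              __u32 old, new;
 *
 *              do {
 *                      old = *word;
 *                      new = old | bits;
 *              } while (cmpxchg_acq(word, old, new) != old);
 *      }
 */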

#ifdef CONFIG_IA64_DEBUG_CMPXCHG
# define CMPXCHG_BUGCHECK_DECL  int _cmpxchg_bugcheck_count = 128;
# define CMPXCHG_BUGCHECK(v)                                                    \
  do {                                                                          \
        if (_cmpxchg_bugcheck_count-- <= 0) {                                   \
                void *ip;                                                       \
                extern int printk(const char *fmt, ...);                        \
                asm ("mov %0=ip" : "=r"(ip));                                   \
                printk("CMPXCHG_BUGCHECK: stuck at %p on word %p\n", ip, (v));  \
                break;                                                          \
        }                                                                       \
  } while (0)
#else /* !CONFIG_IA64_DEBUG_CMPXCHG */
# define CMPXCHG_BUGCHECK_DECL
# define CMPXCHG_BUGCHECK(v)
#endif /* !CONFIG_IA64_DEBUG_CMPXCHG */
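
/*
 * Illustrative sketch (not part of the original header): CMPXCHG_BUGCHECK_DECL
 * and CMPXCHG_BUGCHECK() are intended to wrap a cmpxchg retry loop so that,
 * with CONFIG_IA64_DEBUG_CMPXCHG enabled, a loop that makes no progress after
 * 128 attempts logs the instruction pointer and the contended word.
 *
 *      static __inline__ void
 *      hypothetical_atomic_inc (volatile __u32 *word)
 *      {
 *              __u32 old;
 *              CMPXCHG_BUGCHECK_DECL
 *
 *              do {
 *                      CMPXCHG_BUGCHECK(word);
 *                      old = *word;
 *              } while (cmpxchg_acq(word, old, old + 1) != old);
 *      }
 */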

#endif /* _ASM_IA64_INTRINSICS_H */