include/asm-ia64/intrinsics.h
#ifndef _ASM_IA64_INTRINSICS_H
#define _ASM_IA64_INTRINSICS_H

/*
 * Compiler-dependent intrinsics.
 *
 * Copyright (C) 2002-2003 Hewlett-Packard Co
 *      David Mosberger-Tang <davidm@hpl.hp.com>
 */

#include <linux/config.h>

/*
 * Force an unresolved reference if someone tries to use
 * ia64_fetch_and_add() with a bad value.
 */
extern unsigned long __bad_size_for_ia64_fetch_and_add (void);
extern unsigned long __bad_increment_for_ia64_fetch_and_add (void);

#define IA64_FETCHADD(tmp,v,n,sz,sem)                                           \
({                                                                              \
        switch (sz) {                                                           \
              case 4:                                                           \
                __asm__ __volatile__ ("fetchadd4."sem" %0=[%1],%2"              \
                                      : "=r"(tmp) : "r"(v), "i"(n) : "memory"); \
                break;                                                          \
                                                                                \
              case 8:                                                           \
                __asm__ __volatile__ ("fetchadd8."sem" %0=[%1],%2"              \
                                      : "=r"(tmp) : "r"(v), "i"(n) : "memory"); \
                break;                                                          \
                                                                                \
              default:                                                          \
                __bad_size_for_ia64_fetch_and_add();                            \
        }                                                                       \
})

#define ia64_fetchadd(i,v,sem)                                                          \
({                                                                                      \
        __u64 _tmp;                                                                     \
        volatile __typeof__(*(v)) *_v = (v);                                            \
        /* Can't use a switch () here: gcc isn't always smart enough for that... */     \
        if ((i) == -16)                                                                 \
                IA64_FETCHADD(_tmp, _v, -16, sizeof(*(v)), sem);                        \
        else if ((i) == -8)                                                             \
                IA64_FETCHADD(_tmp, _v, -8, sizeof(*(v)), sem);                         \
        else if ((i) == -4)                                                             \
                IA64_FETCHADD(_tmp, _v, -4, sizeof(*(v)), sem);                         \
        else if ((i) == -1)                                                             \
                IA64_FETCHADD(_tmp, _v, -1, sizeof(*(v)), sem);                         \
        else if ((i) == 1)                                                              \
                IA64_FETCHADD(_tmp, _v, 1, sizeof(*(v)), sem);                          \
        else if ((i) == 4)                                                              \
                IA64_FETCHADD(_tmp, _v, 4, sizeof(*(v)), sem);                          \
        else if ((i) == 8)                                                              \
                IA64_FETCHADD(_tmp, _v, 8, sizeof(*(v)), sem);                          \
        else if ((i) == 16)                                                             \
                IA64_FETCHADD(_tmp, _v, 16, sizeof(*(v)), sem);                         \
        else                                                                            \
                _tmp = __bad_increment_for_ia64_fetch_and_add();                        \
        (__typeof__(*(v))) (_tmp);      /* return old value */                          \
})

#define ia64_fetch_and_add(i,v) (ia64_fetchadd(i, v, "rel") + (i)) /* return new value */

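/*
 * A minimal usage sketch (hypothetical caller, not part of the original
 * header; kept under "#if 0" so it is never compiled).  ia64_fetchadd()
 * hands back the value the word held *before* the add, while
 * ia64_fetch_and_add() folds the increment back in and yields the *new*
 * value.  The increment must be +/-1, 4, 8, or 16, since those are the
 * only immediates the fetchadd instruction accepts.
 */
#if 0
static int
example_grab_ticket (void)
{
        static volatile int next_ticket;        /* hypothetical shared counter */

        /* atomically bump the counter with acquire semantics; old value back */
        return ia64_fetchadd(1, &next_ticket, "acq");
}
#endif
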
/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer (void);

static __inline__ unsigned long
__xchg (unsigned long x, volatile void *ptr, int size)
{
        unsigned long result;

        switch (size) {
              case 1:
                __asm__ __volatile__ ("xchg1 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;

              case 2:
                __asm__ __volatile__ ("xchg2 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;

              case 4:
                __asm__ __volatile__ ("xchg4 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;

              case 8:
                __asm__ __volatile__ ("xchg8 %0=[%1],%2" : "=r" (result)
                                      : "r" (ptr), "r" (x) : "memory");
                return result;
        }
        __xchg_called_with_bad_pointer();
        return x;
}

#define xchg(ptr,x)                                                          \
  ((__typeof__(*(ptr))) __xchg ((unsigned long) (x), (ptr), sizeof(*(ptr))))

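/*
 * Usage sketch (hypothetical, never compiled): xchg() atomically stores a
 * new value and hands back whatever was there before, which makes it a
 * natural fit for a simple test-and-set style lock.  On IA-64 the xchg
 * instruction always carries acquire semantics, which is exactly what a
 * lock acquire needs.
 */
#if 0
static void
example_busy_lock (volatile unsigned int *lock)
{
        /* spin until we swap a 1 in and see that the old value was 0 */
        while (xchg(lock, 1) != 0)
                ;
}
#endif
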
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern long __cmpxchg_called_with_bad_pointer(void);

#define ia64_cmpxchg(sem,ptr,old,new,size)                                              \
({                                                                                      \
        __typeof__(ptr) _p_ = (ptr);                                                    \
        __typeof__(new) _n_ = (new);                                                    \
        __u64 _o_, _r_;                                                                 \
                                                                                        \
        switch (size) {                                                                 \
              case 1: _o_ = (__u8 ) (long) (old); break;                                \
              case 2: _o_ = (__u16) (long) (old); break;                                \
              case 4: _o_ = (__u32) (long) (old); break;                                \
              case 8: _o_ = (__u64) (long) (old); break;                                \
              default: break;                                                           \
        }                                                                               \
        __asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(_o_));                          \
        switch (size) {                                                                 \
              case 1:                                                                   \
                __asm__ __volatile__ ("cmpxchg1."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              case 2:                                                                   \
                __asm__ __volatile__ ("cmpxchg2."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              case 4:                                                                   \
                __asm__ __volatile__ ("cmpxchg4."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              case 8:                                                                   \
                __asm__ __volatile__ ("cmpxchg8."sem" %0=[%1],%2,ar.ccv"                \
                                      : "=r"(_r_) : "r"(_p_), "r"(_n_) : "memory");     \
                break;                                                                  \
                                                                                        \
              default:                                                                  \
                _r_ = __cmpxchg_called_with_bad_pointer();                              \
                break;                                                                  \
        }                                                                               \
        (__typeof__(old)) _r_;                                                          \
})

#define cmpxchg_acq(ptr,o,n)    ia64_cmpxchg("acq", (ptr), (o), (n), sizeof(*(ptr)))
#define cmpxchg_rel(ptr,o,n)    ia64_cmpxchg("rel", (ptr), (o), (n), sizeof(*(ptr)))

/* for compatibility with other platforms: */
#define cmpxchg(ptr,o,n)        cmpxchg_acq(ptr,o,n)

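/*
 * Usage sketch (hypothetical, never compiled): the canonical compare-and-
 * swap retry loop.  Re-read the word, compute the desired new value, and
 * let cmpxchg() install it only if no one raced us; on failure the value
 * returned differs from "old" and we simply try again.
 */
#if 0
static void
example_atomic_or (volatile unsigned long *word, unsigned long bits)
{
        unsigned long old, new;

        do {
                old = *word;
                new = old | bits;
        } while (cmpxchg(word, old, new) != old);
}
#endif
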
#ifdef CONFIG_IA64_DEBUG_CMPXCHG
# define CMPXCHG_BUGCHECK_DECL  int _cmpxchg_bugcheck_count = 128;
# define CMPXCHG_BUGCHECK(v)                                                    \
  do {                                                                          \
        if (_cmpxchg_bugcheck_count-- <= 0) {                                   \
                void *ip;                                                       \
                extern int printk(const char *fmt, ...);                        \
                asm ("mov %0=ip" : "=r"(ip));                                   \
                printk("CMPXCHG_BUGCHECK: stuck at %p on word %p\n", ip, (v));  \
                break;                                                          \
        }                                                                       \
  } while (0)
#else /* !CONFIG_IA64_DEBUG_CMPXCHG */
# define CMPXCHG_BUGCHECK_DECL
# define CMPXCHG_BUGCHECK(v)
#endif /* !CONFIG_IA64_DEBUG_CMPXCHG */
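
/*
 * Sketch of how these hooks slot into a cmpxchg() retry loop (hypothetical,
 * never compiled).  With CONFIG_IA64_DEBUG_CMPXCHG enabled, the loop prints
 * a diagnostic if it makes no progress within 128 iterations; otherwise
 * both macros expand to nothing and the loop is unchanged.
 */
#if 0
static void
example_atomic_inc (volatile int *v)
{
        int old;
        CMPXCHG_BUGCHECK_DECL

        do {
                CMPXCHG_BUGCHECK(v);
                old = *v;
        } while (cmpxchg_acq(v, old, old + 1) != old);
}
#endif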

#endif /* _ASM_IA64_INTRINSICS_H */