#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */

#include <linux/types.h>

#include <asm/intrinsics.h>

/*
 * On IA-64, counter must always be volatile to ensure that the
 * memory accesses are ordered.
 */
typedef struct { volatile __s32 counter; } atomic_t;

#define ATOMIC_INIT(i)		((atomic_t) { (i) })

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
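
/*
 * Usage sketch (illustrative only; the variable name is made up):
 *
 *	static atomic_t nr_active = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_active, 1);
 *	if (atomic_read(&nr_active) > 0)
 *		...;
 */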

static __inline__ int
ia64_atomic_add (int i, atomic_t *v)
{
	__s32 old, new;

	do {
		/* re-read the counter and retry until no other CPU races us */
		old = atomic_read(v);
		new = old + i;
	} while (ia64_cmpxchg("acq", v, old, new, sizeof(atomic_t)) != old);
	return new;
}

static __inline__ int
ia64_atomic_sub (int i, atomic_t *v)
{
	__s32 old, new;

	do {
		/* same retry loop as ia64_atomic_add(), subtracting instead */
		old = atomic_read(v);
		new = old - i;
	} while (ia64_cmpxchg("acq", v, old, new, sizeof(atomic_t)) != old);
	return new;
}
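
/*
 * Illustrative C model of the retry loops above (a sketch, not the
 * real primitive -- on IA-64, ia64_cmpxchg() expands to a single
 * cmpxchg4.acq instruction via <asm/intrinsics.h>):
 *
 *	__s32 cmpxchg_model (volatile __s32 *p, __s32 old, __s32 new)
 *	{
 *		__s32 seen = *p;	// value currently in memory
 *		if (seen == old)
 *			*p = new;	// store only if nobody raced us
 *		return seen;		// != old makes the caller retry
 *	}
 */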

#define atomic_add_return(i,v)						\
	((__builtin_constant_p(i) &&					\
	  (   (i ==  1) || (i ==   4) || (i ==   8) || (i ==  16)	\
	   || (i == -1) || (i ==  -4) || (i ==  -8) || (i == -16)))	\
	 ? ia64_fetch_and_add(i, &(v)->counter)				\
	 : ia64_atomic_add(i, v))
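
/*
 * Background on the constant check above (IA-64 ISA detail): the
 * fetchadd4.acq instruction accepts only the immediates -16, -8, -4,
 * -1, 1, 4, 8, and 16, so ia64_fetch_and_add() is usable only for
 * those compile-time constants; any other increment takes the
 * cmpxchg-loop path through ia64_atomic_add().  For example:
 *
 *	atomic_add_return(8, &v);	// single fetchadd4.acq
 *	atomic_add_return(3, &v);	// falls back to ia64_atomic_add()
 */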

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
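
/*
 * Usage sketch (hypothetical names): handy when a counter dropping
 * below zero signals exhaustion.
 *
 *	if (atomic_add_negative(-1, &free_slots))
 *		handle_underflow();
 */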

#define atomic_sub_return(i,v)						\
	((__builtin_constant_p(i) &&					\
	  (   (i ==  1) || (i ==   4) || (i ==   8) || (i ==  16)	\
	   || (i == -1) || (i ==  -4) || (i ==  -8) || (i == -16)))	\
	 ? ia64_fetch_and_add(-(i), &(v)->counter)			\
	 : ia64_atomic_sub(i, v))

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)

#define atomic_add(i,v)			atomic_add_return((i), (v))
#define atomic_sub(i,v)			atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))
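
/*
 * Usage sketch (illustrative, hypothetical structure): the classic
 * reference-counting pattern built from these macros.
 *
 *	struct foo {
 *		atomic_t refcount;
 *	};
 *
 *	void put_foo (struct foo *p)
 *	{
 *		if (atomic_dec_and_test(&p->refcount))
 *			kfree(p);	// last reference is gone
 *	}
 */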

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
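
/*
 * Usage sketch (illustrative): portable code brackets an atomic op
 * with these barriers; on IA-64 they reduce to a compiler barrier()
 * because, as noted above, the atomics already serialize.
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&v);
 *	smp_mb__after_atomic_dec();
 */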

#endif /* _ASM_IA64_ATOMIC_H */