1 #ifndef __XEN_SYNCH_BITOPS_H__
2 #define __XEN_SYNCH_BITOPS_H__
/*
 * Copyright 1992, Linus Torvalds.
 * Heavily modified to provide guaranteed strong synchronisation
 * when communicating with Xen or other guest OSes running on other CPUs.
 */
#ifdef HAVE_XEN_PLATFORM_COMPAT_H
#include <xen/platform-compat.h>
#endif
/* Shorthand used by the bitop asm below: view the caller's 'addr'
   parameter as a volatile long so the compiler tracks the access.
   NOTE: relies on the enclosing function naming its parameter 'addr'. */
#define ADDR (*(volatile long *) addr)
/*
 * synch_set_bit - atomically set bit @nr in the bitmap at @addr.
 * @nr:   bit number (0-based); "Ir" lets it be an immediate or a register.
 * @addr: base of the bitmap.
 *
 * Uses a lock-prefixed BTS so the update is strongly ordered and visible
 * to other (virtual) CPUs.  The memory operand is written inline instead
 * of via the ADDR macro so the asm does not depend on the parameter name.
 */
static __inline__ void synch_set_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btsl %1,%0"
        : "+m" (*(volatile long *) addr) : "Ir" (nr) : "memory" );
}
/*
 * synch_clear_bit - atomically clear bit @nr in the bitmap at @addr.
 * @nr:   bit number (0-based); "Ir" lets it be an immediate or a register.
 * @addr: base of the bitmap.
 *
 * Lock-prefixed BTR; see synch_set_bit for the operand conventions.
 */
static __inline__ void synch_clear_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btrl %1,%0"
        : "+m" (*(volatile long *) addr) : "Ir" (nr) : "memory" );
}
/*
 * synch_change_bit - atomically toggle bit @nr in the bitmap at @addr.
 * @nr:   bit number (0-based); "Ir" lets it be an immediate or a register.
 * @addr: base of the bitmap.
 *
 * Lock-prefixed BTC; see synch_set_bit for the operand conventions.
 */
static __inline__ void synch_change_bit(int nr, volatile void * addr)
{
    __asm__ __volatile__ (
        "lock btcl %1,%0"
        : "+m" (*(volatile long *) addr) : "Ir" (nr) : "memory" );
}
/*
 * synch_test_and_set_bit - atomically set bit @nr, returning its old value.
 * @nr:   bit number (0-based).
 * @addr: base of the bitmap.
 *
 * Returns 0 if the bit was previously clear, non-zero (-1) if it was set:
 * BTS copies the old bit into CF and "sbbl %0,%0" expands CF to 0 or ~0.
 */
static __inline__ int synch_test_and_set_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btsl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "+m" (*(volatile long *) addr) : "Ir" (nr) : "memory");
    return oldbit;
}
/*
 * synch_test_and_clear_bit - atomically clear bit @nr, returning its old value.
 * @nr:   bit number (0-based).
 * @addr: base of the bitmap.
 *
 * Returns 0 if the bit was previously clear, non-zero (-1) if it was set;
 * same CF-to-integer trick as synch_test_and_set_bit.
 */
static __inline__ int synch_test_and_clear_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btrl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "+m" (*(volatile long *) addr) : "Ir" (nr) : "memory");
    return oldbit;
}
/*
 * synch_test_and_change_bit - atomically toggle bit @nr, returning its old value.
 * @nr:   bit number (0-based).
 * @addr: base of the bitmap.
 *
 * Returns 0 if the bit was previously clear, non-zero (-1) if it was set;
 * same CF-to-integer trick as synch_test_and_set_bit.
 */
static __inline__ int synch_test_and_change_bit(int nr, volatile void * addr)
{
    int oldbit;
    __asm__ __volatile__ (
        "lock btcl %2,%1\n\tsbbl %0,%0"
        : "=r" (oldbit), "+m" (*(volatile long *) addr) : "Ir" (nr) : "memory");
    return oldbit;
}
/* Oversized dummy type: an "m"(*__synch_xg(p)) asm operand tells the
   compiler a large region at p may be accessed, without generating any
   actual loads/stores of that size. */
struct __synch_xchg_dummy { unsigned long a[100]; };
#define __synch_xg(x) ((struct __synch_xchg_dummy *)(x))
/*
 * synch_cmpxchg(ptr, old, new) - type-generic locked compare-and-swap.
 * Dispatches to __synch_cmpxchg with the operand size taken from *ptr
 * and casts the previous value back to the pointed-to type.
 */
#define synch_cmpxchg(ptr, old, new) \
((__typeof__(*(ptr)))__synch_cmpxchg((ptr),\
                                     (unsigned long)(old), \
                                     (unsigned long)(new), \
                                     sizeof(*(ptr))))
/*
 * __synch_cmpxchg - locked compare-and-swap on a 1/2/4/8-byte object.
 * @ptr:  object to update.
 * @old:  expected current value.
 * @new:  value to store if *ptr == old.
 * @size: operand size in bytes (normally sizeof(*ptr), via synch_cmpxchg).
 *
 * Returns the previous value of *ptr; the swap happened iff that equals
 * @old.  CMPXCHG implicitly uses the accumulator, hence "=a"/"0".  The
 * "memory" clobber is the ordering barrier, so a plain typed "m" operand
 * per size is sufficient.  An unsupported @size returns @old unchanged.
 */
static inline unsigned long __synch_cmpxchg(volatile void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
    unsigned long prev;

    switch (size) {
    case 1:
        __asm__ __volatile__("lock; cmpxchgb %b1,%2"
                             : "=a"(prev)
                             : "q"(new), "m"(*(volatile unsigned char *)ptr),
                               "0"(old)
                             : "memory");
        return prev;
    case 2:
        __asm__ __volatile__("lock; cmpxchgw %w1,%2"
                             : "=a"(prev)
                             : "r"(new), "m"(*(volatile unsigned short *)ptr),
                               "0"(old)
                             : "memory");
        return prev;
/* Also honour the compiler's own 64-bit macro so userspace builds without
   the kernel's CONFIG_X86_64 still get the 8-byte case. */
#if defined(CONFIG_X86_64) || defined(__x86_64__)
    case 4:
        __asm__ __volatile__("lock; cmpxchgl %k1,%2"
                             : "=a"(prev)
                             : "r"(new), "m"(*(volatile unsigned int *)ptr),
                               "0"(old)
                             : "memory");
        return prev;
    case 8:
        __asm__ __volatile__("lock; cmpxchgq %1,%2"
                             : "=a"(prev)
                             : "r"(new), "m"(*(volatile unsigned long *)ptr),
                               "0"(old)
                             : "memory");
        return prev;
#else
    case 4:
        __asm__ __volatile__("lock; cmpxchgl %1,%2"
                             : "=a"(prev)
                             : "r"(new), "m"(*(volatile unsigned int *)ptr),
                               "0"(old)
                             : "memory");
        return prev;
#endif
    }
    return old;
}
/* Plain reads need no extra synchronisation: reuse the ordinary test_bit(). */
#define synch_test_bit test_bit
/* NOTE(review): presumably synch_cmpxchg already handles sub-word (1/2-byte)
   operands on this architecture, so no separate helper is needed -- confirm. */
#define synch_cmpxchg_subword synch_cmpxchg
126 #endif /* __XEN_SYNCH_BITOPS_H__ */