/* 64-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_CMPXCHG__
#define __ARCH_SPARC64_CMPXCHG__

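/*
 * xchg32()/xchg64() below spin on cas/casx: reload the current value,
 * attempt to swap in the new one, and retry if another CPU modified the
 * location in between.  The value previously held in memory is returned.
 */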
static inline unsigned long xchg32(__volatile__ unsigned int *m, unsigned int val)
{
	unsigned long tmp1, tmp2;

	__asm__ __volatile__(
"	mov		%0, %1\n"
"1:	lduw		[%4], %2\n"
"	cas		[%4], %2, %0\n"
"	cmp		%2, %0\n"
"	bne,a,pn	%%icc, 1b\n"
"	 mov		%1, %0\n"
	: "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
	: "0" (val), "r" (m)
	: "cc", "memory");
	return val;
}

static inline unsigned long xchg64(__volatile__ unsigned long *m, unsigned long val)
{
	unsigned long tmp1, tmp2;

	__asm__ __volatile__(
"	mov		%0, %1\n"
"1:	ldx		[%4], %2\n"
"	casx		[%4], %2, %0\n"
"	cmp		%2, %0\n"
"	bne,a,pn	%%xcc, 1b\n"
"	 mov		%1, %0\n"
	: "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
	: "0" (val), "r" (m)
	: "cc", "memory");
	return val;
}

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

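/*
 * Example use (hypothetical caller): atomically install a new value and
 * obtain whatever was stored before:
 *
 *	old = xchg(&ctx->state, new_state);
 */

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */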
void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr,
				   int size)
{
	switch (size) {
	case 4:
		return xchg32(ptr, x);
	case 8:
		return xchg64(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
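
/*
 * Example use (hypothetical caller): bump a 32-bit counter only if it
 * still holds the value we last read; success is detected by comparing
 * the return value with the expected old value:
 *
 *	do {
 *		old = counter;
 *	} while (cmpxchg(&counter, old, old + 1) != old);
 */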

#include <asm-generic/cmpxchg-local.h>

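/*
 * Unlike the xchg helpers above, a single cas/casx suffices here: the
 * store is already conditional on the memory word matching "old", and
 * the instruction always returns the value that was found in memory.
 */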
static inline unsigned long
__cmpxchg_u32(volatile int *m, int old, int new)
{
	__asm__ __volatile__("cas [%2], %3, %0"
			     : "=&r" (new)
			     : "0" (new), "r" (m), "r" (old)
			     : "memory");

	return new;
}

static inline unsigned long
__cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
	__asm__ __volatile__("casx [%2], %3, %0"
			     : "=&r" (new)
			     : "0" (new), "r" (m), "r" (old)
			     : "memory");

	return new;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	case 8:
		return __cmpxchg_u64(ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr,o,n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
				       (unsigned long)_n_, sizeof(*(ptr))); \
})

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */

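/*
 * The 4- and 8-byte cases simply reuse the SMP-safe __cmpxchg() above,
 * since cas/casx is atomic anyway; any other size falls back to the
 * generic interrupt-disabling implementation from
 * <asm-generic/cmpxchg-local.h>.
 */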
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 4:
	case 8:	return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
					     (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
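/*
 * cas/casx is SMP-atomic, so the CPU-local and the SMP-safe 64-bit
 * compare-and-exchange end up being the same operation here.
 */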
#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n))

#endif /* __ARCH_SPARC64_CMPXCHG__ */