Lines Matching refs:ptr
35 volatile void *ptr) \
52 : "=&r" (ret), "=&r" (tmp), "+Q" (*(u8 *)ptr) \
80 volatile void *ptr, \
85 return __xchg_case##sfx##_1(x, ptr); \
87 return __xchg_case##sfx##_2(x, ptr); \
89 return __xchg_case##sfx##_4(x, ptr); \
91 return __xchg_case##sfx##_8(x, ptr); \
106 #define __xchg_wrapper(sfx, ptr, x) \
108 __typeof__(*(ptr)) __ret; \
109 __ret = (__typeof__(*(ptr))) \
110 __xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
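
The matches above outline the two-layer xchg pattern: __xchg dispatches on sizeof(*(ptr)) to a per-size __xchg_case helper (the inline-assembly cases around lines 35-52), and __xchg_wrapper casts the unsigned long result back to the pointed-to type so callers get a value of the right type. Below is a minimal user-space sketch of that pattern, not the kernel code: the my_* names are made up, and the GCC/Clang builtin __atomic_exchange_n stands in for the per-size assembly cases.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Size dispatcher, mirroring __xchg##sfx: pick the width from sizeof and
 * forward to a per-size case.  Here every case is the compiler builtin
 * __atomic_exchange_n rather than the kernel's inline assembly. */
static inline unsigned long my_xchg(unsigned long x, volatile void *ptr,
				    size_t size)
{
	switch (size) {
	case 1:
		return __atomic_exchange_n((volatile uint8_t *)ptr,
					   (uint8_t)x, __ATOMIC_SEQ_CST);
	case 2:
		return __atomic_exchange_n((volatile uint16_t *)ptr,
					   (uint16_t)x, __ATOMIC_SEQ_CST);
	case 4:
		return __atomic_exchange_n((volatile uint32_t *)ptr,
					   (uint32_t)x, __ATOMIC_SEQ_CST);
	case 8:
		return __atomic_exchange_n((volatile uint64_t *)ptr,
					   (uint64_t)x, __ATOMIC_SEQ_CST);
	}
	return 0;	/* unreachable for the supported sizes */
}

/* Type-preserving wrapper, mirroring __xchg_wrapper: the dispatcher traffics
 * in unsigned long, so the result is cast back to the pointee type. */
#define my_xchg_wrapper(ptr, x)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		my_xchg((unsigned long)(x), (ptr), sizeof(*(ptr)));	\
	__ret;								\
})

int main(void)
{
	uint32_t v = 1;
	uint32_t old = my_xchg_wrapper(&v, 2u);

	printf("old=%u new=%u\n", old, v);	/* old=1 new=2 */
	return 0;
}
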
121 static inline unsigned long __cmpxchg##sfx(volatile void *ptr, \
128 return __cmpxchg_case##sfx##_1(ptr, (u8)old, new); \
130 return __cmpxchg_case##sfx##_2(ptr, (u16)old, new); \
132 return __cmpxchg_case##sfx##_4(ptr, old, new); \
134 return __cmpxchg_case##sfx##_8(ptr, old, new); \
149 #define __cmpxchg_wrapper(sfx, ptr, o, n) \
151 __typeof__(*(ptr)) __ret; \
152 __ret = (__typeof__(*(ptr))) \
153 __cmpxchg##sfx((ptr), (unsigned long)(o), \
154 (unsigned long)(n), sizeof(*(ptr))); \
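
__cmpxchg follows the same shape with an extra expected-value argument, narrowed to u8/u16 for the small cases, and __cmpxchg_wrapper again casts the result back to the pointee type. A comparable sketch follows, assuming the GCC/Clang builtin __atomic_compare_exchange_n in place of the per-size assembly and showing only the 4-byte case; the my_* names are illustrative.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* One per-size case, shown for 4 bytes; the 1-, 2- and 8-byte cases look the
 * same.  Returns the value observed at *ptr, as cmpxchg conventionally does:
 * equal to 'old' on success, the current value on failure. */
static inline uint32_t my_cmpxchg_case_4(volatile void *ptr, uint32_t old,
					 uint32_t new)
{
	uint32_t expected = old;

	__atomic_compare_exchange_n((volatile uint32_t *)ptr, &expected, new,
				    0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return expected;
}

/* Size dispatcher, mirroring __cmpxchg##sfx (only the 4-byte case here). */
static inline unsigned long my_cmpxchg(volatile void *ptr, unsigned long old,
				       unsigned long new, size_t size)
{
	switch (size) {
	case 4:
		return my_cmpxchg_case_4(ptr, (uint32_t)old, (uint32_t)new);
	default:
		return 0;	/* the real header also handles 1, 2 and 8 bytes */
	}
}

/* Type-preserving wrapper, mirroring __cmpxchg_wrapper. */
#define my_cmpxchg_wrapper(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		my_cmpxchg((ptr), (unsigned long)(o),			\
			   (unsigned long)(n), sizeof(*(ptr)));		\
	__ret;								\
})

int main(void)
{
	uint32_t v = 5;
	uint32_t ret;

	ret = my_cmpxchg_wrapper(&v, 5u, 7u);	/* succeeds: returns 5, v becomes 7 */
	printf("ret=%u v=%u\n", ret, v);

	ret = my_cmpxchg_wrapper(&v, 5u, 9u);	/* fails: returns 7, v stays 7 */
	printf("ret=%u v=%u\n", ret, v);
	return 0;
}
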
212 #define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
213 #define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
214 #define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
215 #define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
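
The last four matches show all four per-size this_cpu_cmpxchg hooks forwarding to the same _protect_cmpxchg_local helper (defined just above these lines in the header, not among the matches); that works because the underlying cmpxchg is already size-dispatched, so only the protection around the local operation needs to be shared. A toy user-space sketch of the same aliasing pattern follows; it is only an analogy: a pthread mutex stands in for the kernel's preempt_disable()/preempt_enable(), __sync_val_compare_and_swap stands in for cmpxchg_local(), and the toy_* names are hypothetical.

#include <stdint.h>
#include <stdio.h>
#include <pthread.h>

/* Stand-in for preempt_disable()/preempt_enable(); a mutex is only a
 * user-space analogy, not what the kernel header does. */
static pthread_mutex_t fake_preempt_lock = PTHREAD_MUTEX_INITIALIZER;

/* Size-generic local compare-and-swap: the builtin dispatches on the operand
 * type, much as the kernel's cmpxchg_local() dispatches on sizeof. */
#define toy_cmpxchg_local(ptr, o, n) \
	__sync_val_compare_and_swap((ptr), (o), (n))

/* One protected helper, in the spirit of _protect_cmpxchg_local ... */
#define toy_protect_cmpxchg_local(ptr, o, n)				\
({									\
	__typeof__(*(ptr)) __ret;					\
	pthread_mutex_lock(&fake_preempt_lock);				\
	__ret = toy_cmpxchg_local((ptr), (o), (n));			\
	pthread_mutex_unlock(&fake_preempt_lock);			\
	__ret;								\
})

/* ... backing all four per-size hooks, as in the listing above. */
#define toy_this_cpu_cmpxchg_1(ptr, o, n) toy_protect_cmpxchg_local(ptr, o, n)
#define toy_this_cpu_cmpxchg_2(ptr, o, n) toy_protect_cmpxchg_local(ptr, o, n)
#define toy_this_cpu_cmpxchg_4(ptr, o, n) toy_protect_cmpxchg_local(ptr, o, n)
#define toy_this_cpu_cmpxchg_8(ptr, o, n) toy_protect_cmpxchg_local(ptr, o, n)

int main(void)
{
	uint8_t  b = 1;
	uint64_t q = 10;

	printf("%u\n", (unsigned)toy_this_cpu_cmpxchg_1(&b, 1, 2));
	printf("%llu\n", (unsigned long long)toy_this_cpu_cmpxchg_8(&q, 10, 20));
	return 0;
}
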