#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
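
/*
 * Usage sketch (illustrative only; "pending" and process_pending() are
 * hypothetical):
 *
 *	static atomic_t pending = ATOMIC_INIT(0);
 *
 *	atomic_set(&pending, 5);
 *	if (atomic_read(&pending) > 0)
 *		process_pending();
 *
 * Note that atomic_read()/atomic_set() are plain loads and stores with
 * no implied memory barrier; ordering, where required, must come from
 * the RMW operations below or from explicit barriers.
 */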

#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int a, atomic_t *v)			\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
"1:	lwarx	%0,0,%3		# atomic_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
	PPC405_ERR77(0,%3)						\
"	stwcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC_OP_RETURN(op, asm_op)					\
static __inline__ int atomic_##op##_return(int a, atomic_t *v)		\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	lwarx	%0,0,%2		# atomic_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
	PPC405_ERR77(0,%2)						\
"	stwcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, subf)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
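
/*
 * For reference, ATOMIC_OPS(add, add) above expands to:
 *
 *	void atomic_add(int a, atomic_t *v);
 *	int atomic_add_return(int a, atomic_t *v);
 *
 * and likewise ATOMIC_OPS(sub, subf) yields atomic_sub() and
 * atomic_sub_return().  The _return variants are bracketed by
 * PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER and are therefore
 * fully ordered; the void variants carry no barriers.
 */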

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
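
/*
 * Sketch (hypothetical): with a counter biased to -nr_threads, the
 * last thread to arrive sees the result reach zero:
 *
 *	if (atomic_inc_and_test(&barrier_count))
 *		wake_up_waiters();	// hypothetical helper
 */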

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
128"	stwcx.	%0,0,%2\n\
129	bne-	1b"
130	: "=&r" (t), "+m" (v->counter)
131	: "r" (&v->counter)
132	: "cc", "xer");
133}
134
135static __inline__ int atomic_dec_return(atomic_t *v)
136{
137	int t;
138
139	__asm__ __volatile__(
140	PPC_ATOMIC_ENTRY_BARRIER
141"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
142	addic	%0,%0,-1\n"
143	PPC405_ERR77(0,%1)
144"	stwcx.	%0,0,%1\n\
145	bne-	1b"
146	PPC_ATOMIC_EXIT_BARRIER
147	: "=&r" (t)
148	: "r" (&v->counter)
149	: "cc", "xer", "memory");
150
151	return t;
152}
153
154#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
155#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
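
/*
 * atomic_cmpxchg() supports open-coded read-modify-write loops; a
 * minimal sketch that atomically doubles a counter, given atomic_t *v:
 *
 *	int old, prev;
 *
 *	do {
 *		old = atomic_read(v);
 *		prev = atomic_cmpxchg(v, old, old * 2);
 *	} while (prev != old);
 */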

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
177"	stwcx.	%0,0,%1 \n\
178	bne-	1b \n"
179	PPC_ATOMIC_EXIT_BARRIER
180"	subf	%0,%2,%0 \n\
1812:"
182	: "=&r" (t)
183	: "r" (&v->counter), "r" (a), "r" (u)
184	: "cc", "memory");
185
186	return t;
187}
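
/*
 * The generic atomic_add_unless() in linux/atomic.h is typically built
 * on this, comparing the returned old value against @u, e.g. (sketch):
 *
 *	atomic_add_unless(v, 1, 0);	// take a reference unless dead
 *
 * succeeds (returns non-zero) only when the counter was not already 0.
 */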

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
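
/*
 * Typical use (sketch; find_object() and the refcnt field are
 * hypothetical): taking a reference on an object found under RCU,
 * where a zero count means the object is already on its way out:
 *
 *	rcu_read_lock();
 *	obj = find_object(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;		// lost the race; do not use
 *	rcu_read_unlock();
 */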

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
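
/*
 * Sketch (hypothetical "available" counter): consume one item only if
 * any remain; on failure the counter is left untouched:
 *
 *	if (atomic_dec_if_positive(&pool->available) < 0)
 *		return -EAGAIN;		// was already 0, not decremented
 */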

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long a, atomic64_t *v)		\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
"1:	ldarx	%0,0,%3		# atomic64_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
"	stdcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long a, atomic64_t *v)	\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	ldarx	%0,0,%2		# atomic64_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
"	stdcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, subf)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was done, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
433"1:	ldarx	%0,0,%1		# __atomic_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
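
/*
 * Note the contrast with __atomic_add_unless() above: this returns a
 * boolean-style result rather than the old value, e.g. (sketch, with a
 * hypothetical atomic64_t counter):
 *
 *	if (atomic64_add_unless(&nr_events, 1, LONG_MAX))
 *		...			// counter was bumped
 */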

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */