#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
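
/*
 * Usage sketch (illustrative only; the per-cpu variable and names are
 * hypothetical).  A local_t is only atomic with respect to the CPU it
 * lives on, so it is normally embedded in per-cpu data:
 *
 *	static DEFINE_PER_CPU(local_t, hit_count) = LOCAL_INIT(0);
 *
 *	local_inc(this_cpu_ptr(&hit_count));
 *	pr_debug("hits: %ld\n", local_read(this_cpu_ptr(&hit_count)));
 */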

/*
 * Same as above, but return the result value
 */
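
/*
 * Note on the LL/SC paths below: __SC overwrites %0 with a success
 * flag (1 if the store-conditional succeeded, 0 if another access to
 * the line intervened), so the loop branches back to 1: until the
 * store sticks, and the trailing addu/subu recomputes the return
 * value that the flag clobbered.  The R10000_LLSC_WAR variant differs
 * only in using the branch-likely beqzl, as a workaround for early
 * R10000 silicon.
 */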
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
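
/*
 * Example (illustrative; "events" and THRESHOLD are hypothetical):
 * bump a per-cpu counter and act on the value it just reached:
 *
 *	if (local_add_return(1, this_cpu_ptr(&events)) == THRESHOLD)
 *		...
 */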

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
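
/*
 * local_cmpxchg() returns the value the counter held before the
 * operation, so a caller detects success by comparing that against
 * the expected old value.  A sketch (names hypothetical):
 *
 *	long old = local_read(&l);
 *	while (local_cmpxchg(&l, old, old + 1) != old)
 *		old = local_read(&l);
 */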

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
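
/*
 * local_inc_not_zero() is the usual building block for a reference
 * count that must not be revived once it has dropped to zero.  A
 * sketch (the object and its field are hypothetical):
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;	(the object is already going away)
 */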

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
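
/*
 * The *_and_test() macros are typically used to detect the final
 * reference going away, e.g. (free_obj() and obj are hypothetical):
 *
 *	if (local_dec_and_test(&obj->refs))
 *		free_obj(obj);
 */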

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note that,
 * like the rest of this API, they take a pointer to a local_t; being
 * non-atomic, they are only safe when the caller guarantees exclusive
 * access to the counter.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter+=(i))
#define __local_sub(i, l)	((l)->a.counter-=(i))
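
/*
 * A sketch of when the non-atomic forms are safe (names hypothetical):
 * with interrupts disabled on this CPU, nothing can interleave with
 * the plain read-modify-write:
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_add(len, this_cpu_ptr(&rx_bytes));
 *	local_irq_restore(flags);
 */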

#endif /* _ARCH_MIPS_LOCAL_H */