#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return ACCESS_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}
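
/*
 * Illustrative sketch (not part of this header): a hypothetical event
 * counter using ATOMIC_INIT, atomic_set and atomic_read as documented
 * above.  Names are made up for illustration.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	void reset_events(void)
 *	{
 *		atomic_set(&nr_events, 0);
 *	}
 *
 *	int snapshot_events(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 */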

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
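
/*
 * Illustrative sketch (not part of this header): folding a hypothetical
 * locally accumulated tally into a shared counter with atomic_add().
 *
 *	static atomic_t total_pkts = ATOMIC_INIT(0);
 *
 *	void flush_batch(int batched)
 *	{
 *		atomic_add(batched, &total_pkts);
 *	}
 */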

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}
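
/*
 * Illustrative sketch (not part of this header): dropping several
 * references at once on a hypothetical object, freeing it when the
 * count reaches zero.  atomic_sub_and_test() makes the subtract and
 * the zero test a single atomic step.  struct obj and free_obj() are
 * made up for illustration.
 *
 *	void put_obj_many(struct obj *o, int nr)
 *	{
 *		if (atomic_sub_and_test(nr, &o->refs))
 *			free_obj(o);
 *	}
 */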

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}
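
/*
 * Illustrative sketch (not part of this header): the classic
 * reference-count pattern built from atomic_inc() and
 * atomic_dec_and_test().  struct obj and free_obj() are hypothetical.
 *
 *	void get_obj(struct obj *o)
 *	{
 *		atomic_inc(&o->refs);
 *	}
 *
 *	void put_obj(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refs))
 *			free_obj(o);
 *	}
 */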

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}
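
/*
 * Illustrative sketch (not part of this header): a hypothetical budget
 * counter where atomic_add_negative() detects overdraw in the same
 * atomic step as the charge.  Returns true if the charge succeeded.
 *
 *	bool charge(atomic_t *budget, int cost)
 *	{
 *		return !atomic_add_negative(-cost, budget);
 *	}
 */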

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}
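
/*
 * Illustrative sketch (not part of this header): handing out unique,
 * monotonically increasing ids with atomic_add_return(), which returns
 * the post-increment value.  next_id is a hypothetical counter.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int alloc_id(void)
 *	{
 *		return atomic_add_return(1, &next_id);
 *	}
 */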

/**
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns @v - @i.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
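
/*
 * Illustrative sketch (not part of this header): a compare-and-swap
 * retry loop built on atomic_cmpxchg(), the usual way to construct an
 * arbitrary atomic read-modify-write.  Here the value is doubled,
 * saturating at a hypothetical LIMIT.
 *
 *	void double_saturating(atomic_t *v)
 *	{
 *		int old = atomic_read(v);
 *		int new, prev;
 *
 *		for (;;) {
 *			new = old * 2 > LIMIT ? LIMIT : old * 2;
 *			prev = atomic_cmpxchg(v, old, new);
 *			if (prev == old)
 *				break;
 *			old = prev;
 *		}
 *	}
 */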

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
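
/*
 * Illustrative sketch (not part of this header): the familiar
 * "increment unless zero" pattern, taking a reference only while the
 * object is still live.  Since __atomic_add_unless() returns the old
 * value, a nonzero return means the increment happened.  struct obj is
 * hypothetical.
 *
 *	bool try_get_obj(struct obj *o)
 *	{
 *		return __atomic_add_unless(&o->refs, 1, 0) != 0;
 *	}
 */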

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v.
 * Returns the new value of @v.
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}
221
222/* These are x86-specific, used by some header files */
223#define atomic_clear_mask(mask, addr)				\
224	asm volatile(LOCK_PREFIX "andl %0,%1"			\
225		     : : "r" (~(mask)), "m" (*(addr)) : "memory")
226
227#define atomic_set_mask(mask, addr)				\
228	asm volatile(LOCK_PREFIX "orl %0,%1"			\
229		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
230		     : "memory")
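
/*
 * Illustrative sketch (not part of this header): flipping hypothetical
 * status bits in a shared word with atomic_set_mask() and
 * atomic_clear_mask().
 *
 *	static unsigned int flags;
 *
 *	void mark_busy(void)
 *	{
 *		atomic_set_mask(0x1, &flags);
 *	}
 *
 *	void mark_idle(void)
 *	{
 *		atomic_clear_mask(0x1, &flags);
 *	}
 */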

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */