#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)        { (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
        return ACCESS_ONCE((v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
        v->counter = i;
}
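
/*
 * Illustrative sketch, not part of the original header: wrapping the two
 * accessors above for a simple statistics counter. example_reset and
 * example_snapshot are hypothetical names.
 */
static inline void example_reset(atomic64_t *counter)
{
        /* A plain atomic store; no memory barrier is implied. */
        atomic64_set(counter, 0);
}

static inline long example_snapshot(const atomic64_t *counter)
{
        /* A plain atomic load; no memory barrier is implied either. */
        return atomic64_read(counter);
}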

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "addq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "subq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e");
}
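
/*
 * Illustrative sketch, not part of the original header: dropping @n
 * references at once and detecting the final put. example_put_many and
 * the release callback are hypothetical names.
 */
static inline void example_put_many(atomic64_t *refs, long n,
                                    void (*release)(void))
{
        /* The locked subq both updates the count and sets ZF, so the
         * zero test costs no extra atomic operation. */
        if (atomic64_sub_and_test(n, refs))
                release();
}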

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "incq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "decq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e");
}
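
/*
 * Illustrative sketch, not part of the original header: the classic
 * reference-count "put" pattern, as kref does with 32-bit atomics.
 * example_put and the release callback are hypothetical names.
 */
static inline void example_put(atomic64_t *refcount, void (*release)(void))
{
        /* Exactly one caller observes the transition to zero and is
         * therefore the one allowed to free the object. */
        if (atomic64_dec_and_test(refcount))
                release();
}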

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e");
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", "s");
}
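
/*
 * Illustrative sketch, not part of the original header: charging pages
 * against a budget and learning, from the sign flag of the same locked
 * addq, whether the budget went negative. Hypothetical names.
 */
static inline bool example_overdrawn_after_charge(atomic64_t *budget,
                                                  long pages)
{
        /* The charge always happens; the return value only reports
         * whether it pushed the budget below zero. */
        return atomic64_add_negative(-pages, budget);
}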

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
        return i + xadd(&v->counter, i);
}
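
/*
 * Illustrative sketch, not part of the original header: handing out
 * unique, monotonically increasing 64-bit ids. example_next_id is a
 * hypothetical name.
 */
static inline long example_next_id(atomic64_t *last_id)
{
        /* xadd returns the old value, so add_return yields old + 1;
         * concurrent callers are guaranteed distinct ids. */
        return atomic64_add_return(1, last_id);
}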

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
        return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
        return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
        return xchg(&v->counter, new);
}
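
/*
 * Illustrative sketch, not part of the original header: an atomic
 * "store the maximum" built from an atomic64_cmpxchg() retry loop, the
 * same shape as atomic64_add_unless() below. example_atomic64_max is a
 * hypothetical name.
 */
static inline void example_atomic64_max(atomic64_t *v, long new)
{
        long old = atomic64_read(v);

        /* Loop until the stored value is already >= new, or our
         * compare-and-exchange wins the race to install it. */
        while (old < new) {
                long seen = atomic64_cmpxchg(v, old, new);
                if (seen == old)
                        break;
                old = seen;
        }
}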

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed, zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
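
/*
 * Illustrative sketch, not part of the original header: taking a
 * reference on an object that may concurrently be on its way to being
 * freed. example_tryget is a hypothetical name.
 */
static inline bool example_tryget(atomic64_t *refcount)
{
        /* Fails exactly when the count has already dropped to zero. */
        return atomic64_inc_not_zero(refcount);
}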

/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
        long c, old, dec;
        c = atomic64_read(v);
        for (;;) {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
                old = atomic64_cmpxchg((v), c, dec);
                if (likely(old == c))
                        break;
                c = old;
        }
        return dec;
}
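
/*
 * Illustrative sketch, not part of the original header: a semaphore-like
 * "try to take one unit" helper. example_try_take is a hypothetical
 * name.
 */
static inline bool example_try_take(atomic64_t *available)
{
        /* A negative return means no unit was available and nothing
         * was decremented. */
        return atomic64_dec_if_positive(available) >= 0;
}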

/*
 * Generate the remaining bitwise atomics: atomic64_and(), atomic64_or()
 * and atomic64_xor(), each a single lock-prefixed instruction.
 */
#define ATOMIC64_OP(op)                                                 \
static inline void atomic64_##op(long i, atomic64_t *v)                \
{                                                                       \
        asm volatile(LOCK_PREFIX #op"q %1,%0"                           \
                        : "+m" (v->counter)                             \
                        : "er" (i)                                      \
                        : "memory");                                    \
}

ATOMIC64_OP(and)
ATOMIC64_OP(or)
ATOMIC64_OP(xor)

#undef ATOMIC64_OP
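
/*
 * Illustrative sketch, not part of the original header: lock-free set
 * and clear of bits in a shared 64-bit flag word using the generated
 * atomic64_or()/atomic64_and(). Hypothetical names.
 */
static inline void example_set_flag(atomic64_t *flags, unsigned int bit)
{
        atomic64_or(1UL << bit, flags);
}

static inline void example_clear_flag(atomic64_t *flags, unsigned int bit)
{
        atomic64_and(~(1UL << bit), flags);
}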

#endif /* _ASM_X86_ATOMIC64_64_H */