#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)        { (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
        return v->counter;
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
        v->counter = i;
}
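
/*
 * Illustrative usage, not part of this header (the counter name is
 * hypothetical): initialize statically or with atomic64_set(), and
 * read back with atomic64_read().
 *
 *      static atomic64_t hits = ATOMIC64_INIT(0);
 *
 *      atomic64_set(&hits, 100);
 *      long n = atomic64_read(&hits);  // n == 100; no barrier implied
 */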
35 | ||
36 | /** | |
37 | * atomic64_add - add integer to atomic64 variable | |
38 | * @i: integer value to add | |
39 | * @v: pointer to type atomic64_t | |
40 | * | |
41 | * Atomically adds @i to @v. | |
42 | */ | |
43 | static inline void atomic64_add(long i, atomic64_t *v) | |
44 | { | |
45 | asm volatile(LOCK_PREFIX "addq %1,%0" | |
46 | : "=m" (v->counter) | |
47 | : "er" (i), "m" (v->counter)); | |
48 | } | |
49 | ||
50 | /** | |
51 | * atomic64_sub - subtract the atomic64 variable | |
52 | * @i: integer value to subtract | |
53 | * @v: pointer to type atomic64_t | |
54 | * | |
55 | * Atomically subtracts @i from @v. | |
56 | */ | |
57 | static inline void atomic64_sub(long i, atomic64_t *v) | |
58 | { | |
59 | asm volatile(LOCK_PREFIX "subq %1,%0" | |
60 | : "=m" (v->counter) | |
61 | : "er" (i), "m" (v->counter)); | |
62 | } | |
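
/*
 * Illustrative usage (hypothetical byte counter): add and sub are
 * fire-and-forget read-modify-write operations; neither returns the
 * resulting value.
 *
 *      static atomic64_t bytes_pending = ATOMIC64_INIT(0);
 *
 *      atomic64_add(len, &bytes_pending);      // on submission
 *      atomic64_sub(len, &bytes_pending);      // on completion
 */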
63 | ||
64 | /** | |
65 | * atomic64_sub_and_test - subtract value from variable and test result | |
66 | * @i: integer value to subtract | |
67 | * @v: pointer to type atomic64_t | |
68 | * | |
69 | * Atomically subtracts @i from @v and returns | |
70 | * true if the result is zero, or false for all | |
71 | * other cases. | |
72 | */ | |
73 | static inline int atomic64_sub_and_test(long i, atomic64_t *v) | |
74 | { | |
75 | unsigned char c; | |
76 | ||
77 | asm volatile(LOCK_PREFIX "subq %2,%0; sete %1" | |
78 | : "=m" (v->counter), "=qm" (c) | |
79 | : "er" (i), "m" (v->counter) : "memory"); | |
80 | return c; | |
81 | } | |
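
/*
 * Illustrative usage (pool and pool_drained() are hypothetical): the
 * zero test happens inside the same locked instruction as the
 * subtraction, so no other CPU can change the counter between the two.
 *
 *      if (atomic64_sub_and_test(nr_taken, &pool->outstanding))
 *              pool_drained(pool);     // we drove the count to zero
 */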
82 | ||
83 | /** | |
84 | * atomic64_inc - increment atomic64 variable | |
85 | * @v: pointer to type atomic64_t | |
86 | * | |
87 | * Atomically increments @v by 1. | |
88 | */ | |
89 | static inline void atomic64_inc(atomic64_t *v) | |
90 | { | |
91 | asm volatile(LOCK_PREFIX "incq %0" | |
92 | : "=m" (v->counter) | |
93 | : "m" (v->counter)); | |
94 | } | |
95 | ||
96 | /** | |
97 | * atomic64_dec - decrement atomic64 variable | |
98 | * @v: pointer to type atomic64_t | |
99 | * | |
100 | * Atomically decrements @v by 1. | |
101 | */ | |
102 | static inline void atomic64_dec(atomic64_t *v) | |
103 | { | |
104 | asm volatile(LOCK_PREFIX "decq %0" | |
105 | : "=m" (v->counter) | |
106 | : "m" (v->counter)); | |
107 | } | |
108 | ||
109 | /** | |
110 | * atomic64_dec_and_test - decrement and test | |
111 | * @v: pointer to type atomic64_t | |
112 | * | |
113 | * Atomically decrements @v by 1 and | |
114 | * returns true if the result is 0, or false for all other | |
115 | * cases. | |
116 | */ | |
117 | static inline int atomic64_dec_and_test(atomic64_t *v) | |
118 | { | |
119 | unsigned char c; | |
120 | ||
121 | asm volatile(LOCK_PREFIX "decq %0; sete %1" | |
122 | : "=m" (v->counter), "=qm" (c) | |
123 | : "m" (v->counter) : "memory"); | |
124 | return c != 0; | |
125 | } | |
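
/*
 * Illustrative usage: the classic reference-count "put". Because the
 * decrement and the zero test are one atomic step, exactly one caller
 * sees the transition to zero and frees the object (struct obj and
 * obj_free() are hypothetical).
 *
 *      static inline void obj_put(struct obj *o)
 *      {
 *              if (atomic64_dec_and_test(&o->refcnt))
 *                      obj_free(o);
 *      }
 */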
126 | ||
127 | /** | |
128 | * atomic64_inc_and_test - increment and test | |
129 | * @v: pointer to type atomic64_t | |
130 | * | |
131 | * Atomically increments @v by 1 | |
132 | * and returns true if the result is zero, or false for all | |
133 | * other cases. | |
134 | */ | |
135 | static inline int atomic64_inc_and_test(atomic64_t *v) | |
136 | { | |
137 | unsigned char c; | |
138 | ||
139 | asm volatile(LOCK_PREFIX "incq %0; sete %1" | |
140 | : "=m" (v->counter), "=qm" (c) | |
141 | : "m" (v->counter) : "memory"); | |
142 | return c != 0; | |
143 | } | |
144 | ||
145 | /** | |
146 | * atomic64_add_negative - add and test if negative | |
147 | * @i: integer value to add | |
148 | * @v: pointer to type atomic64_t | |
149 | * | |
150 | * Atomically adds @i to @v and returns true | |
151 | * if the result is negative, or false when | |
152 | * result is greater than or equal to zero. | |
153 | */ | |
154 | static inline int atomic64_add_negative(long i, atomic64_t *v) | |
155 | { | |
156 | unsigned char c; | |
157 | ||
158 | asm volatile(LOCK_PREFIX "addq %2,%0; sets %1" | |
159 | : "=m" (v->counter), "=qm" (c) | |
160 | : "er" (i), "m" (v->counter) : "memory"); | |
161 | return c; | |
162 | } | |
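
/*
 * Illustrative usage (budget and throttle() are hypothetical): the sign
 * of the result is captured by the same locked instruction that does
 * the add.
 *
 *      if (atomic64_add_negative(-cost, &budget))
 *              throttle();     // balance dropped below zero
 */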
163 | ||
164 | /** | |
165 | * atomic64_add_return - add and return | |
166 | * @i: integer value to add | |
167 | * @v: pointer to type atomic64_t | |
168 | * | |
169 | * Atomically adds @i to @v and returns @i + @v | |
170 | */ | |
171 | static inline long atomic64_add_return(long i, atomic64_t *v) | |
172 | { | |
173 | long __i = i; | |
174 | asm volatile(LOCK_PREFIX "xaddq %0, %1;" | |
175 | : "+r" (i), "+m" (v->counter) | |
176 | : : "memory"); | |
177 | return i + __i; | |
178 | } | |
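
/*
 * Illustrative usage (hypothetical ID allocator): unlike atomic64_add(),
 * the result is returned, so concurrent callers each obtain a distinct
 * value.
 *
 *      static atomic64_t next_id = ATOMIC64_INIT(0);
 *
 *      long id = atomic64_add_return(1, &next_id);
 */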
179 | ||
180 | static inline long atomic64_sub_return(long i, atomic64_t *v) | |
181 | { | |
182 | return atomic64_add_return(-i, v); | |
183 | } | |
184 | ||
185 | #define atomic64_inc_return(v) (atomic64_add_return(1, (v))) | |
186 | #define atomic64_dec_return(v) (atomic64_sub_return(1, (v))) | |
187 | ||
188 | static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new) | |
189 | { | |
190 | return cmpxchg(&v->counter, old, new); | |
191 | } | |
192 | ||
193 | static inline long atomic64_xchg(atomic64_t *v, long new) | |
194 | { | |
195 | return xchg(&v->counter, new); | |
196 | } | |
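
/*
 * Illustrative usage: a compare-and-swap retry loop, the building block
 * for arbitrary atomic updates (here doubling; val is a hypothetical
 * atomic64_t).
 *
 *      long old, new;
 *
 *      do {
 *              old = atomic64_read(&val);
 *              new = old * 2;
 *      } while (atomic64_cmpxchg(&val, old, new) != old);
 */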
197 | ||
198 | /** | |
199 | * atomic64_add_unless - add unless the number is a given value | |
200 | * @v: pointer of type atomic64_t | |
201 | * @a: the amount to add to v... | |
202 | * @u: ...unless v is equal to u. | |
203 | * | |
204 | * Atomically adds @a to @v, so long as it was not @u. | |
205 | * Returns non-zero if @v was not @u, and zero otherwise. | |
206 | */ | |
207 | static inline int atomic64_add_unless(atomic64_t *v, long a, long u) | |
208 | { | |
209 | long c, old; | |
210 | c = atomic64_read(v); | |
211 | for (;;) { | |
212 | if (unlikely(c == (u))) | |
213 | break; | |
214 | old = atomic64_cmpxchg((v), c, c + (a)); | |
215 | if (likely(old == c)) | |
216 | break; | |
217 | c = old; | |
218 | } | |
219 | return c != (u); | |
220 | } | |
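
/*
 * Illustrative usage (active and MAX_ACTIVE are hypothetical): take a
 * slot only if the counter has not yet reached the limit; the return
 * value reports whether the add happened.
 *
 *      if (!atomic64_add_unless(&active, 1, MAX_ACTIVE))
 *              return -EBUSY;  // already at the limit
 */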
221 | ||
222 | #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0) | |
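
/*
 * Illustrative usage: the lookup side of a reference count. A count of
 * zero means the object is already being torn down and must not be
 * revived (o is hypothetical).
 *
 *      if (!atomic64_inc_not_zero(&o->refcnt))
 *              return NULL;    // too late, object is going away
 */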
223 | ||
224 | #endif /* _ASM_X86_ATOMIC64_64_H */ |