#ifndef ASM_X86__ATOMIC_64_H
#define ASM_X86__ATOMIC_64_H

#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* atomic_t should be a 32-bit signed type */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct {
        int counter;
} atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))

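/*
 * Illustrative only (not from the original header): a counter
 * declared and accessed with the primitives above, e.g. in some
 * hypothetical driver:
 *
 *	static atomic_t nr_pending = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_pending, 0);
 *	n = atomic_read(&nr_pending);
 *
 * Note that atomic_read()/atomic_set() are plain loads and stores;
 * only the read-modify-write operations below need the lock prefix.
 */
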
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "=m" (v->counter)
                     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "subl %1,%0"
                     : "=m" (v->counter)
                     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "ir" (i), "m" (v->counter) : "memory");
        return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "incl %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "decl %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "decl %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "incl %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "ir" (i), "m" (v->counter) : "memory");
        return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
        int __i = i;
        asm volatile(LOCK_PREFIX "xaddl %0, %1"
                     : "+r" (i), "+m" (v->counter)
                     : : "memory");
        return i + __i;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
        return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

/* A 64-bit atomic type */

typedef struct {
        long counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	(((v)->counter) = (i))

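/*
 * As with the 32-bit variants, atomic64_read()/atomic64_set() are
 * plain loads and stores: naturally aligned 64-bit accesses are
 * atomic on x86-64, so they need no lock prefix.
 */
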
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "addq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "subq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "er" (i), "m" (v->counter) : "memory");
        return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "incq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "decq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "decq %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "incq %0; sete %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "m" (v->counter) : "memory");
        return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
        unsigned char c;

        asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
                     : "=m" (v->counter), "=qm" (c)
                     : "er" (i), "m" (v->counter) : "memory");
        return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
        long __i = i;
        asm volatile(LOCK_PREFIX "xaddq %0, %1"
                     : "+r" (i), "+m" (v->counter)
                     : : "memory");
        return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
        return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

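/*
 * Illustrative use of atomic_inc_not_zero() (the object and its
 * refcnt field are hypothetical, not part of this header): after a
 * lockless lookup, grab a reference only if the object is still live:
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;
 *
 * where failure means the object is already being torn down.
 */
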
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
        asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
        return *v;
}

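/*
 * Note that only the addw is locked; the return of *v is a separate
 * ordinary load, so another CPU may have changed the value again by
 * the time the caller sees it.
 */
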
/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1.  The result is stored in *@v1;
 * nothing is returned.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
        asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
        asm volatile(LOCK_PREFIX "andl %0,%1"			\
                     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
        asm volatile(LOCK_PREFIX "orl %0,%1"			\
                     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
                     : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

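/*
 * These can be plain compiler barriers because every lock-prefixed
 * instruction already acts as a full memory barrier on x86; only
 * gcc's own reordering around atomic_inc()/atomic_dec() has to be
 * suppressed.
 */
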
#include <asm-generic/atomic.h>
#endif /* ASM_X86__ATOMIC_64_H */