/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/system.h>
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
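/*
 * Illustrative usage sketch, not part of the original header: declaring
 * an atomic_t, initialising it statically, and updating/reading it at
 * run time.  The identifier pkt_count is made up for this example.
 *
 *	static atomic_t pkt_count = ATOMIC_INIT(0);
 *
 *	static void reset_packet_counter(void)
 *	{
 *		atomic_set(&pkt_count, 0);
 *	}
 *
 *	static int packets_seen(void)
 *	{
 *		return atomic_read(&pkt_count);
 *	}
 */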
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		"1: ll %0, %1 # atomic_add \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		"1: ll %0, %1 # atomic_add \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: ll %0, %1 # atomic_sub \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: ll %0, %1 # atomic_sub \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: ll %1, %2 # atomic_add_return \n"
		" addu %0, %1, %3 \n"
		" addu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: ll %1, %2 # atomic_add_return \n"
		" addu %0, %1, %3 \n"
		" addu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
static __inline__ int atomic_sub_return(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: ll %1, %2 # atomic_sub_return \n"
		" subu %0, %1, %3 \n"
		" subu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: ll %1, %2 # atomic_sub_return \n"
		" subu %0, %1, %3 \n"
		" subu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" subu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" subu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
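/*
 * Illustrative usage sketch, not part of the original header: using
 * atomic_sub_if_positive() to claim n tokens only while enough remain.
 * The return value is the old count minus n, so a negative result means
 * the claim failed and the counter was left untouched.  The identifiers
 * token_pool and claim_tokens() are made up for this example.
 *
 *	static atomic_t token_pool = ATOMIC_INIT(8);
 *
 *	static int claim_tokens(int n)
 *	{
 *		return atomic_sub_if_positive(n, &token_pool) >= 0;
 *	}
 */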
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
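/*
 * Illustrative usage sketch, not part of the original header: the usual
 * compare-and-exchange retry loop built on atomic_cmpxchg(), here used
 * to increment a counter without letting it pass a ceiling.  The
 * identifiers hit_count and saturating_inc() are made up for this
 * example.
 *
 *	static atomic_t hit_count = ATOMIC_INIT(0);
 *
 *	static void saturating_inc(int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(&hit_count);
 *			if (old >= max)
 *				return;
 *			new = old + 1;
 *		} while (atomic_cmpxchg(&hit_count, old, new) != old);
 *	}
 */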
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
	if (unlikely(c == (u)))
	old = atomic_cmpxchg((v), c, c + (a));
	if (likely(old == c))

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
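/*
 * Illustrative usage sketch, not part of the original header:
 * atomic_inc_not_zero() is the usual way to take a reference on an
 * object whose count may already have dropped to zero; it only
 * succeeds while at least one reference is still held.  The struct and
 * function names are made up for this example.
 *
 *	struct my_obj {
 *		atomic_t refcount;
 *	};
 *
 *	static int my_obj_tryget(struct my_obj *obj)
 *	{
 *		return atomic_inc_not_zero(&obj->refcount);
 *	}
 */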
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
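/*
 * Illustrative usage sketch, not part of the original header: the
 * classic reference-count release pattern, where whoever drops the last
 * reference (atomic_dec_and_test() returns true) frees the object.  The
 * struct and function names are made up; kfree() needs linux/slab.h.
 *
 *	struct my_obj {
 *		atomic_t refcount;
 *	};
 *
 *	static void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			kfree(obj);
 *	}
 */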
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 */
#define atomic64_set(v, i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: lld %0, %1 # atomic64_add \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: lld %0, %1 # atomic64_add \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: lld %0, %1 # atomic64_sub \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: lld %0, %1 # atomic64_sub \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
	unsigned long result;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: lld %1, %2 # atomic64_add_return \n"
		" daddu %0, %1, %3 \n"
		" daddu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: lld %1, %2 # atomic64_add_return \n"
		" daddu %0, %1, %3 \n"
		" daddu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: lld %1, %2 # atomic64_sub_return \n"
		" dsubu %0, %1, %3 \n"
		" dsubu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: lld %1, %2 # atomic64_sub_return \n"
		" dsubu %0, %1, %3 \n"
		" dsubu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" dsubu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" dsubu %0, %1, %3 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	raw_local_irq_save(flags);
	raw_local_irq_restore(flags);

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
	c = atomic64_read(v);
	if (unlikely(c == (u)))
	old = atomic64_cmpxchg((v), c, c + (a));
	if (likely(old == c))

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
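/*
 * Illustrative usage sketch, not part of the original header: on 64-bit
 * kernels an atomic64_t can hold totals that would overflow a 32-bit
 * counter, and atomic64_add_return() hands back the updated total in
 * one step.  The identifiers bytes_seen and account_bytes() are made up
 * for this example.
 *
 *	static atomic64_t bytes_seen = ATOMIC64_INIT(0);
 *
 *	static long account_bytes(long n)
 *	{
 *		return atomic64_add_return(n, &bytes_seen);
 *	}
 */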
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing but not the non-*_return
 * operations.
 */
#define smp_mb__before_atomic_dec()	smp_llsc_mb()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_llsc_mb()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
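/*
 * Illustrative usage sketch, not part of the original header: the
 * smp_mb__before/after_atomic_*() helpers order ordinary memory
 * accesses around a plain atomic_dec()/atomic_inc(), which unlike the
 * *_return variants is not serializing by itself.  The struct and
 * function names are made up for this example.
 *
 *	struct my_work {
 *		int done;
 *		atomic_t pending;
 *	};
 *
 *	static void my_work_complete(struct my_work *w)
 *	{
 *		w->done = 1;
 *		smp_mb__before_atomic_dec();
 *		atomic_dec(&w->pending);
 *	}
 */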
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */