X-Git-Url: http://pilppa.org/gitweb/gitweb.cgi?a=blobdiff_plain;f=include%2Fasm-mips%2Fatomic.h;h=a798d6299a7927ee22d706e9a46c10f4ed9a73a0;hb=2cdf79cafbd11580f5b63cd4993b45c1c4952415;hp=1ac50b6c47adfc634ad55a3b023dc1d63fce7d3e;hpb=e0cc09e295f346b7921e921f385fe5213472316a;p=linux-2.6-omap-h63xx.git

diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 1ac50b6c47a..a798d6299a7 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -18,6 +18,7 @@
 #include <asm/barrier.h>
 #include <asm/cpu-features.h>
 #include <asm/war.h>
+#include <asm/system.h>
 
 typedef struct { volatile int counter; } atomic_t;
 
@@ -38,7 +39,7 @@ typedef struct { volatile int counter; } atomic_t;
  *
  * Atomically sets the value of @v to @i.
  */
-#define atomic_set(v,i) ((v)->counter = (i))
+#define atomic_set(v, i) ((v)->counter = (i))
 
 /*
  * atomic_add - add integer to atomic variable
@@ -137,7 +138,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -180,7 +181,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -189,7 +190,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -232,7 +233,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -249,7 +250,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -301,13 +302,13 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
 
 /**
  * atomic_add_unless - add unless the number is a given value
@@ -318,18 +319,24 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
-#define atomic_dec_return(v) atomic_sub_return(1,(v))
-#define atomic_inc_return(v) atomic_add_return(1,(v))
+#define atomic_dec_return(v) atomic_sub_return(1, (v))
+#define atomic_inc_return(v) atomic_add_return(1, (v))
 
 /*
  * atomic_sub_and_test - subtract value from variable and test result
@@ -340,7 +347,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  * true if the result is zero, or false for all
  * other cases.
  */
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
+#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
 
 /*
  * atomic_inc_and_test - increment and test
@@ -374,7 +381,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  *
  * Atomically increments @v by 1.
  */
-#define atomic_inc(v) atomic_add(1,(v))
+#define atomic_inc(v) atomic_add(1, (v))
 
 /*
  * atomic_dec - decrement and test
@@ -382,7 +389,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  *
  * Atomically decrements @v by 1.
  */
-#define atomic_dec(v) atomic_sub(1,(v))
+#define atomic_dec(v) atomic_sub(1, (v))
 
 /*
  * atomic_add_negative - add and test if negative
@@ -393,7 +400,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
+#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
 
 #ifdef CONFIG_64BIT
 
@@ -413,7 +420,7 @@ typedef struct { volatile long counter; } atomic64_t;
  * @v: pointer of type atomic64_t
  * @i: required value
  */
-#define atomic64_set(v,i) ((v)->counter = (i))
+#define atomic64_set(v, i) ((v)->counter = (i))
 
 /*
  * atomic64_add - add integer to atomic variable
@@ -512,7 +519,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -555,7 +562,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -564,7 +571,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -607,7 +614,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -624,7 +631,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -676,13 +683,43 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
 
-#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
-#define atomic64_inc_return(v) atomic64_add_return(1,(v))
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
+
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
+#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
+#define atomic64_inc_return(v) atomic64_add_return(1, (v))
 
 /*
  * atomic64_sub_and_test - subtract value from variable and test result
@@ -693,7 +730,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
  * true if the result is zero, or false for all
  * other cases.
  */
-#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
+#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
 
 /*
  * atomic64_inc_and_test - increment and test
@@ -727,7 +764,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
  *
  * Atomically increments @v by 1.
  */
-#define atomic64_inc(v) atomic64_add(1,(v))
+#define atomic64_inc(v) atomic64_add(1, (v))
 
 /*
  * atomic64_dec - decrement and test
@@ -735,7 +772,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
  *
  * Atomically decrements @v by 1.
  */
-#define atomic64_dec(v) atomic64_sub(1,(v))
+#define atomic64_dec(v) atomic64_sub(1, (v))
 
 /*
  * atomic64_add_negative - add and test if negative
@@ -746,7 +783,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
+#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
 
 #endif /* CONFIG_64BIT */
 
@@ -754,10 +791,11 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_mb()
-#define smp_mb__after_atomic_dec()	smp_mb()
-#define smp_mb__before_atomic_inc()	smp_mb()
-#define smp_mb__after_atomic_inc()	smp_mb()
+#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__after_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic.h>
+
 #endif /* _ASM_ATOMIC_H */
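
Editor's note on the hunks above that turn the atomic_add_unless()/atomic64_add_unless() macros into inline functions: both helpers use a compare-and-swap retry loop that keeps adding @a only while the counter has not reached @u. The standalone C sketch below mirrors that loop in userspace so it can be compiled and run on its own. It is illustrative only: the names add_unless() and refcount are hypothetical, and GCC's __sync_val_compare_and_swap() merely stands in for the kernel's atomic_cmpxchg(); this is not the patched header's code.

#include <stdio.h>

/* Userspace analogue of the kernel's atomic_add_unless(): add 'a' to '*v'
 * unless '*v' equals 'u'; return non-zero if the add happened. */
static int add_unless(int *v, int a, int u)
{
	int c, old;

	c = *v;				/* snapshot, like atomic_read() */
	for (;;) {
		if (c == u)		/* forbidden value: do not add */
			break;
		/* CAS: publish c + a only if *v is still c */
		old = __sync_val_compare_and_swap(v, c, c + a);
		if (old == c)		/* CAS won: the add is visible */
			break;
		c = old;		/* lost a race: retry with fresh value */
	}
	return c != u;
}

int main(void)
{
	int refcount = 1;

	/* Same idea as atomic_inc_not_zero(): only take a reference
	 * while the object is still alive. */
	if (add_unless(&refcount, 1, 0))
		printf("got reference, refcount = %d\n", refcount);

	refcount = 0;
	if (!add_unless(&refcount, 1, 0))
		printf("refcount already zero, reference not taken\n");

	return 0;
}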