X-Git-Url: http://pilppa.org/gitweb/gitweb.cgi?a=blobdiff_plain;f=include%2Fasm-mips%2Fatomic.h;h=a798d6299a7927ee22d706e9a46c10f4ed9a73a0;hb=ade8c56cbd02020fecbe1684f181250a466685eb;hp=62daa746a9c9180101af1e566635778c36d1d269;hpb=f697b677620d04d8c77841745727de85f7e948b1;p=linux-2.6-omap-h63xx.git

diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 62daa746a9c..a798d6299a7 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -39,7 +39,7 @@ typedef struct { volatile int counter; } atomic_t;
  *
  * Atomically sets the value of @v to @i.
  */
-#define atomic_set(v,i)		((v)->counter = (i))
+#define atomic_set(v, i)		((v)->counter = (i))
 
 /*
  * atomic_add - add integer to atomic variable
@@ -138,7 +138,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -181,7 +181,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -233,7 +233,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -250,7 +250,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -302,7 +302,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -335,8 +335,8 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 }
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
-#define atomic_dec_return(v) atomic_sub_return(1,(v))
-#define atomic_inc_return(v) atomic_add_return(1,(v))
+#define atomic_dec_return(v) atomic_sub_return(1, (v))
+#define atomic_inc_return(v) atomic_add_return(1, (v))
 
 /*
  * atomic_sub_and_test - subtract value from variable and test result
@@ -347,7 +347,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  * true if the result is zero, or false for all
  * other cases.
  */
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
+#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
 
 /*
  * atomic_inc_and_test - increment and test
@@ -381,7 +381,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  *
  * Atomically increments @v by 1.
  */
-#define atomic_inc(v) atomic_add(1,(v))
+#define atomic_inc(v) atomic_add(1, (v))
 
 /*
  * atomic_dec - decrement and test
@@ -389,7 +389,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  *
  * Atomically decrements @v by 1.
  */
-#define atomic_dec(v) atomic_sub(1,(v))
+#define atomic_dec(v) atomic_sub(1, (v))
 
 /*
  * atomic_add_negative - add and test if negative
@@ -400,7 +400,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
+#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
 
 #ifdef CONFIG_64BIT
 
@@ -420,7 +420,7 @@ typedef struct { volatile long counter; } atomic64_t;
  * @v: pointer of type atomic64_t
  * @i: required value
  */
-#define atomic64_set(v,i)	((v)->counter = (i))
+#define atomic64_set(v, i)	((v)->counter = (i))
 
 /*
  * atomic64_add - add integer to atomic variable
@@ -519,7 +519,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -562,7 +562,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -571,7 +571,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -614,7 +614,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -631,7 +631,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -683,13 +683,13 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
 
 #define atomic64_cmpxchg(v, o, n) \
-	(((__typeof__((v)->counter)))cmpxchg(&((v)->counter), (o), (n)))
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
 
 /**
@@ -718,8 +718,8 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
-#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
-#define atomic64_inc_return(v) atomic64_add_return(1,(v))
+#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
+#define atomic64_inc_return(v) atomic64_add_return(1, (v))
 
 /*
  * atomic64_sub_and_test - subtract value from variable and test result
@@ -730,7 +730,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * true if the result is zero, or false for all
  * other cases.
  */
-#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
+#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
 
 /*
  * atomic64_inc_and_test - increment and test
@@ -764,7 +764,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  *
  * Atomically increments @v by 1.
  */
-#define atomic64_inc(v) atomic64_add(1,(v))
+#define atomic64_inc(v) atomic64_add(1, (v))
 
 /*
  * atomic64_dec - decrement and test
@@ -772,7 +772,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  *
  * Atomically decrements @v by 1.
  */
-#define atomic64_dec(v) atomic64_sub(1,(v))
+#define atomic64_dec(v) atomic64_sub(1, (v))
 
 /*
  * atomic64_add_negative - add and test if negative
@@ -783,7 +783,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
+#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
 
 #endif /* CONFIG_64BIT */
 
@@ -791,10 +791,11 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_mb()
-#define smp_mb__after_atomic_dec()	smp_mb()
-#define smp_mb__before_atomic_inc()	smp_mb()
-#define smp_mb__after_atomic_inc()	smp_mb()
+#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__after_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic.h>
+
 #endif /* _ASM_ATOMIC_H */
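
For context, a minimal usage sketch of the barrier macros this diff retunes. It is an illustration under stated assumptions, not code from the patch: struct my_obj, its fields, and my_obj_publish_and_dec() are hypothetical names, while atomic_dec(), smp_mb__before_atomic_dec() and <asm/atomic.h> are kernel interfaces of this era. As the header's closing comment notes, the non-*_return operations such as atomic_dec() are not serializing, which is why an explicit barrier is needed before them; after this patch that barrier expands to smp_llsc_mb(), which emits a real SYNC only on cores configured as weakly ordered beyond LL/SC and costs nothing elsewhere.

	#include <asm/atomic.h>		/* atomic_t, atomic_dec() */

	/* Hypothetical object shared with another CPU. */
	struct my_obj {
		int data_ready;		/* plain, non-atomic flag */
		atomic_t pending;	/* outstanding work items */
	};

	static void my_obj_publish_and_dec(struct my_obj *obj)
	{
		obj->data_ready = 1;	/* plain store, no ordering by itself */

		/*
		 * atomic_dec() does not serialize, so order the store above
		 * before the decrement.  With this patch applied the barrier
		 * is smp_llsc_mb(): a SYNC instruction only on CPUs that
		 * reorder beyond LL/SC, a no-op on the rest.
		 */
		smp_mb__before_atomic_dec();
		atomic_dec(&obj->pending);
	}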