diff --git a/include/asm-m32r/atomic.h b/include/asm-m32r/atomic.h
index bfff69a49936abdaae3b3d07dea3a8843cf6e39f..3122fe106f051b32a6bc1bebfeffbf7b92d23371 100644
--- a/include/asm-m32r/atomic.h
+++ b/include/asm-m32r/atomic.h
@@ -242,6 +242,28 @@ static __inline__ int atomic_dec_return(atomic_t *v)
  */
 #define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
 
+#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to @v...
+ * @u: ...unless @v is equal to @u.
+ *
+ * Atomically adds @a to @v, so long as @v was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 static __inline__ void atomic_clear_mask(unsigned long  mask, atomic_t *addr)
 {
        unsigned long flags;
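
The atomic_add_unless() added in the hunk above is the usual compare-and-swap retry loop: read the counter, then keep retrying atomic_cmpxchg() until it either succeeds or the counter is observed to equal @u. The following userspace sketch is illustrative only and is not part of the patch; the add_unless() function name is made up here, and C11 <stdatomic.h> stands in for the kernel's cmpxchg() primitives.

	/*
	 * Illustrative userspace sketch (not part of this patch) of the
	 * compare-and-swap retry loop used by atomic_add_unless() above,
	 * with C11 atomics in place of the kernel's atomic_cmpxchg().
	 */
	#include <stdatomic.h>
	#include <stdio.h>

	/* Add 'a' to '*v' unless '*v' equals 'u'; non-zero return means the add happened. */
	static int add_unless(atomic_int *v, int a, int u)
	{
		int c = atomic_load(v);

		while (c != u) {
			/* On failure, 'c' is updated with the value currently in '*v'. */
			if (atomic_compare_exchange_weak(v, &c, c + a))
				break;
		}
		return c != u;
	}

	int main(void)
	{
		atomic_int refs = 1;

		printf("%d\n", add_unless(&refs, 1, 0));	/* prints 1: refs is now 2 */
		atomic_store(&refs, 0);
		printf("%d\n", add_unless(&refs, 1, 0));	/* prints 0: refs stays 0 */
		return 0;
	}

atomic_inc_not_zero() is the same pattern with a = 1 and u = 0, which is what makes it suitable for taking a reference only while the refcount is still non-zero.
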
@@ -292,4 +314,5 @@ static __inline__ void atomic_set_mask(unsigned long  mask, atomic_t *addr)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_M32R_ATOMIC_H */
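
The second hunk's #include of <asm-generic/atomic.h> pulls in the generic atomic_long_t layer, which on a 32-bit architecture such as m32r is built directly on atomic_t. The sketch below is a simplified, abbreviated illustration of that wrapper's shape, not the header's literal contents.

	/*
	 * Simplified sketch (not the literal header contents) of the
	 * atomic_long_t wrappers that <asm-generic/atomic.h> provides
	 * on a 32-bit architecture.
	 */
	typedef atomic_t atomic_long_t;

	#define ATOMIC_LONG_INIT(i)	ATOMIC_INIT(i)

	static inline long atomic_long_read(atomic_long_t *l)
	{
		atomic_t *v = (atomic_t *)l;

		return (long)atomic_read(v);
	}

	static inline void atomic_long_add(long i, atomic_long_t *l)
	{
		atomic_t *v = (atomic_t *)l;

		atomic_add(i, v);
	}
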