X-Git-Url: http://pilppa.org/gitweb/gitweb.cgi?a=blobdiff_plain;f=include%2Fasm-sparc64%2Fatomic.h;h=2c71ec4a3b180930f53bcbec979527d3f28d3f97;hb=e00fc542eb694e448249361ef63c190e74f53574;hp=468eb48d814246e0b793b900d459b0ad51422cd6;hpb=ae574a5d7aa1d80469dfcbaa757db2bea536ee66;p=linux-2.6-omap-h63xx.git

diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index 468eb48d814..2c71ec4a3b1 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -1,5 +1,4 @@
-/* $Id: atomic.h,v 1.22 2001/07/11 23:56:07 davem Exp $
- * atomic.h: Thankfully the V9 is at least reasonable for this
+/* atomic.h: Thankfully the V9 is at least reasonable for this
  * stuff.
  *
  * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
@@ -8,8 +7,8 @@
 #ifndef __ARCH_SPARC64_ATOMIC__
 #define __ARCH_SPARC64_ATOMIC__
 
-#include <linux/config.h>
 #include <linux/types.h>
+#include <asm/system.h>
 
 typedef struct { volatile int counter; } atomic_t;
 typedef struct { volatile __s64 counter; } atomic64_t;
@@ -71,25 +70,47 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 #define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
 #define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = atomic_cmpxchg((v), c, c + (a));		\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	likely(c != (u));					\
-})
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing */
 #ifdef CONFIG_SMP
 #define smp_mb__before_atomic_dec()	membar_storeload_loadload();
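
For readers following the patch from outside the kernel tree: the compare-and-swap
retry loop that this diff converts from a statement-expression macro into a static
inline function can be exercised stand-alone. Below is a minimal user-space sketch
of the same loop, assuming GCC's __sync_val_compare_and_swap() builtin as a
stand-in for the kernel's sparc64 cmpxchg(); it is an illustration of the
technique, not kernel code.

/* User-space analogue of the patch's atomic_add_unless() retry loop.
 * Assumption: __sync_val_compare_and_swap() stands in for the kernel's
 * cmpxchg(); likely()/unlikely() are reduced to no-ops.
 */
#include <stdio.h>

#define likely(x)	(x)
#define unlikely(x)	(x)

typedef struct { volatile int counter; } atomic_t;

static int atomic_read(const atomic_t *v)
{
	return v->counter;
}

/* Add 'a' to *v unless the counter currently holds 'u'; return nonzero
 * iff the add happened.  Same structure as the static inline in the diff. */
static int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = __sync_val_compare_and_swap(&v->counter, c, c + a);
		if (likely(old == c))
			break;
		c = old;	/* another thread won the race; retry with the fresh value */
	}
	return c != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

int main(void)
{
	atomic_t ref = { 1 };

	/* Succeeds: counter is nonzero, so it is incremented to 2. */
	printf("%d (counter=%d)\n", atomic_inc_not_zero(&ref), ref.counter);

	/* Fails: counter is 0, so it is left untouched. */
	ref.counter = 0;
	printf("%d (counter=%d)\n", atomic_inc_not_zero(&ref), ref.counter);
	return 0;
}

One thing the conversion buys for free: unlike the old macro, the static inline
type-checks its arguments, so passing an atomic64_t * to atomic_add_unless()
now draws a compiler diagnostic instead of silently operating on the wrong width.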
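
A second detail worth pulling out: atomic64_cmpxchg() casts the cmpxchg() result
through __typeof__((v)->counter) rather than (int). With a 64-bit counter, an
(int) cast would truncate the old value returned by the exchange, so the
add_unless() loop built on top of it could compare against a mangled number.
A minimal sketch of the cast, again assuming the GCC builtin in place of the
kernel primitive and long long in place of __s64:

/* Illustration of the __typeof__ cast used by the patch's
 * atomic64_cmpxchg().  Assumptions: __sync_val_compare_and_swap()
 * replaces the kernel's cmpxchg(), long long replaces __s64.
 */
#include <stdio.h>

typedef struct { volatile long long counter; } atomic64_t;

/* The cast preserves the full 64-bit width of the returned old value;
 * an (int) here would chop it to the low 32 bits. */
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter)) \
	 __sync_val_compare_and_swap(&(v)->counter, (o), (n)))

int main(void)
{
	atomic64_t v = { 0x100000000LL };	/* bit 32 set: invisible to an (int) cast */
	long long old = atomic64_cmpxchg(&v, 0x100000000LL, 7LL);

	printf("old=%#llx counter=%#llx\n",
	       (unsigned long long)old, (unsigned long long)v.counter);
	return 0;
}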