X-Git-Url: http://pilppa.org/gitweb/gitweb.cgi?a=blobdiff_plain;f=include%2Fasm-frv%2Fbitops.h;h=39456ba0ec17c7156c5a7fef5baed6cbf60f74e2;hb=526e5ab200ce483dcdf146806f4936bd58daa800;hp=8dba74b1a2549e10a520ae9766151f4e613c77a6;hpb=70180659a479b55387eca8cc1fa7024ba8410b14;p=linux-2.6-omap-h63xx.git

diff --git a/include/asm-frv/bitops.h b/include/asm-frv/bitops.h
index 8dba74b1a25..39456ba0ec1 100644
--- a/include/asm-frv/bitops.h
+++ b/include/asm-frv/bitops.h
@@ -1,7 +1,7 @@
 /* bitops.h: bit operations for the Fujitsu FR-V CPUs
  *
  * For an explanation of how atomic ops work in this arch, see:
- *  Documentation/fujitsu/frv/atomic-ops.txt
+ *  Documentation/frv/atomic-ops.txt
  *
  * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
  * Written by David Howells (dhowells@redhat.com)
@@ -16,11 +16,13 @@
 
 #include
 #include
-#include
-#include
 
 #ifdef __KERNEL__
 
+#ifndef _LINUX_BITOPS_H
+#error only <linux/bitops.h> can be included directly
+#endif
+
 #include
 
 /*
@@ -29,6 +31,87 @@
 #define smp_mb__before_clear_bit()	barrier()
 #define smp_mb__after_clear_bit()	barrier()
 
+#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
+static inline
+unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
+{
+	unsigned long old, tmp;
+
+	asm volatile(
+		"0:						\n"
+		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
+		"	ckeq		icc3,cc7		\n"
+		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
+		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
+		"	and%I3		%1,%3,%2		\n"
+		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
+		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
+		"	beq		icc3,#0,0b		\n"
+		: "+U"(*v), "=&r"(old), "=r"(tmp)
+		: "NPr"(~mask)
+		: "memory", "cc7", "cc3", "icc3"
+		);
+
+	return old;
+}
+
+static inline
+unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
+{
+	unsigned long old, tmp;
+
+	asm volatile(
+		"0:						\n"
+		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
+		"	ckeq		icc3,cc7		\n"
+		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
+		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
+		"	or%I3		%1,%3,%2		\n"
+		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
+		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
+		"	beq		icc3,#0,0b		\n"
+		: "+U"(*v), "=&r"(old), "=r"(tmp)
+		: "NPr"(mask)
+		: "memory", "cc7", "cc3", "icc3"
+		);
+
+	return old;
+}
+
+static inline
+unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
+{
+	unsigned long old, tmp;
+
+	asm volatile(
+		"0:						\n"
+		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
+		"	ckeq		icc3,cc7		\n"
+		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
+		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
+		"	xor%I3		%1,%3,%2		\n"
+		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
+		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
+		"	beq		icc3,#0,0b		\n"
+		: "+U"(*v), "=&r"(old), "=r"(tmp)
+		: "NPr"(mask)
+		: "memory", "cc7", "cc3", "icc3"
+		);
+
+	return old;
+}
+
+#else
+
+extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
+extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
+extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);
+
+#endif
+
+#define atomic_clear_mask(mask, v)	atomic_test_and_ANDNOT_mask((mask), (v))
+#define atomic_set_mask(mask, v)	atomic_test_and_OR_mask((mask), (v))
+
 static inline int test_and_clear_bit(int nr, volatile void *addr)
 {
 	volatile unsigned long *ptr = addr;
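
The three helpers added above all follow the same FR-V conditional-store pattern: LD.P loads the old value, ORCR arms CC3, and CST.P writes the updated value back only if nothing has cleared CC3 in the meantime, with the final BEQ retrying the whole sequence until the store succeeds. As a rough illustration only (not part of this patch), the loop is semantically a compare-and-swap retry loop; the sketch below uses GCC's __sync_val_compare_and_swap builtin and a hypothetical function name as stand-ins for the FR-V conditional-store machinery.

/* Illustrative sketch only -- not part of this patch.  It mimics what the
 * LD.P/ORCR/CST.P retry loop in atomic_test_and_OR_mask() achieves, using
 * a generic compare-and-swap builtin instead of FR-V conditional stores. */
static unsigned long sketch_test_and_OR_mask(unsigned long mask,
					     volatile unsigned long *v)
{
	unsigned long old, new;

	do {
		old = *v;		/* corresponds to the LD.P load */
		new = old | mask;	/* corresponds to the or%I3 step */
		/* CST.P stores only if the location was untouched meanwhile;
		 * otherwise ICC3.Z stays set and BEQ loops back to label 0. */
	} while (__sync_val_compare_and_swap(v, old, new) != old);

	return old;			/* pre-update value, like the real helper */
}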