/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/war.h>

extern spinlock_t atomic_lock;

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))
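
/*
 * Usage sketch (illustrative, not part of the original header); the
 * counter name is hypothetical:
 *
 *      static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *      atomic_set(&nr_users, 5);
 *      printk("users: %d\n", atomic_read(&nr_users));
 *
 * atomic_read() and atomic_set() are plain volatile accesses; only the
 * read-modify-write operations below need ll/sc or the spinlock fallback.
 */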

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                /* ll/sc loop; early R10000 needs the branch after sc to
                   be a branch-likely, see <asm/war.h> */
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                /* ordinary ll/sc retry loop */
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                /* CPU without ll/sc: fall back to the global spinlock */
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter += i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter -= i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}
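
/*
 * Usage sketch (illustrative): a statistics counter updated from several
 * CPUs without a lock; 'rx_packets' is a hypothetical name:
 *
 *      static atomic_t rx_packets = ATOMIC_INIT(0);
 *
 *      atomic_add(n, &rx_packets);
 *
 * Neither atomic_add() nor atomic_sub() returns the new value or implies
 * a memory barrier; use the *_return variants below when the result or
 * the ordering matters.
 */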

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result += i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}
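
/*
 * Usage sketch (illustrative): handing out unique, increasing ids;
 * 'next_id' is a hypothetical counter.  atomic_inc_return() (defined
 * below in terms of atomic_add_return) makes the increment and the read
 * a single atomic step:
 *
 *      static atomic_t next_id = ATOMIC_INIT(0);
 *
 *      int id = atomic_inc_return(&next_id);
 *
 * A separate atomic_inc() followed by atomic_read() would let another
 * CPU slip in between the two and produce a duplicate id.
 */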

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}
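
/*
 * Usage sketch (illustrative): a credit counter that must never go
 * negative; 'credits' and 'cost' are hypothetical:
 *
 *      if (atomic_sub_if_positive(cost, &credits) < 0)
 *              return -EBUSY;          (counter was left unchanged)
 *
 * On success the counter has been decremented by 'cost' and the new
 * value is returned; on failure the negative would-be result is
 * returned and the counter keeps its old value.
 */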

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
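
/*
 * Usage sketch (illustrative): taking a reference only while an object
 * is still live; 'obj' and its 'refcnt' field are hypothetical:
 *
 *      if (!atomic_inc_not_zero(&obj->refcnt))
 *              return NULL;            (already being torn down)
 *
 * The cmpxchg loop in atomic_add_unless() makes the test against zero
 * and the increment a single atomic step.
 */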

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
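
/*
 * Usage sketch (illustrative): the classic reference-count release
 * pattern; exactly one CPU sees the counter reach zero, so exactly one
 * CPU frees the object.  'struct obj' and obj_destroy() are hypothetical:
 *
 *      void obj_put(struct obj *obj)
 *      {
 *              if (atomic_dec_and_test(&obj->refcnt))
 *                      obj_destroy(obj);
 *      }
 */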

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)       ((v)->counter = (i))
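
/*
 * Usage sketch (illustrative): a byte counter that would quickly wrap a
 * 32-bit atomic_t; 'bytes_total' is a hypothetical name and this API only
 * exists on CONFIG_64BIT kernels:
 *
 *      static atomic64_t bytes_total = ATOMIC64_INIT(0);
 *
 *      atomic64_add(len, &bytes_total);
 *      printk("%lld bytes\n", (long long) atomic64_read(&bytes_total));
 */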

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter += i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter -= i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result += i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing; the plain (non-*_return)
 * versions are not.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
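
/*
 * Usage sketch (illustrative): making a store visible to other CPUs
 * before a following atomic_dec(); 'data_ready' and 'pending' are
 * hypothetical:
 *
 *      data_ready = 1;
 *      smp_mb__before_atomic_dec();
 *      atomic_dec(&pending);
 *
 * On MIPS these barriers expand to a full smp_mb() because the plain
 * atomic operations above are not serializing.
 */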

#endif /* _ASM_ATOMIC_H */