/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))
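
/*
 * Illustrative usage only (not part of the original header; the name
 * nr_pending is hypothetical): a counter is declared with ATOMIC_INIT and
 * then touched only through the atomic_* helpers.
 *
 *      static atomic_t nr_pending = ATOMIC_INIT(0);
 *
 *      void example_init(void)
 *      {
 *              atomic_set(&nr_pending, 10);
 *      }
 *
 *      int example_busy(void)
 *      {
 *              return atomic_read(&nr_pending) != 0;
 *      }
 */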

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically tests @v and subtracts @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
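
/*
 * Illustrative usage only (not part of the original header; the names
 * credits and example_take_credits are hypothetical): since the return
 * value is the old value minus @i, a non-negative result means the
 * subtraction was actually performed, so atomic_sub_if_positive() can
 * implement a simple credit pool.
 *
 *      static atomic_t credits = ATOMIC_INIT(8);
 *
 *      int example_take_credits(int n)
 *      {
 *              return atomic_sub_if_positive(n, &credits) >= 0;
 *      }
 */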

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
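
/*
 * Illustrative usage only (not part of the original header; the name
 * example_track_max is hypothetical): atomic_cmpxchg() is typically used
 * in a retry loop that re-reads the current value until the
 * compare-and-swap succeeds.
 *
 *      void example_track_max(atomic_t *max, int val)
 *      {
 *              int old = atomic_read(max);
 *
 *              while (old < val) {
 *                      int prev = atomic_cmpxchg(max, old, val);
 *                      if (prev == old)
 *                              break;
 *                      old = prev;
 *              }
 *      }
 */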

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
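
/*
 * Illustrative usage only (not part of the original header; struct
 * example_obj and example_obj_get are hypothetical): atomic_inc_not_zero()
 * is the usual way to take a reference only while the object is still
 * live, i.e. before its refcount has dropped to zero.
 *
 *      struct example_obj {
 *              atomic_t refcnt;
 *      };
 *
 *      int example_obj_get(struct example_obj *o)
 *      {
 *              return atomic_inc_not_zero(&o->refcnt);
 *      }
 */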

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
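
/*
 * Illustrative usage only (not part of the original header; the names
 * last_id and example_next_id are hypothetical): because
 * atomic_inc_return() returns the new value, it can hand out unique,
 * monotonically increasing ids without locking.
 *
 *      static atomic_t last_id = ATOMIC_INIT(0);
 *
 *      int example_next_id(void)
 *      {
 *              return atomic_inc_return(&last_id);
 *      }
 */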

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
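
/*
 * Illustrative usage only (not part of the original header; struct
 * example_obj and example_obj_put are hypothetical, and kfree() from
 * <linux/slab.h> is assumed): atomic_dec_and_test() lets exactly one
 * caller observe the transition to zero and free the object.
 *
 *      struct example_obj {
 *              atomic_t refcnt;
 *      };
 *
 *      void example_obj_put(struct example_obj *o)
 *      {
 *              if (atomic_dec_and_test(&o->refcnt))
 *                      kfree(o);
 *      }
 */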

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)       ((v)->counter = (i))
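
/*
 * Illustrative usage only (not part of the original header; the names
 * rx_bytes and example_account_rx are hypothetical): atomic64_t suits
 * counters that may exceed 32 bits, such as byte counts.
 *
 *      static atomic64_t rx_bytes = ATOMIC64_INIT(0);
 *
 *      void example_account_rx(long len)
 *      {
 *              atomic64_add(len, &rx_bytes);
 *      }
 */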

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically tests @v and subtracts @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * The atomic*_return operations are serializing; the plain (non-*_return)
 * versions are not, hence the explicit barriers below.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
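
/*
 * Illustrative usage only (not part of the original header; the names
 * users, shared_data and example_release are hypothetical): because plain
 * atomic_dec() is not serializing, a store that must be visible to other
 * CPUs before the decrement needs an explicit barrier.
 *
 *      static atomic_t users = ATOMIC_INIT(1);
 *      static int shared_data;
 *
 *      void example_release(void)
 *      {
 *              shared_data = 0;
 *              smp_mb__before_atomic_dec();
 *              atomic_dec(&users);
 *      }
 */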

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */