Diffstat (limited to 'include/linux/atomic.h')
 include/linux/atomic.h (-rw-r--r--) | 453 ++++++++++++++++++++++++++++---------
 1 file changed, 343 insertions(+), 110 deletions(-)
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 01ce3997cb42..1e8e88bdaf09 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -2,6 +2,8 @@
 /* Atomic operations usable in machine independent code */
 #ifndef _LINUX_ATOMIC_H
 #define _LINUX_ATOMIC_H
+#include <linux/types.h>
+
 #include <asm/atomic.h>
 #include <asm/barrier.h>
 
@@ -36,40 +38,46 @@
  * barriers on top of the relaxed variant. In the case where the relaxed
  * variant is already fully ordered, no additional barriers are needed.
  *
- * Besides, if an arch has a special barrier for acquire/release, it could
- * implement its own __atomic_op_* and use the same framework for building
- * variants
- *
- * If an architecture overrides __atomic_op_acquire() it will probably want
- * to define smp_mb__after_spinlock().
+ * If an architecture overrides __atomic_acquire_fence() it will probably
+ * want to define smp_mb__after_spinlock().
  */
-#ifndef __atomic_op_acquire
+#ifndef __atomic_acquire_fence
+#define __atomic_acquire_fence smp_mb__after_atomic
+#endif
+
+#ifndef __atomic_release_fence
+#define __atomic_release_fence smp_mb__before_atomic
+#endif
+
+#ifndef __atomic_pre_full_fence
+#define __atomic_pre_full_fence smp_mb__before_atomic
+#endif
+
+#ifndef __atomic_post_full_fence
+#define __atomic_post_full_fence smp_mb__after_atomic
+#endif
+
 #define __atomic_op_acquire(op, args...) \
 ({ \
         typeof(op##_relaxed(args)) __ret = op##_relaxed(args); \
-        smp_mb__after_atomic(); \
+        __atomic_acquire_fence(); \
         __ret; \
 })
-#endif
 
-#ifndef __atomic_op_release
 #define __atomic_op_release(op, args...) \
 ({ \
-        smp_mb__before_atomic(); \
+        __atomic_release_fence(); \
         op##_relaxed(args); \
 })
-#endif
 
-#ifndef __atomic_op_fence
 #define __atomic_op_fence(op, args...) \
 ({ \
         typeof(op##_relaxed(args)) __ret; \
-        smp_mb__before_atomic(); \
+        __atomic_pre_full_fence(); \
         __ret = op##_relaxed(args); \
-        smp_mb__after_atomic(); \
+        __atomic_post_full_fence(); \
         __ret; \
 })
-#endif
 
 /* atomic_add_return_relaxed */
 #ifndef atomic_add_return_relaxed
@@ -95,11 +103,23 @@
 #endif
 #endif /* atomic_add_return_relaxed */
 
+#ifndef atomic_inc
+#define atomic_inc(v) atomic_add(1, (v))
+#endif
+
 /* atomic_inc_return_relaxed */
 #ifndef atomic_inc_return_relaxed
+
+#ifndef atomic_inc_return
+#define atomic_inc_return(v) atomic_add_return(1, (v))
+#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
+#define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
+#define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
+#else /* atomic_inc_return */
 #define atomic_inc_return_relaxed atomic_inc_return
 #define atomic_inc_return_acquire atomic_inc_return
 #define atomic_inc_return_release atomic_inc_return
+#endif /* atomic_inc_return */
 
 #else /* atomic_inc_return_relaxed */
 
@@ -143,11 +163,23 @@
 #endif
 #endif /* atomic_sub_return_relaxed */
 
+#ifndef atomic_dec
+#define atomic_dec(v) atomic_sub(1, (v))
+#endif
+
 /* atomic_dec_return_relaxed */
 #ifndef atomic_dec_return_relaxed
+
+#ifndef atomic_dec_return
+#define atomic_dec_return(v) atomic_sub_return(1, (v))
+#define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
+#define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
+#define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
+#else /* atomic_dec_return */
 #define atomic_dec_return_relaxed atomic_dec_return
 #define atomic_dec_return_acquire atomic_dec_return
 #define atomic_dec_return_release atomic_dec_return
+#endif /* atomic_dec_return */
 
 #else /* atomic_dec_return_relaxed */
 
@@ -328,12 +360,22 @@
 #endif
 #endif /* atomic_fetch_and_relaxed */
 
-#ifdef atomic_andnot
-/* atomic_fetch_andnot_relaxed */
+#ifndef atomic_andnot
+#define atomic_andnot(i, v) atomic_and(~(int)(i), (v))
+#endif
+
 #ifndef atomic_fetch_andnot_relaxed
-#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
-#define atomic_fetch_andnot_acquire atomic_fetch_andnot
-#define atomic_fetch_andnot_release atomic_fetch_andnot
+
+#ifndef atomic_fetch_andnot
+#define atomic_fetch_andnot(i, v) atomic_fetch_and(~(int)(i), (v))
+#define atomic_fetch_andnot_relaxed(i, v) atomic_fetch_and_relaxed(~(int)(i), (v))
+#define atomic_fetch_andnot_acquire(i, v) atomic_fetch_and_acquire(~(int)(i), (v))
+#define atomic_fetch_andnot_release(i, v) atomic_fetch_and_release(~(int)(i), (v))
+#else /* atomic_fetch_andnot */
+#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
+#define atomic_fetch_andnot_acquire atomic_fetch_andnot
+#define atomic_fetch_andnot_release atomic_fetch_andnot
+#endif /* atomic_fetch_andnot */
 
 #else /* atomic_fetch_andnot_relaxed */
 
@@ -352,7 +394,6 @@
         __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
 #endif
 #endif /* atomic_fetch_andnot_relaxed */
-#endif /* atomic_andnot */
 
 /* atomic_fetch_xor_relaxed */
 #ifndef atomic_fetch_xor_relaxed
@@ -520,112 +561,140 @@
 #endif /* xchg_relaxed */
 
 /**
+ * atomic_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns the original value of @v.
+ */
+#ifndef atomic_fetch_add_unless
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
+{
+        int c = atomic_read(v);
+
+        do {
+                if (unlikely(c == u))
+                        break;
+        } while (!atomic_try_cmpxchg(v, &c, c + a));
+
+        return c;
+}
+#endif
+
+/**
  * atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline bool atomic_add_unless(atomic_t *v, int a, int u)
 {
-        return __atomic_add_unless(v, a, u) != u;
+        return atomic_fetch_add_unless(v, a, u) != u;
 }
 
 /**
  * atomic_inc_not_zero - increment unless the number is zero
  * @v: pointer of type atomic_t
  *
- * Atomically increments @v by 1, so long as @v is non-zero.
- * Returns non-zero if @v was non-zero, and zero otherwise.
+ * Atomically increments @v by 1, if @v is non-zero.
+ * Returns true if the increment was done.
  */
 #ifndef atomic_inc_not_zero
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 #endif
 
-#ifndef atomic_andnot
-static inline void atomic_andnot(int i, atomic_t *v)
-{
-        atomic_and(~i, v);
-}
-
-static inline int atomic_fetch_andnot(int i, atomic_t *v)
-{
-        return atomic_fetch_and(~i, v);
-}
-
-static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
+/**
+ * atomic_inc_and_test - increment and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic_inc_and_test
+static inline bool atomic_inc_and_test(atomic_t *v)
 {
-        return atomic_fetch_and_relaxed(~i, v);
+        return atomic_inc_return(v) == 0;
 }
+#endif
 
-static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
+/**
+ * atomic_dec_and_test - decrement and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+#ifndef atomic_dec_and_test
+static inline bool atomic_dec_and_test(atomic_t *v)
 {
-        return atomic_fetch_and_acquire(~i, v);
+        return atomic_dec_return(v) == 0;
 }
+#endif
 
-static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
+/**
+ * atomic_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic_sub_and_test
+static inline bool atomic_sub_and_test(int i, atomic_t *v)
 {
-        return atomic_fetch_and_release(~i, v);
+        return atomic_sub_return(i, v) == 0;
 }
 #endif
 
 /**
- * atomic_inc_not_zero_hint - increment if not null
+ * atomic_add_negative - add and test if negative
+ * @i: integer value to add
  * @v: pointer of type atomic_t
- * @hint: probable value of the atomic before the increment
- *
- * This version of atomic_inc_not_zero() gives a hint of probable
- * value of the atomic. This helps processor to not read the memory
- * before doing the atomic read/modify/write cycle, lowering
- * number of bus transactions on some arches.
  *
- * Returns: 0 if increment was not done, 1 otherwise.
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
  */
-#ifndef atomic_inc_not_zero_hint
-static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
+#ifndef atomic_add_negative
+static inline bool atomic_add_negative(int i, atomic_t *v)
 {
-        int val, c = hint;
-
-        /* sanity test, should be removed by compiler if hint is a constant */
-        if (!hint)
-                return atomic_inc_not_zero(v);
-
-        do {
-                val = atomic_cmpxchg(v, c, c + 1);
-                if (val == c)
-                        return 1;
-                c = val;
-        } while (c);
-
-        return 0;
+        return atomic_add_return(i, v) < 0;
 }
 #endif
 
 #ifndef atomic_inc_unless_negative
-static inline int atomic_inc_unless_negative(atomic_t *p)
+static inline bool atomic_inc_unless_negative(atomic_t *v)
 {
-        int v, v1;
-        for (v = 0; v >= 0; v = v1) {
-                v1 = atomic_cmpxchg(p, v, v + 1);
-                if (likely(v1 == v))
-                        return 1;
-        }
-        return 0;
+        int c = atomic_read(v);
+
+        do {
+                if (unlikely(c < 0))
+                        return false;
+        } while (!atomic_try_cmpxchg(v, &c, c + 1));
+
+        return true;
 }
 #endif
 
 #ifndef atomic_dec_unless_positive
-static inline int atomic_dec_unless_positive(atomic_t *p)
+static inline bool atomic_dec_unless_positive(atomic_t *v)
 {
-        int v, v1;
-        for (v = 0; v <= 0; v = v1) {
-                v1 = atomic_cmpxchg(p, v, v - 1);
-                if (likely(v1 == v))
-                        return 1;
-        }
-        return 0;
+        int c = atomic_read(v);
+
+        do {
+                if (unlikely(c > 0))
+                        return false;
+        } while (!atomic_try_cmpxchg(v, &c, c - 1));
+
+        return true;
 }
 #endif
 
@@ -639,17 +708,14 @@ static inline int atomic_dec_unless_positive(atomic_t *p)
 #ifndef atomic_dec_if_positive
 static inline int atomic_dec_if_positive(atomic_t *v)
 {
-        int c, old, dec;
-        c = atomic_read(v);
-        for (;;) {
+        int dec, c = atomic_read(v);
+
+        do {
                 dec = c - 1;
                 if (unlikely(dec < 0))
                         break;
-                old = atomic_cmpxchg((v), c, dec);
-                if (likely(old == c))
-                        break;
-                c = old;
-        }
+        } while (!atomic_try_cmpxchg(v, &c, dec));
+
         return dec;
 }
 #endif
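The loops introduced above and below follow the atomic_try_cmpxchg() idiom: unlike atomic_cmpxchg(), try_cmpxchg() returns a bool and, when the compare fails, writes the value it actually found back through its second argument, so the caller's expected value is refreshed without an explicit re-read. A minimal sketch of the pattern outside the patch context (example_inc_below() and its limit parameter are illustrative only, not part of this file):

static inline bool example_inc_below(atomic_t *v, int limit)
{
        int c = atomic_read(v);         /* optimistic snapshot of the old value */

        do {
                if (c >= limit)
                        return false;
                /* on failure, atomic_try_cmpxchg() updates c with the current value */
        } while (!atomic_try_cmpxchg(v, &c, c + 1));

        return true;
}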
@@ -693,11 +759,23 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_add_return_relaxed */
 
+#ifndef atomic64_inc
+#define atomic64_inc(v) atomic64_add(1, (v))
+#endif
+
 /* atomic64_inc_return_relaxed */
 #ifndef atomic64_inc_return_relaxed
+
+#ifndef atomic64_inc_return
+#define atomic64_inc_return(v) atomic64_add_return(1, (v))
+#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
+#define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
+#define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
+#else /* atomic64_inc_return */
 #define atomic64_inc_return_relaxed atomic64_inc_return
 #define atomic64_inc_return_acquire atomic64_inc_return
 #define atomic64_inc_return_release atomic64_inc_return
+#endif /* atomic64_inc_return */
 
 #else /* atomic64_inc_return_relaxed */
 
@@ -742,11 +820,23 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_sub_return_relaxed */
 
+#ifndef atomic64_dec
+#define atomic64_dec(v) atomic64_sub(1, (v))
+#endif
+
 /* atomic64_dec_return_relaxed */
 #ifndef atomic64_dec_return_relaxed
+
+#ifndef atomic64_dec_return
+#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
+#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
+#define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
+#define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
+#else /* atomic64_dec_return */
 #define atomic64_dec_return_relaxed atomic64_dec_return
 #define atomic64_dec_return_acquire atomic64_dec_return
 #define atomic64_dec_return_release atomic64_dec_return
+#endif /* atomic64_dec_return */
 
 #else /* atomic64_dec_return_relaxed */
 
@@ -927,12 +1017,22 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_fetch_and_relaxed */
 
-#ifdef atomic64_andnot
-/* atomic64_fetch_andnot_relaxed */
+#ifndef atomic64_andnot
+#define atomic64_andnot(i, v) atomic64_and(~(long long)(i), (v))
+#endif
+
 #ifndef atomic64_fetch_andnot_relaxed
-#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
-#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
-#define atomic64_fetch_andnot_release atomic64_fetch_andnot
+
+#ifndef atomic64_fetch_andnot
+#define atomic64_fetch_andnot(i, v) atomic64_fetch_and(~(long long)(i), (v))
+#define atomic64_fetch_andnot_relaxed(i, v) atomic64_fetch_and_relaxed(~(long long)(i), (v))
+#define atomic64_fetch_andnot_acquire(i, v) atomic64_fetch_and_acquire(~(long long)(i), (v))
+#define atomic64_fetch_andnot_release(i, v) atomic64_fetch_and_release(~(long long)(i), (v))
+#else /* atomic64_fetch_andnot */
+#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
+#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
+#define atomic64_fetch_andnot_release atomic64_fetch_andnot
+#endif /* atomic64_fetch_andnot */
 
 #else /* atomic64_fetch_andnot_relaxed */
 
@@ -951,7 +1051,6 @@ static inline int atomic_dec_if_positive(atomic_t *v)
         __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
 #endif
 #endif /* atomic64_fetch_andnot_relaxed */
-#endif /* atomic64_andnot */
 
 /* atomic64_fetch_xor_relaxed */
 #ifndef atomic64_fetch_xor_relaxed
@@ -1049,30 +1148,164 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
 #endif /* atomic64_try_cmpxchg */
 
-#ifndef atomic64_andnot
-static inline void atomic64_andnot(long long i, atomic64_t *v)
+/**
+ * atomic64_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns the original value of @v.
+ */
+#ifndef atomic64_fetch_add_unless
+static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
+                                                  long long u)
 {
-        atomic64_and(~i, v);
+        long long c = atomic64_read(v);
+
+        do {
+                if (unlikely(c == u))
+                        break;
+        } while (!atomic64_try_cmpxchg(v, &c, c + a));
+
+        return c;
 }
+#endif
 
-static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
+/**
+ * atomic64_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns true if the addition was done.
+ */
+static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
 {
-        return atomic64_fetch_and(~i, v);
+        return atomic64_fetch_add_unless(v, a, u) != u;
 }
 
-static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
+/**
+ * atomic64_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1, if @v is non-zero.
+ * Returns true if the increment was done.
+ */
+#ifndef atomic64_inc_not_zero
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+#endif
+
+/**
+ * atomic64_inc_and_test - increment and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic64_inc_and_test
+static inline bool atomic64_inc_and_test(atomic64_t *v)
 {
-        return atomic64_fetch_and_relaxed(~i, v);
+        return atomic64_inc_return(v) == 0;
 }
+#endif
 
-static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
+/**
+ * atomic64_dec_and_test - decrement and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+#ifndef atomic64_dec_and_test
+static inline bool atomic64_dec_and_test(atomic64_t *v)
 {
-        return atomic64_fetch_and_acquire(~i, v);
+        return atomic64_dec_return(v) == 0;
 }
+#endif
 
-static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
+/**
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic64_sub_and_test
+static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
+{
+        return atomic64_sub_return(i, v) == 0;
+}
+#endif
+
+/**
+ * atomic64_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+#ifndef atomic64_add_negative
+static inline bool atomic64_add_negative(long long i, atomic64_t *v)
 {
-        return atomic64_fetch_and_release(~i, v);
+        return atomic64_add_return(i, v) < 0;
+}
+#endif
+
+#ifndef atomic64_inc_unless_negative
+static inline bool atomic64_inc_unless_negative(atomic64_t *v)
+{
+        long long c = atomic64_read(v);
+
+        do {
+                if (unlikely(c < 0))
+                        return false;
+        } while (!atomic64_try_cmpxchg(v, &c, c + 1));
+
+        return true;
+}
+#endif
+
+#ifndef atomic64_dec_unless_positive
+static inline bool atomic64_dec_unless_positive(atomic64_t *v)
+{
+        long long c = atomic64_read(v);
+
+        do {
+                if (unlikely(c > 0))
+                        return false;
+        } while (!atomic64_try_cmpxchg(v, &c, c - 1));
+
+        return true;
+}
+#endif
+
+/*
+ * atomic64_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic64_t
+ *
+ * The function returns the old value of *v minus 1, even if
+ * the atomic64 variable, v, was not decremented.
+ */
+#ifndef atomic64_dec_if_positive
+static inline long long atomic64_dec_if_positive(atomic64_t *v)
+{
+        long long dec, c = atomic64_read(v);
+
+        do {
+                dec = c - 1;
+                if (unlikely(dec < 0))
+                        break;
+        } while (!atomic64_try_cmpxchg(v, &c, dec));
+
+        return dec;
 }
 #endif
 
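Taken together with the __atomic_op_*() wrappers near the top of the file, an architecture only has to provide a _relaxed primitive and the generic layer supplies the ordered forms; the fence hooks default to smp_mb__after_atomic()/smp_mb__before_atomic() unless the architecture overrides them. A rough sketch of what the acquire form reduces to after the patch (example_fetch_add_acquire() is an illustrative name, not a kernel symbol):

static inline int example_fetch_add_acquire(int i, atomic_t *v)
{
        /* __atomic_op_acquire(atomic_fetch_add, i, v) expands to roughly this: */
        int __ret = atomic_fetch_add_relaxed(i, v);     /* arch-provided relaxed RMW */

        __atomic_acquire_fence();       /* smp_mb__after_atomic() unless overridden */
        return __ret;
}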