Diffstat (limited to 'arch/x86/include/asm/refcount.h')
 -rw-r--r--  arch/x86/include/asm/refcount.h | 79 ++++++++++++++++++++++++++++++++++++++++++++++-------------------------------
 1 file changed, 48 insertions(+), 31 deletions(-)
diff --git a/arch/x86/include/asm/refcount.h b/arch/x86/include/asm/refcount.h
index 19b90521954c..a8b5e1e13319 100644
--- a/arch/x86/include/asm/refcount.h
+++ b/arch/x86/include/asm/refcount.h
@@ -4,6 +4,41 @@
  * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
  * PaX/grsecurity.
  */
+
+#ifdef __ASSEMBLY__
+
+#include <asm/asm.h>
+#include <asm/bug.h>
+
+.macro REFCOUNT_EXCEPTION counter:req
+	.pushsection .text..refcount
+111:	lea \counter, %_ASM_CX
+112:	ud2
+	ASM_UNREACHABLE
+	.popsection
+113:	_ASM_EXTABLE_REFCOUNT(112b, 113b)
+.endm
+
+/* Trigger refcount exception if refcount result is negative. */
+.macro REFCOUNT_CHECK_LT_ZERO counter:req
+	js 111f
+	REFCOUNT_EXCEPTION counter="\counter"
+.endm
+
+/* Trigger refcount exception if refcount result is zero or negative. */
+.macro REFCOUNT_CHECK_LE_ZERO counter:req
+	jz 111f
+	REFCOUNT_CHECK_LT_ZERO counter="\counter"
+.endm
+
+/* Trigger refcount exception unconditionally. */
+.macro REFCOUNT_ERROR counter:req
+	jmp 111f
+	REFCOUNT_EXCEPTION counter="\counter"
+.endm
+
+#else /* __ASSEMBLY__ */
+
 #include <linux/refcount.h>
 #include <asm/bug.h>
 
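Note on the hunk above: the checks become GAS .macro definitions, expanded by the assembler rather than by the C preprocessor. A C file can then invoke such a macro from inline asm with a single-line string, provided the assembler has already seen the definition; the kernel arranges that globally in companion patches of this series, while a single translation unit can do it with a top-level asm(). A minimal standalone sketch of the mechanism, with the kernel specifics (exception table, .text..refcount fixup, _ASM_CX) stripped out; BUG_IF_NEGATIVE and .text.unlikely are illustrative names, not kernel API, and x86-64 is assumed:

/* Define a GAS macro once at file scope, then invoke it by name from
 * inline asm.  Quoting the argument (counter="...") matters: a memory
 * operand such as (%rdi,%rax,4) contains commas, which would otherwise
 * be parsed as separate macro arguments. */
asm(".macro BUG_IF_NEGATIVE counter:req\n\t"
    "js 1f\n\t"
    ".pushsection .text.unlikely\n"
    "1:\tlea \\counter, %rcx\n\t"	/* pass &counter to a would-be handler */
    "ud2\n\t"				/* trap; no recovery in this sketch */
    ".popsection\n"
    ".endm");

static inline void dec_checked(int *v)
{
	asm volatile("decl %[counter]\n\t"
		     "BUG_IF_NEGATIVE counter=\"%[counter]\""
		     : [counter] "+m" (*v)
		     : : "cc", "cx");
}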
@@ -15,34 +50,11 @@
  * central refcount exception. The fixup address for the exception points
  * back to the regular execution flow in .text.
  */
-#define _REFCOUNT_EXCEPTION				\
-	".pushsection .text..refcount\n"		\
-	"111:\tlea %[counter], %%" _ASM_CX "\n"		\
-	"112:\t" ASM_UD2 "\n"				\
-	ASM_UNREACHABLE					\
-	".popsection\n"					\
-	"113:\n"					\
-	_ASM_EXTABLE_REFCOUNT(112b, 113b)
-
-/* Trigger refcount exception if refcount result is negative. */
-#define REFCOUNT_CHECK_LT_ZERO				\
-	"js 111f\n\t"					\
-	_REFCOUNT_EXCEPTION
-
-/* Trigger refcount exception if refcount result is zero or negative. */
-#define REFCOUNT_CHECK_LE_ZERO				\
-	"jz 111f\n\t"					\
-	REFCOUNT_CHECK_LT_ZERO
-
-/* Trigger refcount exception unconditionally. */
-#define REFCOUNT_ERROR					\
-	"jmp 111f\n\t"					\
-	_REFCOUNT_EXCEPTION
 
 static __always_inline void refcount_add(unsigned int i, refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
-		REFCOUNT_CHECK_LT_ZERO
+		"REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
 		: [counter] "+m" (r->refs.counter)
 		: "ir" (i)
 		: "cc", "cx");
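Why the call sites change: GCC estimates the cost of an asm() statement from the length of its string, roughly one instruction per line, and the old C macros pasted the whole multi-line exception trampoline into every call site. That inflated the apparent size of tiny functions like refcount_add() and defeated inlining of their callers. With the assembler macro, the compiler sees two lines and the assembler expands the body after code generation. A userspace mock of the two shapes the inliner sees (x86-64; reuses the BUG_IF_NEGATIVE macro from the sketch above; the lock prefix is written out because LOCK_PREFIX is kernel-internal):

typedef struct { int counter; } mock_refcount_t;

/* Old shape: the trampoline is part of the asm string at every call
 * site, so all of these lines count against the caller's inline budget. */
static inline void add_old_shape(unsigned int i, mock_refcount_t *r)
{
	asm volatile("lock addl %1,%0\n\t"
		     "js 111f\n\t"
		     ".pushsection .text.unlikely\n"
		     "111:\tlea %[counter], %%rcx\n"
		     "112:\tud2\n"
		     ".popsection\n"
		     "113:\n"
		     : [counter] "+m" (r->counter)
		     : "ir" (i)
		     : "cc", "cx");
}

/* New shape: two lines; the macro body is emitted by the assembler and
 * no longer inflates GCC's size estimate. */
static inline void add_new_shape(unsigned int i, mock_refcount_t *r)
{
	asm volatile("lock addl %1,%0\n\t"
		     "BUG_IF_NEGATIVE counter=\"%[counter]\""
		     : [counter] "+m" (r->counter)
		     : "ir" (i)
		     : "cc", "cx");
}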
@@ -51,7 +63,7 @@ static __always_inline void refcount_add(unsigned int i, refcount_t *r)
 static __always_inline void refcount_inc(refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "incl %0\n\t"
-		REFCOUNT_CHECK_LT_ZERO
+		"REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
 		: [counter] "+m" (r->refs.counter)
 		: : "cc", "cx");
 }
@@ -59,7 +71,7 @@ static __always_inline void refcount_inc(refcount_t *r)
 static __always_inline void refcount_dec(refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "decl %0\n\t"
-		REFCOUNT_CHECK_LE_ZERO
+		"REFCOUNT_CHECK_LE_ZERO counter=\"%[counter]\""
 		: [counter] "+m" (r->refs.counter)
 		: : "cc", "cx");
 }
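refcount_dec() gets the LE variant while inc/add get LT because a plain decrement must never drop the last reference: landing exactly on zero (jz) is as much a bug there as going negative (js), whereas the *_and_test() helpers in the next hunk reach zero legitimately and only trap below it. A self-contained userspace sketch of the ZF/SF behavior the two macros test after the decrement (x86, GCC/Clang; names are illustrative):

#include <stdio.h>

/* Decrement *v and report the ZF/SF state that jz/js would test. */
static void dec_flags(int *v, unsigned char *zf, unsigned char *sf)
{
	asm volatile("decl %[counter]\n\t"
		     "setz %[zf]\n\t"	/* ZF: result == 0 (jz 111f) */
		     "sets %[sf]"	/* SF: result <  0 (js 111f) */
		     : [counter] "+m" (*v), [zf] "=q" (*zf), [sf] "=q" (*sf)
		     : : "cc");
}

int main(void)
{
	int a = 2, b = 1, c = 0;
	unsigned char zf, sf;

	dec_flags(&a, &zf, &sf);	/* 2 -> 1: ZF=0 SF=0, no check fires */
	printf("a: zf=%d sf=%d\n", zf, sf);
	dec_flags(&b, &zf, &sf);	/* 1 -> 0: ZF=1, only LE fires */
	printf("b: zf=%d sf=%d\n", zf, sf);
	dec_flags(&c, &zf, &sf);	/* 0 -> -1: SF=1, LT and LE fire */
	printf("c: zf=%d sf=%d\n", zf, sf);
	return 0;
}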
@@ -67,14 +79,17 @@ static __always_inline void refcount_dec(refcount_t *r)
 static __always_inline __must_check
 bool refcount_sub_and_test(unsigned int i, refcount_t *r)
 {
-	GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl", REFCOUNT_CHECK_LT_ZERO,
-				  r->refs.counter, "er", i, "%0", e, "cx");
+
+	return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
+					 "REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+					 r->refs.counter, e, "er", i, "cx");
 }
 
 static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
-	GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl", REFCOUNT_CHECK_LT_ZERO,
-				 r->refs.counter, "%0", e, "cx");
+	return GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
+					"REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+					r->refs.counter, e, "cx");
 }
 
 static __always_inline __must_check
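Besides swapping the check string for a macro invocation (the operand is named [var] rather than [counter] here, matching the name the RMWcc helpers give their memory operand), the call sites now return the result explicitly. That tracks the contemporaneous rework of asm/rmwcc.h: the old GEN_*_RMWcc macros were statements that returned on the caller's behalf through asm-goto labels, while the reworked ones are statement expressions that evaluate to the condition. A compilable userspace sketch of the two shapes (GCC/Clang with asm goto; DECL_AND_TEST_* are illustrative names, and the real rmwcc.h internals differ in detail):

#include <stdbool.h>
#include <stdio.h>

/* Old shape: a statement that returns for the caller. */
#define DECL_AND_TEST_OLD(var)						\
do {									\
	asm goto("decl %0\n\t"						\
		 "je %l[cc_zero]"					\
		 : : "m" (var) : "cc", "memory" : cc_zero);		\
	return false;							\
cc_zero:								\
	return true;							\
} while (0)

/* New shape: a statement expression yielding bool, so the call site
 * must say 'return' itself -- exactly what this hunk adds. */
#define DECL_AND_TEST_NEW(var)						\
({									\
	bool __ret = false;						\
	asm goto("decl %0\n\t"						\
		 "je %l[cc_zero]"					\
		 : : "m" (var) : "cc", "memory" : cc_zero);		\
	if (0) {							\
cc_zero:	__ret = true;						\
	}								\
	__ret;								\
})

static bool old_style(int *v) { DECL_AND_TEST_OLD(*v); }
static bool new_style(int *v) { return DECL_AND_TEST_NEW(*v); }

int main(void)
{
	int a = 1, b = 2;
	printf("%d %d\n", old_style(&a), new_style(&b));	/* prints: 1 0 */
	return 0;
}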
@@ -91,7 +106,7 @@ bool refcount_add_not_zero(unsigned int i, refcount_t *r)
 
 	/* Did we try to increment from/to an undesirable state? */
 	if (unlikely(c < 0 || c == INT_MAX || result < c)) {
-		asm volatile(REFCOUNT_ERROR
+		asm volatile("REFCOUNT_ERROR counter=\"%[counter]\""
 			     : : [counter] "m" (r->refs.counter)
 			     : "cc", "cx");
 		break;
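REFCOUNT_ERROR is the unconditional flavor: the saturation test here is done in C, so the asm only has to reach the trampoline. For all three checks the recovery path is the same: the lea in REFCOUNT_EXCEPTION leaves the counter's address in %_ASM_CX, ud2 traps, and the registered fixup saturates the counter and resumes at the 113: label. A simplified sketch of that handler, modeled on ex_handler_refcount() in arch/x86/mm/extable.c (abridged from memory; the real one also WARNs once and distinguishes the zero and negative cases):

static bool ex_handler_refcount(const struct exception_table_entry *fixup,
				struct pt_regs *regs, int trapnr)
{
	/* Park the counter in the middle of the negative range, so
	 * subsequent inc/dec operations keep it saturated. */
	*(int *)regs->cx = INT_MIN / 2;

	regs->ip = ex_fixup_addr(fixup);	/* resume after the ud2 */
	return true;
}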
@@ -107,4 +122,6 @@ static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
 	return refcount_add_not_zero(1, r);
 }
 
+#endif /* __ASSEMBLY__ */
+
 #endif