author		Will Deacon <will.deacon@arm.com>	2015-07-30 14:19:43 -0400
committer	Will Deacon <will.deacon@arm.com>	2015-07-30 15:16:53 -0400
commit		a14949e09a228dcd4cc5088c90c375429c7d102c (patch)
tree		ab11f9350f85e2becd350780b4e70dbc9c61e15b
parent		ef5e724b25c9f90b7683bb2d45833ebac0989dcb (diff)
arm64: cmpxchg: truncate sub-word signed types before comparison
When performing a cmpxchg operation on a signed sub-word type (e.g. s8), we need to ensure that the upper register bits of the "old" value used for comparison are zeroed, otherwise we may erroneously fail the cmpxchg, which may even be interpreted as success by the caller (if the compiler performs the truncation as part of its check). This has been observed in mod_state, where negative values were causing problems with this_cpu_cmpxchg.

This patch fixes the issue by explicitly casting 8-bit and 16-bit "old" values using unsigned types in our cmpxchg wrappers. 32-bit types can be left alone, since the underlying asm makes use of W registers in this case.

Reported-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
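To make the failure mode concrete, here is a standalone userspace sketch (not kernel code; the variable names are illustrative): widening a negative s8 "old" value to unsigned long sign-extends it, while an exclusive byte load such as LDXRB only ever yields a zero-extended value, so without the truncating cast the comparison can never match.

/* Standalone illustration of the sign-extension mismatch (not kernel code). */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	int8_t old = -1;				/* signed sub-word "old" value */

	unsigned long wanted = (unsigned long)old;	/* sign-extended: 0xffff...ffff */
	unsigned long loaded = (uint8_t)old;		/* what a byte load yields: 0xff */
	unsigned long fixed  = (uint8_t)old;		/* the patch's (u8) cast */

	printf("compare without cast: %d\n", wanted == loaded);	/* 0: spurious failure */
	printf("compare with cast   : %d\n", fixed == loaded);		/* 1: matches */
	return 0;
}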
-rw-r--r--	arch/arm64/include/asm/cmpxchg.h	8
1 file changed, 4 insertions, 4 deletions
diff --git a/arch/arm64/include/asm/cmpxchg.h b/arch/arm64/include/asm/cmpxchg.h
index 7bfda0944c9b..899e9f1d19e4 100644
--- a/arch/arm64/include/asm/cmpxchg.h
+++ b/arch/arm64/include/asm/cmpxchg.h
@@ -122,9 +122,9 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 {
 	switch (size) {
 	case 1:
-		return __cmpxchg_case_1(ptr, old, new);
+		return __cmpxchg_case_1(ptr, (u8)old, new);
 	case 2:
-		return __cmpxchg_case_2(ptr, old, new);
+		return __cmpxchg_case_2(ptr, (u16)old, new);
 	case 4:
 		return __cmpxchg_case_4(ptr, old, new);
 	case 8:
@@ -141,9 +141,9 @@ static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
 {
 	switch (size) {
 	case 1:
-		return __cmpxchg_case_mb_1(ptr, old, new);
+		return __cmpxchg_case_mb_1(ptr, (u8)old, new);
 	case 2:
-		return __cmpxchg_case_mb_2(ptr, old, new);
+		return __cmpxchg_case_mb_2(ptr, (u16)old, new);
 	case 4:
 		return __cmpxchg_case_mb_4(ptr, old, new);
 	case 8:
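For readers without the surrounding kernel source, the sketch below is a rough userspace analogue of the wrapper being patched; the name my_cmpxchg and the use of GCC's __atomic_compare_exchange_n builtin are stand-ins for the kernel's hand-written __cmpxchg_case_* assembly, shown only to illustrate the shape of the fix. What it mirrors from the patch is that the 1-byte and 2-byte cases truncate "old" with an unsigned cast before comparing, so a sign-extended caller value cannot produce a spurious mismatch; in the kernel the 4-byte and 8-byte cases need no cast because the asm already compares at the operand's own register width.

/* Userspace analogue of the patched wrapper (illustrative only). */
#include <stddef.h>
#include <stdint.h>

static unsigned long my_cmpxchg(volatile void *ptr, unsigned long old,
				unsigned long new, size_t size)
{
	switch (size) {
	case 1: {
		uint8_t expected = (uint8_t)old;	/* truncate, as the patch does */
		__atomic_compare_exchange_n((volatile uint8_t *)ptr, &expected,
					    (uint8_t)new, 0,
					    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
		return expected;	/* previous value, zero-extended */
	}
	case 2: {
		uint16_t expected = (uint16_t)old;	/* truncate, as the patch does */
		__atomic_compare_exchange_n((volatile uint16_t *)ptr, &expected,
					    (uint16_t)new, 0,
					    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
		return expected;
	}
	case 4: {
		uint32_t expected = (uint32_t)old;	/* cast only for the builtin's type;
							 * the kernel asm uses W registers here */
		__atomic_compare_exchange_n((volatile uint32_t *)ptr, &expected,
					    (uint32_t)new, 0,
					    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
		return expected;
	}
	default: {
		uint64_t expected = old;
		__atomic_compare_exchange_n((volatile uint64_t *)ptr, &expected,
					    (uint64_t)new, 0,
					    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
		return expected;
	}
	}
}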