author	Russell King <rmk+kernel@arm.linux.org.uk>	2011-01-16 12:59:44 -0500
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2011-02-02 16:21:53 -0500
commit	a16ede35a2659170c855c5d267776666c0630f1f (patch)
tree	67773b1c9aac890cd5838a6d58983f2d8643a1ba /arch/arm/lib/bitops.h
parent	e5310f611d17ed4e92e0d4e46fd9f6fb40e66df3 (diff)
ARM: bitops: ensure set/clear/change bitops take a word-aligned pointer
Add additional instructions to our assembly bitops functions to ensure that they only operate on word-aligned pointers. This will be necessary when we switch these operations to use the word-based exclusive operations.

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
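The check itself is a two-instruction idiom, repeated at the top of each macro in the patch below. An annotated sketch (the instructions are exactly those added by the patch; the comments, and the reading of r1 as the bit-array pointer, are mine):

	ands	ip, r1, #3		@ ip = r1 & 3; sets Z when the pointer is word-aligned
	strneb	r1, [ip]		@ executes only if misaligned: byte store to address 1..3

If r1 is word-aligned, ands leaves ip zero with Z set and the conditional strneb is skipped at no cost. A misaligned pointer instead stores a byte to address 1, 2 or 3, addresses which are normally unmapped in the kernel, so the offending caller faults immediately rather than silently operating on a rounded-down word.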
Diffstat (limited to 'arch/arm/lib/bitops.h')
-rw-r--r--	arch/arm/lib/bitops.h	8
1 file changed, 8 insertions(+), 0 deletions(-)
diff --git a/arch/arm/lib/bitops.h b/arch/arm/lib/bitops.h
index d42252918bf..bd00551fb79 100644
--- a/arch/arm/lib/bitops.h
+++ b/arch/arm/lib/bitops.h
@@ -1,6 +1,8 @@
 
 #if __LINUX_ARM_ARCH__ >= 6 && defined(CONFIG_CPU_32v6K)
 	.macro	bitop, instr
+	ands	ip, r1, #3
+	strneb	r1, [ip]		@ assert word-aligned
 	mov	r2, #1
 	and	r3, r0, #7		@ Get bit offset
 	add	r1, r1, r0, lsr #3	@ Get byte offset
@@ -14,6 +16,8 @@
 	.endm
 
 	.macro	testop, instr, store
+	ands	ip, r1, #3
+	strneb	r1, [ip]		@ assert word-aligned
 	and	r3, r0, #7		@ Get bit offset
 	mov	r2, #1
 	add	r1, r1, r0, lsr #3	@ Get byte offset
@@ -32,6 +36,8 @@
 	.endm
 #else
 	.macro	bitop, instr
+	ands	ip, r1, #3
+	strneb	r1, [ip]		@ assert word-aligned
 	and	r2, r0, #7
 	mov	r3, #1
 	mov	r3, r3, lsl r2
@@ -52,6 +58,8 @@
  * to avoid dirtying the data cache.
  */
 	.macro	testop, instr, store
+	ands	ip, r1, #3
+	strneb	r1, [ip]		@ assert word-aligned
 	add	r1, r1, r0, lsr #3
 	and	r3, r0, #7
 	mov	r0, #1