Diffstat (limited to 'arch/sparc/lib/atomic.S')
 arch/sparc/lib/atomic.S | 100 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 100 insertions(+), 0 deletions(-)
diff --git a/arch/sparc/lib/atomic.S b/arch/sparc/lib/atomic.S
new file mode 100644
index 000000000000..f48ad0c4dadb
--- /dev/null
+++ b/arch/sparc/lib/atomic.S
@@ -0,0 +1,100 @@
+/* atomic.S: Move this stuff here for better ICACHE hit rates.
+ *
+ * Copyright (C) 1996 David S. Miller (davem@caipfs.rutgers.edu)
+ */
+
+#include <linux/config.h>
+#include <asm/ptrace.h>
+#include <asm/psr.h>
+
+	.text
+	.align	4
+
+	.globl	__atomic_begin
+__atomic_begin:
+
+#ifndef CONFIG_SMP
+	.globl	___xchg32_sun4c
+___xchg32_sun4c:
+	rd	%psr, %g3
+	andcc	%g3, PSR_PIL, %g0
+	bne	1f
+	 nop
+	wr	%g3, PSR_PIL, %psr
+	nop; nop; nop
+1:
+	andcc	%g3, PSR_PIL, %g0
+	ld	[%g1], %g7
+	bne	1f
+	 st	%g2, [%g1]
+	wr	%g3, 0x0, %psr
+	nop; nop; nop
+1:
+	mov	%g7, %g2
+	jmpl	%o7 + 8, %g0
+	 mov	%g4, %o7
+
+	.globl	___xchg32_sun4md
+___xchg32_sun4md:
+	swap	[%g1], %g2
+	jmpl	%o7 + 8, %g0
+	 mov	%g4, %o7
+#endif
+
+	/* Read asm-sparc/atomic.h carefully to understand how this works for SMP.
+	 * Really, some things here for SMP are overly clever, go read the header.
+	 */
+	.globl	___atomic24_add
+___atomic24_add:
+	rd	%psr, %g3		! Keep the code small, old way was stupid
+	nop; nop; nop;			! Let the bits set
+	or	%g3, PSR_PIL, %g7	! Disable interrupts
+	wr	%g7, 0x0, %psr		! Set %psr
+	nop; nop; nop;			! Let the bits set
+#ifdef CONFIG_SMP
+1:	ldstub	[%g1 + 3], %g7		! Spin on the byte lock for SMP.
+	orcc	%g7, 0x0, %g0		! Did we get it?
+	bne	1b			! Nope...
+	 ld	[%g1], %g7		! Load locked atomic24_t
+	sra	%g7, 8, %g7		! Get signed 24-bit integer
+	add	%g7, %g2, %g2		! Add in argument
+	sll	%g2, 8, %g7		! Transpose back to atomic24_t
+	st	%g7, [%g1]		! Clever: This releases the lock as well.
+#else
+	ld	[%g1], %g7		! Load locked atomic24_t
+	add	%g7, %g2, %g2		! Add in argument
+	st	%g2, [%g1]		! Store it back
+#endif
+	wr	%g3, 0x0, %psr		! Restore original PSR_PIL
+	nop; nop; nop;			! Let the bits set
+	jmpl	%o7, %g0		! NOTE: not + 8, see callers in atomic.h
+	 mov	%g4, %o7		! Restore %o7
+
+	.globl	___atomic24_sub
+___atomic24_sub:
+	rd	%psr, %g3		! Keep the code small, old way was stupid
+	nop; nop; nop;			! Let the bits set
+	or	%g3, PSR_PIL, %g7	! Disable interrupts
+	wr	%g7, 0x0, %psr		! Set %psr
+	nop; nop; nop;			! Let the bits set
+#ifdef CONFIG_SMP
+1:	ldstub	[%g1 + 3], %g7		! Spin on the byte lock for SMP.
+	orcc	%g7, 0x0, %g0		! Did we get it?
+	bne	1b			! Nope...
+	 ld	[%g1], %g7		! Load locked atomic24_t
+	sra	%g7, 8, %g7		! Get signed 24-bit integer
+	sub	%g7, %g2, %g2		! Subtract argument
+	sll	%g2, 8, %g7		! Transpose back to atomic24_t
+	st	%g7, [%g1]		! Clever: This releases the lock as well
+#else
+	ld	[%g1], %g7		! Load locked atomic24_t
+	sub	%g7, %g2, %g2		! Subtract argument
+	st	%g2, [%g1]		! Store it back
+#endif
+	wr	%g3, 0x0, %psr		! Restore original PSR_PIL
+	nop; nop; nop;			! Let the bits set
+	jmpl	%o7, %g0		! NOTE: not + 8, see callers in atomic.h
+	 mov	%g4, %o7		! Restore %o7
+
+	.globl	__atomic_end
+__atomic_end:
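
For readers who do not want to dig asm-sparc/atomic.h out of the tree: the SMP paths above work because, on big-endian SPARC, byte 3 of the 32-bit counter word is its least significant byte. The signed 24-bit value lives in the upper 24 bits, and the low byte doubles as a spin lock, which "ldstub [%g1 + 3], %g7" acquires by atomically storing 0xff and returning the old byte. Below is a minimal C sketch of that layout; the names are hypothetical and __atomic_exchange_n merely stands in for ldstub, so treat it as an illustration inferred from the assembly, not the kernel's actual header.

#include <stdint.h>

/* Illustration only: a hypothetical C model of the atomic24_t layout
 * used by the SMP paths above, not the kernel's asm-sparc/atomic.h. */
typedef struct { volatile int32_t counter; } atomic24_t;

/* Stand-in for SPARC ldstub: atomically store 0xff, return the old byte. */
static unsigned char ldstub(volatile unsigned char *p)
{
	return __atomic_exchange_n(p, 0xff, __ATOMIC_ACQUIRE);
}

static int atomic24_add_sketch(int i, atomic24_t *v)
{
	/* On a big-endian word, offset 3 is the least significant byte:
	 * the same byte "ldstub [%g1 + 3], %g7" spins on. */
	volatile unsigned char *lock = (volatile unsigned char *)&v->counter + 3;
	int val;

	while (ldstub(lock) != 0)
		;	/* old byte was nonzero: somebody else holds the lock */

	val = (v->counter >> 8) + i;	/* "sra %g7, 8": extract signed 24-bit value */
	/* "sll %g2, 8" + "st": the shift zeroes the low byte, so this one
	 * store writes the new value and releases the lock at once.
	 * (Cast via uint32_t to keep the left shift of a negative value
	 * well defined in C; the hardware store has no such worry.) */
	v->counter = (int32_t)((uint32_t)val << 8);
	return val;
}

That single store is the "clever" part the inline comments brag about: releasing the lock costs no extra instruction. It also explains why the UP (#else) paths skip the lock entirely, since with PSR_PIL raised there is nothing left to race against on one CPU.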