author	David S. Miller <davem@davemloft.net>	2012-10-28 02:00:41 -0400
committer	David S. Miller <davem@davemloft.net>	2012-10-28 02:00:41 -0400
commit	e9b9eb59ffcdee09ec96b040f85c919618f4043e
tree	30f93cc20aa577ec5b12f609641fdf84d0bd5124 /arch/sparc/include
parent	270c10e00a1e557e068803a22e0556281ceb1830
sparc64: Use pause instruction when available.
In atomic backoff and cpu_relax(), use the pause instruction found on
SPARC-T4 and later.

It makes the cpu strand unselectable for the given number of cycles,
unless an intervening disrupting trap occurs.

Signed-off-by: David S. Miller <davem@davemloft.net>
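As the two hunks below show, "pause" here is reached by writing a cycle count to ancillary state register 27 (%asr27); the strand then becomes unselectable for up to that many cycles, or until a disrupting trap arrives. A minimal sketch of that operation follows; the helper name is hypothetical and not part of this patch, which emits the write inline instead.

/* Hedged sketch: park the current strand for up to "cycles" cycles on
 * SPARC-T4 and later.  The patch emits this write inline from the
 * patched BACKOFF_SPIN and cpu_relax() sequences; the helper below is
 * only illustrative.
 */
static inline void t4_pause(unsigned long cycles)
{
	__asm__ __volatile__("wr	%0, 0, %%asr27"
			     : /* no outputs */
			     : "r" (cycles)
			     : "memory");
}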
Diffstat (limited to 'arch/sparc/include')
-rw-r--r--	arch/sparc/include/asm/backoff.h	32
-rw-r--r--	arch/sparc/include/asm/processor_64.h	13
2 files changed, 29 insertions(+), 16 deletions(-)
diff --git a/arch/sparc/include/asm/backoff.h b/arch/sparc/include/asm/backoff.h
index 64b077b3b13b..20f01df0871b 100644
--- a/arch/sparc/include/asm/backoff.h
+++ b/arch/sparc/include/asm/backoff.h
@@ -11,19 +11,25 @@
 #define BACKOFF_LABEL(spin_label, continue_label) \
 	spin_label
 
 #define BACKOFF_SPIN(reg, tmp, label)	\
 	mov	reg, tmp; \
 88:	rd	%ccr, %g0; \
 	rd	%ccr, %g0; \
 	rd	%ccr, %g0; \
-	brnz,pt	tmp, 88b; \
-	 sub	tmp, 1, tmp; \
-	set	BACKOFF_LIMIT, tmp; \
-	cmp	reg, tmp; \
-	bg,pn	%xcc, label; \
-	 nop; \
-	ba,pt	%xcc, label; \
-	 sllx	reg, 1, reg;
+	.section	.pause_patch,"ax"; \
+	.word	88b; \
+	sllx	tmp, 7, tmp; \
+	wr	tmp, 0, %asr27; \
+	clr	tmp; \
+	.previous; \
+	brnz,pt	tmp, 88b; \
+	 sub	tmp, 1, tmp; \
+	set	BACKOFF_LIMIT, tmp; \
+	cmp	reg, tmp; \
+	bg,pn	%xcc, label; \
+	 nop; \
+	ba,pt	%xcc, label; \
+	 sllx	reg, 1, reg;
 
 #else
 
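Each expansion of BACKOFF_SPIN now also drops a small record into the new .pause_patch section: the address of the 88: label (the three %ccr delay reads) followed by the three instructions that should replace them on pause-capable cpus (scale the backoff counter by 128, write it to %asr27, clear the counter). The code that consumes these records is not part of this diff; the sketch below is only an assumption about how a boot-time patcher could apply them, and the struct, symbol, and function names (pause_patch_entry, __pause_patch, __pause_patch_end, pause_patch) are illustrative.

#include <linux/init.h>

/* Hedged sketch of a boot-time patcher for the .pause_patch records
 * emitted by BACKOFF_SPIN and cpu_relax().  Record layout follows the
 * macros: one patch address plus three replacement instructions.
 */
struct pause_patch_entry {
	unsigned int	addr;		/* address of the 88:/99: label */
	unsigned int	insns[3];	/* instructions to write there   */
};

extern struct pause_patch_entry __pause_patch, __pause_patch_end;

static void __init pause_patch(void)
{
	struct pause_patch_entry *p;

	for (p = &__pause_patch; p < &__pause_patch_end; p++) {
		unsigned long i, addr = p->addr;

		/* Overwrite the three "rd %ccr, %g0" reads at the
		 * recorded site and flush each patched word so the
		 * instruction cache sees the new sequence.
		 */
		for (i = 0; i < 3; i++) {
			unsigned int *insn = (unsigned int *)(addr + i * 4);

			*insn = p->insns[i];
			__asm__ __volatile__("flush	%0"
					     : : "r" (insn) : "memory");
		}
	}
}

On cpus without pause support the records are simply never applied, and the original %ccr reads keep serving as the delay.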
diff --git a/arch/sparc/include/asm/processor_64.h b/arch/sparc/include/asm/processor_64.h
index 986563409469..9cdf52eec48a 100644
--- a/arch/sparc/include/asm/processor_64.h
+++ b/arch/sparc/include/asm/processor_64.h
@@ -196,9 +196,16 @@ extern unsigned long get_wchan(struct task_struct *task);
 #define KSTK_EIP(tsk)  (task_pt_regs(tsk)->tpc)
 #define KSTK_ESP(tsk)  (task_pt_regs(tsk)->u_regs[UREG_FP])
 
-#define cpu_relax()	asm volatile("rd	%%ccr, %%g0\n\t" \
-				     "rd	%%ccr, %%g0\n\t" \
-				     "rd	%%ccr, %%g0" \
+#define cpu_relax()	asm volatile("\n99:\n\t" \
+				     "rd	%%ccr, %%g0\n\t" \
+				     "rd	%%ccr, %%g0\n\t" \
+				     "rd	%%ccr, %%g0\n\t" \
+				     ".section	.pause_patch,\"ax\"\n\t"\
+				     ".word	99b\n\t" \
+				     "wr	%%g0, 128, %%asr27\n\t" \
+				     "nop\n\t" \
+				     "nop\n\t" \
+				     ".previous" \
 				     ::: "memory")
 
 /* Prefetch support.  This is tuned for UltraSPARC-III and later.
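On cpus without pause support the 99: site keeps its three %ccr reads, so cpu_relax() costs what it did before; on SPARC-T4 and later the recorded payload overwrites them and each call pauses the strand for up to 128 cycles. A hedged sketch of what the patched expansion is equivalent to (the macro name is hypothetical):

/* Illustrative only: cpu_relax() after the .pause_patch payload above
 * has been applied on a pause-capable cpu.  The three dummy %ccr reads
 * are replaced by a fixed 128-cycle pause plus two nops.
 */
#define cpu_relax_patched()	asm volatile("wr	%%g0, 128, %%asr27\n\t" \
					     "nop\n\t" \
					     "nop" \
					     ::: "memory")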