author	David S. Miller <davem@davemloft.net>	2012-10-27 23:00:41 -0700
committer	David S. Miller <davem@davemloft.net>	2012-10-27 23:00:41 -0700
commit	e9b9eb59ffcdee09ec96b040f85c919618f4043e (patch)
tree	30f93cc20aa577ec5b12f609641fdf84d0bd5124 /arch/sparc/include/asm
parent	270c10e00a1e557e068803a22e0556281ceb1830 (diff)
sparc64: Use pause instruction when available.
In atomic backoff and cpu_relax(), use the pause instruction found on
SPARC-T4 and later.

It makes the cpu strand unselectable for the given number of cycles,
unless an intervening disrupting trap occurs.

Signed-off-by: David S. Miller <davem@davemloft.net>
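The pause count is programmed by writing a cycle count to %asr27, which is
exactly what the patched sequences below do. A minimal inline-assembly sketch
of such a write (the helper name is hypothetical and not part of this commit):

static inline void t4_pause(unsigned long cycles)
{
	/* Write the desired cycle count to %asr27; on SPARC-T4 and later
	 * the strand is not selected again until the count expires or a
	 * disrupting trap arrives.
	 */
	__asm__ __volatile__("wr %0, 0, %%asr27"
			     : /* no outputs */
			     : "r" (cycles)
			     : "memory");
}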
Diffstat (limited to 'arch/sparc/include/asm')
-rw-r--r--	arch/sparc/include/asm/backoff.h	| 32
-rw-r--r--	arch/sparc/include/asm/processor_64.h	| 13
2 files changed, 29 insertions, 16 deletions
diff --git a/arch/sparc/include/asm/backoff.h b/arch/sparc/include/asm/backoff.h
index 64b077b3b13b..20f01df0871b 100644
--- a/arch/sparc/include/asm/backoff.h
+++ b/arch/sparc/include/asm/backoff.h
@@ -11,19 +11,25 @@
#define BACKOFF_LABEL(spin_label, continue_label) \
spin_label
-#define BACKOFF_SPIN(reg, tmp, label) \
- mov reg, tmp; \
-88: rd %ccr, %g0; \
- rd %ccr, %g0; \
- rd %ccr, %g0; \
- brnz,pt tmp, 88b; \
- sub tmp, 1, tmp; \
- set BACKOFF_LIMIT, tmp; \
- cmp reg, tmp; \
- bg,pn %xcc, label; \
- nop; \
- ba,pt %xcc, label; \
- sllx reg, 1, reg;
+#define BACKOFF_SPIN(reg, tmp, label) \
+ mov reg, tmp; \
+88: rd %ccr, %g0; \
+ rd %ccr, %g0; \
+ rd %ccr, %g0; \
+ .section .pause_patch,"ax"; \
+ .word 88b; \
+ sllx tmp, 7, tmp; \
+ wr tmp, 0, %asr27; \
+ clr tmp; \
+ .previous; \
+ brnz,pt tmp, 88b; \
+ sub tmp, 1, tmp; \
+ set BACKOFF_LIMIT, tmp; \
+ cmp reg, tmp; \
+ bg,pn %xcc, label; \
+ nop; \
+ ba,pt %xcc, label; \
+ sllx reg, 1, reg;
#else
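Each BACKOFF_SPIN expansion above records an entry in the new .pause_patch
section: the address of the 88: label followed by the three instructions that
should replace the rd %ccr, %g0 reads on CPUs that have the pause instruction.
A plausible boot-time consumer of those entries lives outside this diff; the
struct, symbol, and flushi() helper names below are assumptions, shown only to
illustrate the mechanism:

struct pause_patch_entry {
	unsigned int	addr;		/* .word 88b (or 99b below)        */
	unsigned int	insns[3];	/* three replacement instructions  */
};

extern struct pause_patch_entry __pause_patch, __pause_patch_end;

static void __init apply_pause_patch(void)
{
	struct pause_patch_entry *p;

	for (p = &__pause_patch; p < &__pause_patch_end; p++) {
		unsigned int *insn = (unsigned int *)(unsigned long) p->addr;
		int i;

		/* Overwrite the three rd %ccr, %g0 placeholders and flush
		 * the patched words from the instruction cache.
		 */
		for (i = 0; i < 3; i++) {
			insn[i] = p->insns[i];
			flushi(&insn[i]);
		}
	}
}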
diff --git a/arch/sparc/include/asm/processor_64.h b/arch/sparc/include/asm/processor_64.h
index 986563409469..9cdf52eec48a 100644
--- a/arch/sparc/include/asm/processor_64.h
+++ b/arch/sparc/include/asm/processor_64.h
@@ -196,9 +196,16 @@ extern unsigned long get_wchan(struct task_struct *task);
#define KSTK_EIP(tsk) (task_pt_regs(tsk)->tpc)
#define KSTK_ESP(tsk) (task_pt_regs(tsk)->u_regs[UREG_FP])
-#define cpu_relax() asm volatile("rd %%ccr, %%g0\n\t" \
- "rd %%ccr, %%g0\n\t" \
- "rd %%ccr, %%g0" \
+#define cpu_relax() asm volatile("\n99:\n\t" \
+ "rd %%ccr, %%g0\n\t" \
+ "rd %%ccr, %%g0\n\t" \
+ "rd %%ccr, %%g0\n\t" \
+ ".section .pause_patch,\"ax\"\n\t"\
+ ".word 99b\n\t" \
+ "wr %%g0, 128, %%asr27\n\t" \
+ "nop\n\t" \
+ "nop\n\t" \
+ ".previous" \
::: "memory")
/* Prefetch support. This is tuned for UltraSPARC-III and later.
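With the patch applied, any ordinary spin-wait loop built on cpu_relax() ends
up executing the wr %g0, 128, %asr27 sequence instead of the three %ccr reads
on T4 and later. An illustrative (hypothetical) caller:

static void wait_for_flag(volatile int *flag)
{
	/* Each iteration yields the strand for up to 128 cycles on
	 * SPARC-T4 and later once the .pause_patch entries are applied.
	 */
	while (!*flag)
		cpu_relax();
}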