author		David S. Miller <davem@sunset.davemloft.net>	2006-02-02 21:55:10 -0800
committer	David S. Miller <davem@sunset.davemloft.net>	2006-03-20 01:11:35 -0800
commit		ffe483d55229fadbaf4cc7316d47024a24ecd1a2 (patch)
tree		70bdb6c94d5b3512a7b2a3ff06979ac2e4e869bf /arch
parent		92704a1c63c3b481870d02636d0b5a70c7e21cd1 (diff)
[SPARC64]: Add explicit register args to trap state loading macros.
This, as well as making the code cleaner, allows a simplification in the TSB miss handling path.

Signed-off-by: David S. Miller <davem@davemloft.net>
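
The point of the change is visible in the macro interface itself: instead of trap-time macros that implicitly clobber fixed global registers, each call site now names the destination and scratch registers. A minimal sketch of such a parameterized macro follows; the symbol per_cpu_thread_info, the 8-bytes-per-CPU layout, and __GET_CPUID are illustrative assumptions, not the kernel's actual definitions.

	/* Hedged sketch only: a trap-time "load current thread_info" helper
	 * written with explicit register arguments.  DEST receives the
	 * thread_info pointer, TMP is scratch.  per_cpu_thread_info and
	 * __GET_CPUID are assumed names for illustration.
	 */
	#define TRAP_LOAD_THREAD_REG(DEST, TMP)			\
		__GET_CPUID(TMP)				\
		sethi	%hi(per_cpu_thread_info), DEST;		\
		or	DEST, %lo(per_cpu_thread_info), DEST;	\
		sllx	TMP, 3, TMP;	/* 8 bytes per CPU */	\
		ldx	[DEST + TMP], DEST;

With the registers passed as arguments, a caller such as tsb_miss_page_table_walk no longer has to shuffle live values out of %g1/%g6 and back around the macro, which is exactly the save/restore dance removed by the tsb.S hunk below.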
Diffstat (limited to 'arch')
-rw-r--r--	arch/sparc64/kernel/entry.S	8
-rw-r--r--	arch/sparc64/kernel/etrap.S	10
-rw-r--r--	arch/sparc64/kernel/rtrap.S	2
-rw-r--r--	arch/sparc64/kernel/tsb.S	9
-rw-r--r--	arch/sparc64/kernel/winfixup.S	18
5 files changed, 20 insertions, 27 deletions
diff --git a/arch/sparc64/kernel/entry.S b/arch/sparc64/kernel/entry.S
index b3511ff5d04..4ca3ea0beaf 100644
--- a/arch/sparc64/kernel/entry.S
+++ b/arch/sparc64/kernel/entry.S
@@ -50,7 +50,7 @@ do_fpdis:
add %g0, %g0, %g0
ba,a,pt %xcc, rtrap_clr_l6
-1: TRAP_LOAD_THREAD_REG
+1: TRAP_LOAD_THREAD_REG(%g6, %g1)
ldub [%g6 + TI_FPSAVED], %g5
wr %g0, FPRS_FEF, %fprs
andcc %g5, FPRS_FEF, %g0
@@ -190,7 +190,7 @@ fp_other_bounce:
.globl do_fpother_check_fitos
.align 32
do_fpother_check_fitos:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
sethi %hi(fp_other_bounce - 4), %g7
or %g7, %lo(fp_other_bounce - 4), %g7
@@ -378,7 +378,7 @@ do_ivec:
sllx %g2, %g4, %g2
sllx %g4, 2, %g4
- TRAP_LOAD_IRQ_WORK
+ TRAP_LOAD_IRQ_WORK(%g6, %g1)
lduw [%g6 + %g4], %g5 /* g5 = irq_work(cpu, pil) */
stw %g5, [%g3 + 0x00] /* bucket->irq_chain = g5 */
@@ -422,7 +422,7 @@ setcc:
.globl utrap_trap
utrap_trap: /* %g3=handler,%g4=level */
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
ldx [%g6 + TI_UTRAPS], %g1
brnz,pt %g1, invoke_utrap
nop
diff --git a/arch/sparc64/kernel/etrap.S b/arch/sparc64/kernel/etrap.S
index d974d18b15b..b5f6bc52d91 100644
--- a/arch/sparc64/kernel/etrap.S
+++ b/arch/sparc64/kernel/etrap.S
@@ -31,7 +31,7 @@
.globl etrap, etrap_irq, etraptl1
etrap: rdpr %pil, %g2
etrap_irq:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
rdpr %tstate, %g1
sllx %g2, 20, %g3
andcc %g1, TSTATE_PRIV, %g0
@@ -100,7 +100,7 @@ etrap_irq:
stx %i7, [%sp + PTREGS_OFF + PT_V9_I7]
wrpr %g0, ETRAP_PSTATE2, %pstate
mov %l6, %g6
- LOAD_PER_CPU_BASE(%g4, %g3, %l1)
+ LOAD_PER_CPU_BASE(%g5, %g6, %g4, %g3, %l1)
jmpl %l2 + 0x4, %g0
ldx [%g6 + TI_TASK], %g4
@@ -124,7 +124,7 @@ etraptl1: /* Save tstate/tpc/tnpc of TL 1-->4 and the tl register itself.
* 0x58 TL4's TT
* 0x60 TL
*/
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
sub %sp, ((4 * 8) * 4) + 8, %g2
rdpr %tl, %g1
@@ -179,7 +179,7 @@ etraptl1: /* Save tstate/tpc/tnpc of TL 1-->4 and the tl register itself.
.align 64
.globl scetrap
scetrap:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
rdpr %pil, %g2
rdpr %tstate, %g1
sllx %g2, 20, %g3
@@ -250,7 +250,7 @@ scetrap:
stx %i6, [%sp + PTREGS_OFF + PT_V9_I6]
mov %l6, %g6
stx %i7, [%sp + PTREGS_OFF + PT_V9_I7]
- LOAD_PER_CPU_BASE(%g4, %g3, %l1)
+ LOAD_PER_CPU_BASE(%g5, %g6, %g4, %g3, %l1)
ldx [%g6 + TI_TASK], %g4
done
diff --git a/arch/sparc64/kernel/rtrap.S b/arch/sparc64/kernel/rtrap.S
index 64bc03610bc..61bd45e7697 100644
--- a/arch/sparc64/kernel/rtrap.S
+++ b/arch/sparc64/kernel/rtrap.S
@@ -226,7 +226,7 @@ rt_continue: ldx [%sp + PTREGS_OFF + PT_V9_G1], %g1
brz,pt %l3, 1f
nop
/* Must do this before thread reg is clobbered below. */
- LOAD_PER_CPU_BASE(%i0, %i1, %i2)
+ LOAD_PER_CPU_BASE(%g5, %g6, %i0, %i1, %i2)
1:
ldx [%sp + PTREGS_OFF + PT_V9_G6], %g6
ldx [%sp + PTREGS_OFF + PT_V9_G7], %g7
diff --git a/arch/sparc64/kernel/tsb.S b/arch/sparc64/kernel/tsb.S
index ff6a79beb98..28e38b168dd 100644
--- a/arch/sparc64/kernel/tsb.S
+++ b/arch/sparc64/kernel/tsb.S
@@ -36,14 +36,7 @@ tsb_miss_itlb:
nop
tsb_miss_page_table_walk:
- /* This clobbers %g1 and %g6, preserve them... */
- mov %g1, %g5
- mov %g6, %g2
-
- TRAP_LOAD_PGD_PHYS
-
- mov %g2, %g6
- mov %g5, %g1
+ TRAP_LOAD_PGD_PHYS(%g7, %g5)
USER_PGTABLE_WALK_TL1(%g4, %g7, %g5, %g2, tsb_do_fault)
diff --git a/arch/sparc64/kernel/winfixup.S b/arch/sparc64/kernel/winfixup.S
index 320a762d051..211021ae6e8 100644
--- a/arch/sparc64/kernel/winfixup.S
+++ b/arch/sparc64/kernel/winfixup.S
@@ -40,7 +40,7 @@ set_pcontext:
*/
.globl fill_fixup, spill_fixup
fill_fixup:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
rdpr %tstate, %g1
andcc %g1, TSTATE_PRIV, %g0
or %g4, FAULT_CODE_WINFIXUP, %g4
@@ -86,7 +86,7 @@ fill_fixup:
wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
mov %o7, %g6
ldx [%g6 + TI_TASK], %g4
- LOAD_PER_CPU_BASE(%g1, %g2, %g3)
+ LOAD_PER_CPU_BASE(%g5, %g6, %g1, %g2, %g3)
/* This is the same as below, except we handle this a bit special
* since we must preserve %l5 and %l6, see comment above.
@@ -105,7 +105,7 @@ fill_fixup:
* do not touch %g7 or %g2 so we handle the two cases fine.
*/
spill_fixup:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
ldx [%g6 + TI_FLAGS], %g1
andcc %g1, _TIF_32BIT, %g0
ldub [%g6 + TI_WSAVED], %g1
@@ -181,7 +181,7 @@ winfix_mna:
wrpr %g3, %tnpc
done
fill_fixup_mna:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
rdpr %tstate, %g1
andcc %g1, TSTATE_PRIV, %g0
be,pt %xcc, window_mna_from_user_common
@@ -209,14 +209,14 @@ fill_fixup_mna:
wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
mov %o7, %g6 ! Get current back.
ldx [%g6 + TI_TASK], %g4 ! Finish it.
- LOAD_PER_CPU_BASE(%g1, %g2, %g3)
+ LOAD_PER_CPU_BASE(%g5, %g6, %g1, %g2, %g3)
call mem_address_unaligned
add %sp, PTREGS_OFF, %o0
b,pt %xcc, rtrap
nop ! yes, the nop is correct
spill_fixup_mna:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
ldx [%g6 + TI_FLAGS], %g1
andcc %g1, _TIF_32BIT, %g0
ldub [%g6 + TI_WSAVED], %g1
@@ -284,7 +284,7 @@ winfix_dax:
wrpr %g3, %tnpc
done
fill_fixup_dax:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
rdpr %tstate, %g1
andcc %g1, TSTATE_PRIV, %g0
be,pt %xcc, window_dax_from_user_common
@@ -312,14 +312,14 @@ fill_fixup_dax:
wrpr %l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
mov %o7, %g6 ! Get current back.
ldx [%g6 + TI_TASK], %g4 ! Finish it.
- LOAD_PER_CPU_BASE(%g1, %g2, %g3)
+ LOAD_PER_CPU_BASE(%g5, %g6, %g1, %g2, %g3)
call spitfire_data_access_exception
add %sp, PTREGS_OFF, %o0
b,pt %xcc, rtrap
nop ! yes, the nop is correct
spill_fixup_dax:
- TRAP_LOAD_THREAD_REG
+ TRAP_LOAD_THREAD_REG(%g6, %g1)
ldx [%g6 + TI_FLAGS], %g1
andcc %g1, _TIF_32BIT, %g0
ldub [%g6 + TI_WSAVED], %g1