author     Maciej W. Rozycki <macro@linux-mips.org>   2006-08-23 14:26:50 +0100
committer  Ralf Baechle <ralf@linux-mips.org>         2007-09-14 19:08:43 +0100
commit     8df5beac2aa15b18a912ab585e1b86e748eda9ad
tree       9c1d6621a58ed7804cd68da2f5d8bbd6dd30de7b /arch
parent     48d480b0bde794781fcae9501fb043c1bac0e523
[MIPS] Workaround for 4Kc machine check exception
Signed-off-by: Maciej W. Rozycki <macro@linux-mips.org>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'arch')
-rw-r--r--  arch/mips/mm/tlbex.c  |  26
1 file changed, 25 insertions(+), 1 deletion(-)
diff --git a/arch/mips/mm/tlbex.c b/arch/mips/mm/tlbex.c
index 9cb39644b6f1..6c425b052442 100644
--- a/arch/mips/mm/tlbex.c
+++ b/arch/mips/mm/tlbex.c
@@ -58,6 +58,21 @@ static __init int __maybe_unused r10000_llsc_war(void)
}
/*
+ * Found by experiment: At least some revisions of the 4Kc throw a
+ * machine check exception under some circumstances, triggered by
+ * invalid values in the index register.  Delaying the tlbp instruction
+ * until after the next branch, plus adding an additional nop in front
+ * of tlbwi/tlbwr, avoids the invalid index register values.  Nobody
+ * knows why; it's not an issue caused by the core RTL.
+ *
+ */
+static __init int __attribute__((unused)) m4kc_tlbp_war(void)
+{
+ return (current_cpu_data.processor_id & 0xffff00) ==
+ (PRID_COMP_MIPS | PRID_IMP_4KC);
+}
+
+/*
* A little micro-assembler, intended for TLB refill handler
* synthesizing. It is intentionally kept simple, does only support
* a subset of instructions, and does not try to hide pipeline effects
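For reference, m4kc_tlbp_war() keys off the CP0 PrId register: masking with 0xffff00 keeps the company-ID and processor-ID fields and discards the revision, so every 4Kc stepping is matched. A standalone sketch of that test, outside the kernel; the PRID_* constant values below are assumed to mirror asm/cpu.h of that era and are shown only for illustration:

#include <stdio.h>

#define PRID_COMP_MIPS	0x010000	/* company-ID field, assumed value */
#define PRID_IMP_4KC	0x008000	/* processor-ID field, assumed value */

/* Same comparison as m4kc_tlbp_war(), minus the current_cpu_data plumbing. */
static int is_4kc(unsigned int prid)
{
	/* Mask off the revision byte so every 4Kc stepping matches. */
	return (prid & 0xffff00) == (PRID_COMP_MIPS | PRID_IMP_4KC);
}

int main(void)
{
	printf("4Kc rev 1  (0x018001): %d\n", is_4kc(0x018001));	/* 1 */
	printf("4Kc rev 7  (0x018007): %d\n", is_4kc(0x018007));	/* 1 */
	printf("other core (0x018100): %d\n", is_4kc(0x018100));	/* 0 */
	return 0;
}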
@@ -894,6 +909,8 @@ static __init void build_tlb_write_entry(u32 **p, struct label **l,
case CPU_20KC:
case CPU_25KF:
case CPU_LOONGSON2:
+ if (m4kc_tlbp_war())
+ i_nop(p);
tlbw(p);
break;
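The effect of the hunk above, modeled as a toy: when the workaround is active, one extra nop is emitted immediately before the TLB write. The i_nop()/tlbw() stand-ins below only record mnemonics and are not the kernel's micro-assembler API, which emits real MIPS opcodes into the handler buffer:

#include <stdio.h>

/* Toy stand-ins for the micro-assembler: record mnemonics only. */
static void emit(const char ***p, const char *insn) { *(*p)++ = insn; }
static void i_nop(const char ***p) { emit(p, "nop"); }
static void tlbw(const char ***p)  { emit(p, "tlbwr"); }	/* tlbwi on the non-random paths */

static void build_tlb_write_entry(const char ***p, int m4kc_tlbp_war)
{
	if (m4kc_tlbp_war)
		i_nop(p);	/* extra slot ahead of the write, per the hunk above */
	tlbw(p);
}

int main(void)
{
	const char *buf[4], **p = buf;

	build_tlb_write_entry(&p, 1);
	for (const char **i = buf; i < p; i++)
		printf("%s\n", *i);	/* prints: nop, tlbwr */
	return 0;
}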
@@ -1705,7 +1722,8 @@ build_r4000_tlbchange_handler_head(u32 **p, struct label **l,
l_smp_pgtable_change(l, *p);
# endif
iPTE_LW(p, l, pte, ptr); /* get even pte */
- build_tlb_probe_entry(p);
+ if (!m4kc_tlbp_war())
+ build_tlb_probe_entry(p);
}
static void __init
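With the workaround active, the shared handler head above no longer issues the probe; tlbp is instead deferred into each handler body, after the PTE test and its branch, as the next three hunks show (a combined sketch follows the load-handler hunk).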
@@ -1747,6 +1765,8 @@ static void __init build_r4000_tlb_load_handler(void)
build_r4000_tlbchange_handler_head(&p, &l, &r, K0, K1);
build_pte_present(&p, &l, &r, K0, K1, label_nopage_tlbl);
+ if (m4kc_tlbp_war())
+ build_tlb_probe_entry(&p);
build_make_valid(&p, &r, K0, K1);
build_r4000_tlbchange_handler_tail(&p, &l, &r, K0, K1);
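Putting the head and load-handler hunks together: without the workaround the probe is emitted once in the shared head, before the PTE check; with it, the probe moves to after build_pte_present() and its branch, directly ahead of the write. A toy model of the resulting order, using a stand-in emit() helper rather than the kernel API; the store and modify hunks below apply the identical reordering:

#include <stdio.h>

/* Stand-in emitter: records only the order of the interesting operations. */
static void emit(const char *what) { printf("%s\n", what); }

static void build_tlb_load_handler(int m4kc_tlbp_war)
{
	/* handler head */
	emit("load PTE");			/* iPTE_LW */
	if (!m4kc_tlbp_war)
		emit("tlbp");			/* generic: probe before the branch */

	/* handler body */
	emit("branch if !PTE_PRESENT");		/* build_pte_present */
	if (m4kc_tlbp_war)
		emit("tlbp");			/* 4Kc: probe after the branch */
	emit("mark PTE valid");			/* build_make_valid */
	if (m4kc_tlbp_war)
		emit("nop");			/* see build_tlb_write_entry hunk */
	emit("tlbwi");				/* write the entry */
}

int main(void)
{
	puts("-- generic --");
	build_tlb_load_handler(0);
	puts("-- 4Kc workaround --");
	build_tlb_load_handler(1);
	return 0;
}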
@@ -1781,6 +1801,8 @@ static void __init build_r4000_tlb_store_handler(void)
build_r4000_tlbchange_handler_head(&p, &l, &r, K0, K1);
build_pte_writable(&p, &l, &r, K0, K1, label_nopage_tlbs);
+ if (m4kc_tlbp_war())
+ build_tlb_probe_entry(&p);
build_make_write(&p, &r, K0, K1);
build_r4000_tlbchange_handler_tail(&p, &l, &r, K0, K1);
@@ -1815,6 +1837,8 @@ static void __init build_r4000_tlb_modify_handler(void)
build_r4000_tlbchange_handler_head(&p, &l, &r, K0, K1);
build_pte_modifiable(&p, &l, &r, K0, K1, label_nopage_tlbm);
+ if (m4kc_tlbp_war())
+ build_tlb_probe_entry(&p);
/* Present and writable bits set, set accessed and dirty bits. */
build_make_write(&p, &r, K0, K1);
build_r4000_tlbchange_handler_tail(&p, &l, &r, K0, K1);