diff options
author | David Daney <ddaney@caviumnetworks.com> | 2010-02-10 18:12:44 -0500 |
---|---|---|
committer | Ralf Baechle <ralf@linux-mips.org> | 2010-02-27 06:53:25 -0500 |
commit | 9b8c38917b8e083a6343bb5a0c6bbaea78ebff7a (patch) | |
tree | 7b6a8513c3335f005e6a58b06f53cf179eabeb21 /arch/mips | |
parent | 52d7ecd033316b0540a6ac4af70574fae4aba295 (diff) |
MIPS: Use 64-bit stores to c0_entrylo on 64-bit kernels.
64-bit CPUs have 64-bit c0_entrylo{0,1} registers. We should use the
64-bit dmtc0 instruction to set them. This becomes important if we
want to set the RI and XI bits present in some processors.
Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/954/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'arch/mips')
-rw-r--r-- | arch/mips/mm/tlbex.c | 20 |
1 file changed, 10 insertions, 10 deletions
diff --git a/arch/mips/mm/tlbex.c b/arch/mips/mm/tlbex.c index bcf30026d1f6..4a2907c59569 100644 --- a/arch/mips/mm/tlbex.c +++ b/arch/mips/mm/tlbex.c | |||
@@ -460,14 +460,14 @@ static __cpuinit void build_huge_update_entries(u32 **p, | |||
460 | uasm_i_lui(p, tmp, HPAGE_SIZE >> (7 + 16)); | 460 | uasm_i_lui(p, tmp, HPAGE_SIZE >> (7 + 16)); |
461 | 461 | ||
462 | UASM_i_SRL(p, pte, pte, 6); /* convert to entrylo */ | 462 | UASM_i_SRL(p, pte, pte, 6); /* convert to entrylo */ |
463 | uasm_i_mtc0(p, pte, C0_ENTRYLO0); /* load it */ | 463 | UASM_i_MTC0(p, pte, C0_ENTRYLO0); /* load it */ |
464 | /* convert to entrylo1 */ | 464 | /* convert to entrylo1 */ |
465 | if (small_sequence) | 465 | if (small_sequence) |
466 | UASM_i_ADDIU(p, pte, pte, HPAGE_SIZE >> 7); | 466 | UASM_i_ADDIU(p, pte, pte, HPAGE_SIZE >> 7); |
467 | else | 467 | else |
468 | UASM_i_ADDU(p, pte, pte, tmp); | 468 | UASM_i_ADDU(p, pte, pte, tmp); |
469 | 469 | ||
470 | uasm_i_mtc0(p, pte, C0_ENTRYLO1); /* load it */ | 470 | UASM_i_MTC0(p, pte, C0_ENTRYLO1); /* load it */ |
471 | } | 471 | } |
472 | 472 | ||
473 | static __cpuinit void build_huge_handler_tail(u32 **p, | 473 | static __cpuinit void build_huge_handler_tail(u32 **p, |
@@ -686,18 +686,18 @@ static void __cpuinit build_update_entries(u32 **p, unsigned int tmp, | |||
686 | uasm_i_ld(p, tmp, 0, ptep); /* get even pte */ | 686 | uasm_i_ld(p, tmp, 0, ptep); /* get even pte */ |
687 | uasm_i_ld(p, ptep, sizeof(pte_t), ptep); /* get odd pte */ | 687 | uasm_i_ld(p, ptep, sizeof(pte_t), ptep); /* get odd pte */ |
688 | uasm_i_dsrl(p, tmp, tmp, 6); /* convert to entrylo0 */ | 688 | uasm_i_dsrl(p, tmp, tmp, 6); /* convert to entrylo0 */ |
689 | uasm_i_mtc0(p, tmp, C0_ENTRYLO0); /* load it */ | 689 | UASM_i_MTC0(p, tmp, C0_ENTRYLO0); /* load it */ |
690 | uasm_i_dsrl(p, ptep, ptep, 6); /* convert to entrylo1 */ | 690 | uasm_i_dsrl(p, ptep, ptep, 6); /* convert to entrylo1 */ |
691 | uasm_i_mtc0(p, ptep, C0_ENTRYLO1); /* load it */ | 691 | UASM_i_MTC0(p, ptep, C0_ENTRYLO1); /* load it */ |
692 | } else { | 692 | } else { |
693 | int pte_off_even = sizeof(pte_t) / 2; | 693 | int pte_off_even = sizeof(pte_t) / 2; |
694 | int pte_off_odd = pte_off_even + sizeof(pte_t); | 694 | int pte_off_odd = pte_off_even + sizeof(pte_t); |
695 | 695 | ||
696 | /* The pte entries are pre-shifted */ | 696 | /* The pte entries are pre-shifted */ |
697 | uasm_i_lw(p, tmp, pte_off_even, ptep); /* get even pte */ | 697 | uasm_i_lw(p, tmp, pte_off_even, ptep); /* get even pte */ |
698 | uasm_i_mtc0(p, tmp, C0_ENTRYLO0); /* load it */ | 698 | UASM_i_MTC0(p, tmp, C0_ENTRYLO0); /* load it */ |
699 | uasm_i_lw(p, ptep, pte_off_odd, ptep); /* get odd pte */ | 699 | uasm_i_lw(p, ptep, pte_off_odd, ptep); /* get odd pte */ |
700 | uasm_i_mtc0(p, ptep, C0_ENTRYLO1); /* load it */ | 700 | UASM_i_MTC0(p, ptep, C0_ENTRYLO1); /* load it */ |
701 | } | 701 | } |
702 | #else | 702 | #else |
703 | UASM_i_LW(p, tmp, 0, ptep); /* get even pte */ | 703 | UASM_i_LW(p, tmp, 0, ptep); /* get even pte */ |
@@ -706,14 +706,14 @@ static void __cpuinit build_update_entries(u32 **p, unsigned int tmp, | |||
706 | build_tlb_probe_entry(p); | 706 | build_tlb_probe_entry(p); |
707 | UASM_i_SRL(p, tmp, tmp, 6); /* convert to entrylo0 */ | 707 | UASM_i_SRL(p, tmp, tmp, 6); /* convert to entrylo0 */ |
708 | if (r4k_250MHZhwbug()) | 708 | if (r4k_250MHZhwbug()) |
709 | uasm_i_mtc0(p, 0, C0_ENTRYLO0); | 709 | UASM_i_MTC0(p, 0, C0_ENTRYLO0); |
710 | uasm_i_mtc0(p, tmp, C0_ENTRYLO0); /* load it */ | 710 | UASM_i_MTC0(p, tmp, C0_ENTRYLO0); /* load it */ |
711 | UASM_i_SRL(p, ptep, ptep, 6); /* convert to entrylo1 */ | 711 | UASM_i_SRL(p, ptep, ptep, 6); /* convert to entrylo1 */ |
712 | if (r45k_bvahwbug()) | 712 | if (r45k_bvahwbug()) |
713 | uasm_i_mfc0(p, tmp, C0_INDEX); | 713 | uasm_i_mfc0(p, tmp, C0_INDEX); |
714 | if (r4k_250MHZhwbug()) | 714 | if (r4k_250MHZhwbug()) |
715 | uasm_i_mtc0(p, 0, C0_ENTRYLO1); | 715 | UASM_i_MTC0(p, 0, C0_ENTRYLO1); |
716 | uasm_i_mtc0(p, ptep, C0_ENTRYLO1); /* load it */ | 716 | UASM_i_MTC0(p, ptep, C0_ENTRYLO1); /* load it */ |
717 | #endif | 717 | #endif |
718 | } | 718 | } |
719 | 719 | ||