ARC: MMUv4 preps/2 - Reshuffle PTE bits
With the previous commit freeing up PTE bits, reassign them so as to:
- Match each bit to its H/w counterpart where possible
  (e.g. MMUv2 GLOBAL/PRESENT; this avoids a shift in create_tlb(),
  as sketched below)
- Avoid holes in the _PAGE_xxx definitions
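
For illustration, a minimal sketch of the reshuffle (the bit positions
below are hypothetical, chosen only to show the idea; the real values
live in arch/arc/include/asm/pgtable.h):

	/* Before: SW PTE flag bits sat one position above their HW PD0
	 * counterparts, so MMU <= v2 paths had to shift right by 1
	 * before programming TLBPD0.
	 */
	#define _PAGE_GLOBAL	(1 << 9)	/* HW PD0 (G)lobal at bit 8 */
	#define _PAGE_PRESENT	(1 << 10)	/* HW PD0 (V)alid  at bit 9 */

	/* After: SW bit == HW bit, so the masked PTE flags can be OR'ed
	 * into PD0 as-is.
	 */
	#define _PAGE_GLOBAL	(1 << 8)
	#define _PAGE_PRESENT	(1 << 9)

With the SW and HW layouts aligned, create_tlb() no longer needs the
version-specific pd0_flags shuffling, and the MMUv2-only "lsr r2, r2"
in the tlbex.S fast path can be dropped, as the diff below shows.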
Signed-off-by: Vineet Gupta <vgupta@synopsys.com>
diff --git a/arch/arc/mm/tlb.c b/arch/arc/mm/tlb.c
index f990834..85a8716 100644
--- a/arch/arc/mm/tlb.c
+++ b/arch/arc/mm/tlb.c
@@ -342,7 +342,6 @@
{
unsigned long flags;
unsigned int idx, asid_or_sasid, rwx;
- unsigned long pd0_flags;
/*
* create_tlb() assumes that current->mm == vma->mm, since
@@ -381,17 +380,13 @@
/* update this PTE's credentials */
pte_val(*ptep) |= (_PAGE_PRESENT | _PAGE_ACCESSED);
- /* Create HW TLB entry Flags (in PD0) from PTE Flags */
-#if (CONFIG_ARC_MMU_VER <= 2)
- pd0_flags = ((pte_val(*ptep) & PTE_BITS_IN_PD0) >> 1);
-#else
- pd0_flags = ((pte_val(*ptep) & PTE_BITS_IN_PD0));
-#endif
+ /* Create HW TLB(PD0,PD1) from PTE */
/* ASID for this task */
asid_or_sasid = read_aux_reg(ARC_REG_PID) & 0xff;
- write_aux_reg(ARC_REG_TLBPD0, address | pd0_flags | asid_or_sasid);
+ write_aux_reg(ARC_REG_TLBPD0, address | asid_or_sasid |
+ (pte_val(*ptep) & PTE_BITS_IN_PD0));
/*
* ARC MMU provides fully orthogonal access bits for K/U mode,
diff --git a/arch/arc/mm/tlbex.S b/arch/arc/mm/tlbex.S
index ec382e5..50e83ca 100644
--- a/arch/arc/mm/tlbex.S
+++ b/arch/arc/mm/tlbex.S
@@ -229,9 +229,6 @@
sr r3, [ARC_REG_TLBPD1] ; these go in PD1
and r2, r0, PTE_BITS_IN_PD0 ; Extract other PTE flags: (V)alid, (G)lb
-#if (CONFIG_ARC_MMU_VER <= 2) /* Neednot be done with v3 onwards */
- lsr r2, r2 ; shift PTE flags to match layout in PD0
-#endif
lr r3,[ARC_REG_TLBPD0] ; MMU prepares PD0 with vaddr and asid