
[v3] parisc: Remove PTE load and fault check from L2_ptep macro

Message ID 20180929203459.GA6371@mx3210.localdomain (mailing list archive)
State Accepted, archived
Series [v3] parisc: Remove PTE load and fault check from L2_ptep macro

Commit Message

John David Anglin Sept. 29, 2018, 8:34 p.m. UTC
This change removes the PTE load and present check from the L2_ptep
macro; the load and check for kernel pages are now done in the
tlb_lock macro instead.  This avoids a double load and check for user
pages.  The load and check for user pages are now done inside the
lock, so the fault handler can't be called while the entry is being
updated.  This version uses an ordered store to release the lock when
the page table entry isn't present.  It also corrects the check in
the non-SMP case.


Signed-off-by: John David Anglin <dave.anglin@bell.net>
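
For readers following the control flow, here is a rough C11 sketch of
what the reworked tlb_lock macro now does.  All names in the sketch
(tlb_lock_check, pa_tlb_lock_word, PAGE_PRESENT, lock_acquire,
lock_release_ordered) are illustrative stand-ins, not the kernel's;
the authoritative version is the PA-RISC assembly in the patch below.

#include <stdatomic.h>
#include <stdbool.h>

#define PAGE_PRESENT 0x1u  /* stand-in for _PAGE_PRESENT_BIT */

/* ldcw-style lock word: zero means held, non-zero means free. */
static _Atomic unsigned int pa_tlb_lock_word = 1;

static void lock_acquire(void)
{
	/* Spin until the exchange returns non-zero, i.e. the lock was
	 * free and we just claimed it (the "1b" spin loop). */
	while (atomic_exchange_explicit(&pa_tlb_lock_word, 0,
					memory_order_acquire) == 0)
		;
}

static void lock_release_ordered(unsigned int spc)
{
	/* Mirrors "stw,ma \spc,0(\tmp)": a release store of the
	 * (non-zero) space id frees the lock. */
	atomic_store_explicit(&pa_tlb_lock_word, spc,
			      memory_order_release);
}

/* Returns true if the PTE is present; false means take the \fault path. */
static bool tlb_lock_check(unsigned int spc, const unsigned long *ptp,
			   unsigned long *pte)
{
#ifdef CONFIG_SMP
	if (spc != 0) {			/* user access: serialize updates */
		lock_acquire();
		*pte = *ptp;		/* single load, under the lock */
		if (!(*pte & PAGE_PRESENT)) {
			lock_release_ordered(spc);
			return false;	/* branch to \fault */
		}
		return true;		/* label 3: the lock stays held */
	}
#endif
	/* Kernel access, or a non-SMP kernel (label 2:). */
	*pte = *ptp;
	return (*pte & PAGE_PRESENT) != 0;
}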

Patch

diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S
index 242c5ab65611..e5977187dee7 100644
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -431,8 +431,6 @@ 
 	extru		\va,31-PAGE_SHIFT,ASM_BITS_PER_PTE,\index
 	dep		%r0,31,PAGE_SHIFT,\pmd  /* clear offset */
 	shladd		\index,BITS_PER_PTE_ENTRY,\pmd,\pmd /* pmd is now pte */
-	LDREG		%r0(\pmd),\pte
-	bb,>=,n		\pte,_PAGE_PRESENT_BIT,\fault
 	.endm
 
 	/* Look up PTE in a 3-Level scheme.
@@ -463,7 +461,7 @@ 
 	L2_ptep		\pgd,\pte,\index,\va,\fault
 	.endm
 
-	/* Acquire pa_tlb_lock lock and recheck page is still present. */
+	/* Acquire pa_tlb_lock lock and check page is present. */
 	.macro		tlb_lock	spc,ptp,pte,tmp,tmp1,fault
 #ifdef CONFIG_SMP
 	cmpib,COND(=),n	0,\spc,2f
@@ -472,11 +470,13 @@ 
 	cmpib,COND(=)	0,\tmp1,1b
 	nop
 	LDREG		0(\ptp),\pte
-	bb,<,n		\pte,_PAGE_PRESENT_BIT,2f
+	bb,<,n		\pte,_PAGE_PRESENT_BIT,3f
 	b		\fault
-	stw		 \spc,0(\tmp)
-2:
+	stw,ma		\spc,0(\tmp)
 #endif
+2:	LDREG		0(\ptp),\pte
+	bb,>=,n		\pte,_PAGE_PRESENT_BIT,\fault
+3:
 	.endm
 
 	/* Release pa_tlb_lock lock without reloading lock address. */
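
As a closing note on the first hunk: after this change L2_ptep only
computes the PTE's address and no longer touches memory.  A
simplified C analogue (illustrative names and constants; the real
shift amounts come from PAGE_SHIFT, ASM_BITS_PER_PTE and
BITS_PER_PTE_ENTRY) might look like:

#define PAGE_SHIFT	12		/* illustrative values only */
#define PTRS_PER_PTE	(1UL << 10)

/* The extru/dep/shladd sequence: index into the PTE page for va.
 * Assumes pmd already points at the base of the PTE page (the dep
 * clears the offset).  The former LDREG and _PAGE_PRESENT_BIT test
 * are gone; tlb_lock now performs the load and present check. */
static unsigned long *l2_ptep(unsigned long *pmd, unsigned long va)
{
	unsigned long index = (va >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);
	return pmd + index;
}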