parisc: Remove PTE load and fault check from L2_ptep macro

Message ID 20180923145518.GA9595@mx3210.localdomain (mailing list archive)
State Superseded, archived
Series parisc: Remove PTE load and fault check from L2_ptep macro

Commit Message

John David Anglin Sept. 23, 2018, 2:55 p.m. UTC
This change removes the PTE load and present check from the L2_ptep
macro.  The load and check for kernel pages are now done in the tlb_lock
macro.  This avoids loading and checking the PTE twice for user pages.
The load and check for user pages are now done inside the lock, so the
fault handler can't be called while the entry is being updated.

Signed-off-by: John David Anglin <dave.anglin@bell.net>
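
For reference, below is a minimal C sketch of the control flow the patch
produces.  It is an illustration only, not kernel code: the helper names
(pa_tlb_lock(), pa_tlb_unlock(), pte_present(), do_fault()) and the flat
pte_t type are assumptions standing in for the real spinlock and
page-table interfaces.  In the patch itself the whole sequence sits
inside #ifdef CONFIG_SMP, so the sketch models the SMP configuration.

	#include <stdbool.h>
	#include <stdio.h>

	typedef unsigned long pte_t;

	#define PTE_PRESENT 0x1UL	/* stand-in for _PAGE_PRESENT_BIT */

	static int tlb_lock_word;	/* stand-in for the pa_tlb_lock spinlock */

	static void pa_tlb_lock(void)   { tlb_lock_word = 1; }
	static void pa_tlb_unlock(void) { tlb_lock_word = 0; }

	static bool pte_present(pte_t pte) { return pte & PTE_PRESENT; }

	static void do_fault(void) { puts("-> fault handler"); }

	/*
	 * Before the patch, L2_ptep loaded and checked the PTE for every
	 * access, and tlb_lock reloaded and rechecked it for user accesses
	 * (spc != 0), so user pages paid for two loads and two checks.
	 * After the patch (modelled here), the PTE is loaded and checked
	 * exactly once: under the lock for user accesses, without it for
	 * kernel accesses (spc == 0).
	 */
	static bool lookup_pte(pte_t *ptp, unsigned long spc, pte_t *out)
	{
		pte_t pte;

		if (spc != 0) {			/* user: serialize with updaters */
			pa_tlb_lock();
			pte = *ptp;		/* single load, under the lock */
			if (!pte_present(pte)) {
				pa_tlb_unlock();	/* drop lock, then fault */
				do_fault();
				return false;
			}
			/* caller inserts the TLB entry, then drops the lock */
		} else {			/* kernel: no lock needed */
			pte = *ptp;		/* load + check moved out of L2_ptep */
			if (!pte_present(pte)) {
				do_fault();
				return false;
			}
		}
		*out = pte;
		return true;
	}

	int main(void)
	{
		pte_t present = 0x1000 | PTE_PRESENT, absent = 0;
		pte_t out;

		printf("user, present:  %d\n", lookup_pte(&present, 1, &out));
		printf("user, absent:   %d\n", lookup_pte(&absent, 1, &out));
		printf("kernel, absent: %d\n", lookup_pte(&absent, 0, &out));
		return 0;
	}

The point of the restructuring is visible here: each path performs
exactly one PTE load and one present check, and for user accesses both
happen while the lock is held, so a concurrent update cannot race the
fault decision.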

Patch

diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S
index 1b4732e20137..be1ef02a4799 100644
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -430,8 +430,6 @@ 
 	extru		\va,31-PAGE_SHIFT,ASM_BITS_PER_PTE,\index
 	dep		%r0,31,PAGE_SHIFT,\pmd  /* clear offset */
 	shladd		\index,BITS_PER_PTE_ENTRY,\pmd,\pmd /* pmd is now pte */
-	LDREG		%r0(\pmd),\pte
-	bb,>=,n		\pte,_PAGE_PRESENT_BIT,\fault
 	.endm
 
 	/* Look up PTE in a 3-Level scheme.
@@ -462,7 +460,7 @@ 
 	L2_ptep		\pgd,\pte,\index,\va,\fault
 	.endm
 
-	/* Acquire pa_tlb_lock lock and recheck page is still present. */
+	/* Acquire pa_tlb_lock lock and check page is present. */
 	.macro		tlb_lock	spc,ptp,pte,tmp,tmp1,fault
 #ifdef CONFIG_SMP
 	cmpib,COND(=),n	0,\spc,2f
@@ -471,10 +469,12 @@ 
 	cmpib,COND(=)	0,\tmp1,1b
 	nop
 	LDREG		0(\ptp),\pte
-	bb,<,n		\pte,_PAGE_PRESENT_BIT,2f
+	bb,<,n		\pte,_PAGE_PRESENT_BIT,3f
 	b		\fault
 	stw		 \spc,0(\tmp)
-2:
+2:	LDREG		0(\ptp),\pte
+	bb,>=,n		\pte,_PAGE_PRESENT_BIT,\fault
+3:
 #endif
 	.endm