--- a/arch/arm64/include/asm/tlbflush.h
+++ b/arch/arm64/include/asm/tlbflush.h
@@ -46,11 +46,6 @@
#define __tlbi(op, ...) __TLBI_N(op, ##__VA_ARGS__, 1, 0)
-#define __tlbi_user(op, arg) do { \
- if (arm64_kernel_unmapped_at_el0()) \
- __tlbi(op, (arg) | USER_ASID_FLAG); \
-} while (0)
-
/* This macro creates a properly formatted VA operand for the TLBI */
#define __TLBI_VADDR(addr, asid) \
({ \
@@ -87,6 +82,8 @@
} \
\
__tlbi(op, arg); \
+ if (arm64_kernel_unmapped_at_el0()) \
+ __tlbi(op, (arg) | USER_ASID_FLAG); \
} while(0)
/*
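
For reference, the complete __tlbi_level helper after this hunk would read roughly as below. The TTL-encoding half (the ARM64_HAS_ARMv8_4_TTL capability, get_trans_granule() and TLBI_TTL_MASK) comes from the earlier patches in this series and is only sketched here from the mainline form; the last two statements are the user-ASID invalidation added above:

#define __tlbi_level(op, addr, level) do {				\
	u64 arg = addr;							\
									\
	/* encode the TTL hint (granule + level) when supported */	\
	if (cpus_have_const_cap(ARM64_HAS_ARMv8_4_TTL) && level) {	\
		u64 ttl = level & 3;					\
									\
		ttl |= get_trans_granule() << 2;			\
		arg &= ~TLBI_TTL_MASK;					\
		arg |= FIELD_PREP(TLBI_TTL_MASK, ttl);			\
	}								\
									\
	__tlbi(op, arg);						\
	/* added by this patch: mirror the flush for the user ASID */	\
	if (arm64_kernel_unmapped_at_el0())				\
		__tlbi(op, (arg) | USER_ASID_FLAG);			\
} while(0)
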
@@ -179,8 +176,7 @@ static inline void flush_tlb_mm(struct mm_struct *mm)
unsigned long asid = __TLBI_VADDR(0, ASID(mm));
dsb(ishst);
- __tlbi(aside1is, asid);
- __tlbi_user(aside1is, asid);
+ __tlbi_level(aside1is, asid, 0);
dsb(ish);
}
@@ -190,8 +186,7 @@ static inline void flush_tlb_page_nosync(struct vm_area_struct *vma,
unsigned long addr = __TLBI_VADDR(uaddr, ASID(vma->vm_mm));
dsb(ishst);
- __tlbi(vale1is, addr);
- __tlbi_user(vale1is, addr);
+ __tlbi_level(vale1is, addr, 0);
}
static inline void flush_tlb_page(struct vm_area_struct *vma,
@@ -231,11 +226,9 @@ static inline void __flush_tlb_range(struct vm_area_struct *vma,
dsb(ishst);
for (addr = start; addr < end; addr += stride) {
if (last_level) {
- __tlbi(vale1is, addr);
- __tlbi_user(vale1is, addr);
+ __tlbi_level(vale1is, addr, 0);
} else {
- __tlbi(vae1is, addr);
- __tlbi_user(vae1is, addr);
+ __tlbi_level(vae1is, addr, 0);
}
}
dsb(ish);
ARMv8.4-TTL provides the TTL field in the TLBI instructions to indicate the level of the translation table walk holding the leaf entry for the address being invalidated. This patch uses __tlbi_level to replace the __tlbi/__tlbi_user pairs in the Stage-1 helpers, and sets the default level to 0 (i.e. no level hint is supplied to the hardware).

Signed-off-by: Zhenyu Ye <yezhenyu2@huawei.com>
---
 arch/arm64/include/asm/tlbflush.h | 19 ++++++-------------
 1 file changed, 6 insertions(+), 13 deletions(-)
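
All call sites converted here pass level 0, which leaves the TTL field clear, so behaviour is unchanged until callers that actually know the level start passing it down. As an illustration only (the helper name below is hypothetical and not part of this patch), a caller that knows the leaf entry sits in a level-2 (PMD) table could hint that to the hardware like so:

/*
 * Illustration only, not part of this patch: a caller that knows the
 * leaf entry for @uaddr lives in a level-2 (PMD) table could pass the
 * level down so the CPU can scope the TLB invalidation walk.
 */
static inline void example_flush_pmd_entry(struct mm_struct *mm,
					    unsigned long uaddr)
{
	unsigned long addr = __TLBI_VADDR(uaddr, ASID(mm));

	dsb(ishst);
	__tlbi_level(vale1is, addr, 2);	/* last level, leaf at level 2 */
	dsb(ish);
}

The rest of the series is expected to wire real levels through the mmu_gather/flush_tlb_range paths rather than adding open-coded helpers like this.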