@@ -240,18 +240,31 @@ asm_mmu_enable:
.globl asm_mmu_disable
asm_mmu_disable:
+ /*
+ * A test can change the memory attributes of a memory location to
+ * Device or Inner Non-cacheable. In that case the barrier is
+ * required to prevent previous memory accesses from being reordered
+ * with respect to the cache maintenance below.
+ */
+ dmb sy
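+ /*
+ * Clean the stack currently in use to the PoC, so that its contents
+ * are visible to the non-cacheable accesses performed after the MMU
+ * is turned off. The stack is assumed to be THREAD_SIZE aligned; its
+ * bounds are computed from SP below.
+ */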
+ mov r0, sp
+ lsr r0, #THREAD_SHIFT
+ lsl r0, #THREAD_SHIFT
+ add r1, r0, #THREAD_SIZE
+ dcache_by_line_op dccmvac, sy, r0, r1, r2, r3
+
/* SCTLR */
mrc p15, 0, r0, c1, c0, 0
bic r0, #CR_M
mcr p15, 0, r0, c1, c0, 0
isb
- ldr r0, =__phys_offset
- ldr r0, [r0]
- ldr r1, =__phys_end
- ldr r1, [r1]
- dcache_by_line_op dccimvac, sy, r0, r1, r2, r3
-
mov pc, lr
/*
@@ -288,18 +288,29 @@ asm_mmu_enable:
.globl asm_mmu_disable
asm_mmu_disable:
+ /*
+ * A test can change the memory attributes of a memory location to
+ * Device or Inner Non-cacheable. In that case the barrier is
+ * required to prevent previous memory accesses from being reordered
+ * with respect to the cache maintenance below.
+ */
+ dmb sy
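+ /*
+ * Clean the stack currently in use to the PoC, so that its contents
+ * are visible to the non-cacheable accesses performed after the MMU
+ * is turned off. The stack is assumed to be THREAD_SIZE aligned; its
+ * bounds are computed from SP below.
+ */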
+ mov x9, sp
+ and x9, x9, #THREAD_MASK
+ add x10, x9, #THREAD_SIZE
+ dcache_by_line_op cvac, sy, x9, x10, x11, x12
+
mrs x0, sctlr_el1
bic x0, x0, SCTLR_EL1_M
msr sctlr_el1, x0
isb
- /* Clean + invalidate the entire memory */
- adrp x0, __phys_offset
- ldr x0, [x0, :lo12:__phys_offset]
- adrp x1, __phys_end
- ldr x1, [x1, :lo12:__phys_end]
- dcache_by_line_op civac, sy, x0, x1, x2, x3
-
ret
/*