diff --git a/arm/Makefile.arm b/arm/Makefile.arm
--- a/arm/Makefile.arm
+++ b/arm/Makefile.arm
@@ -25,7 +25,9 @@ endif
define arch_elf_check =
endef
-cstart.o = $(TEST_DIR)/cstart.o
+asmobjs = $(TEST_DIR)/cstart.o
+asmobjs += lib/arm/cache.o
+
cflatobjs += lib/arm/spinlock.o
cflatobjs += lib/arm/processor.o
cflatobjs += lib/arm/stack.o
diff --git a/arm/Makefile.arm64 b/arm/Makefile.arm64
--- a/arm/Makefile.arm64
+++ b/arm/Makefile.arm64
@@ -20,7 +20,9 @@ define arch_elf_check =
$(error $(1) has unsupported reloc types))
endef
-cstart.o = $(TEST_DIR)/cstart64.o
+asmobjs = $(TEST_DIR)/cstart64.o
+asmobjs += lib/arm64/cache.o
+
cflatobjs += lib/arm64/processor.o
cflatobjs += lib/arm64/spinlock.o
cflatobjs += lib/arm64/gic-v3-its.o lib/arm64/gic-v3-its-cmd.o
diff --git a/arm/Makefile.common b/arm/Makefile.common
--- a/arm/Makefile.common
+++ b/arm/Makefile.common
@@ -60,7 +60,7 @@ eabiobjs = lib/arm/eabi_compat.o
FLATLIBS = $(libcflat) $(LIBFDT_archive) $(libeabi)
%.elf: LDFLAGS = -nostdlib $(arch_LDFLAGS)
-%.elf: %.o $(FLATLIBS) $(SRCDIR)/arm/flat.lds $(cstart.o)
+%.elf: %.o $(FLATLIBS) $(SRCDIR)/arm/flat.lds $(asmobjs)
$(CC) $(CFLAGS) -c -o $(@:.elf=.aux.o) $(SRCDIR)/lib/auxinfo.c \
-DPROGNAME=\"$(@:.elf=.flat)\" -DAUXFLAGS=$(AUXFLAGS)
$(LD) $(LDFLAGS) -o $@ -T $(SRCDIR)/arm/flat.lds \
@@ -81,4 +81,4 @@ arm_clean: asm_offsets_clean
$(TEST_DIR)/.*.d lib/arm/.*.d
generated-files = $(asm-offsets)
-$(tests-all:.flat=.o) $(cstart.o) $(cflatobjs): $(generated-files)
+$(tests-all:.flat=.o) $(asmobjs) $(cflatobjs): $(generated-files)
diff --git a/lib/arm/asm/assembler.h b/lib/arm/asm/assembler.h
--- a/lib/arm/asm/assembler.h
+++ b/lib/arm/asm/assembler.h
@@ -41,7 +41,9 @@
.ifc \op, dccimvac
mcr p15, 0, \addr, c7, c14, 1
.else
- .err
+ .ifc \op, dccmvac
+ mcr p15, 0, \addr, c7, c10, 1
+ .endif
.endif
add \addr, \addr, \tmp1
cmp \addr, \end
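For reference, the two CP15 operations the extended dcache_by_line_op macro can now emit, as a minimal C sketch with inline assembly (the helper names dc_cmvac/dc_cimvac are illustrative, not part of the patch):

	/* DCCMVAC: clean D-cache line by VA to PoC (p15, 0, c7, c10, 1) */
	static inline void dc_cmvac(unsigned long addr)
	{
		asm volatile("mcr p15, 0, %0, c7, c10, 1" :: "r" (addr) : "memory");
	}

	/* DCCIMVAC: clean + invalidate D-cache line by VA to PoC (p15, 0, c7, c14, 1) */
	static inline void dc_cimvac(unsigned long addr)
	{
		asm volatile("mcr p15, 0, %0, c7, c14, 1" :: "r" (addr) : "memory");
	}

With the dccmvac case added, the macro can implement a clean-only routine like dcache_clean_poc below, not just clean+invalidate.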
diff --git a/lib/arm/asm/cacheflush.h b/lib/arm/asm/cacheflush.h
new file mode 100644
--- /dev/null
+++ b/lib/arm/asm/cacheflush.h
@@ -0,0 +1 @@
+#include "../../arm64/asm/cacheflush.h"
diff --git a/lib/arm/asm/mmu.h b/lib/arm/asm/mmu.h
--- a/lib/arm/asm/mmu.h
+++ b/lib/arm/asm/mmu.h
@@ -45,12 +45,6 @@ static inline void flush_tlb_page(unsigned long vaddr)
isb();
}
-static inline void flush_dcache_addr(unsigned long vaddr)
-{
- /* DCCIMVAC */
- asm volatile("mcr p15, 0, %0, c7, c14, 1" :: "r" (vaddr));
-}
-
#include <asm/mmu-api.h>
#endif /* _ASMARM_MMU_H_ */
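Callers of the removed flush_dcache_addr() keep the same clean+invalidate-to-PoC semantics through the new interface; a minimal sketch of the migration (flush_line is a hypothetical caller):

	#include <asm/cacheflush.h>

	/* what a former flush_dcache_addr(vaddr) call site becomes */
	void flush_line(unsigned long vaddr)
	{
		dcache_clean_inval_addr_poc(vaddr);	/* DCCIMVAC */
	}

Unlike the removed inline asm, the new helper also issues the dmb/dsb barriers, which previously had to be supplied by the caller.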
diff --git a/lib/arm/cache.S b/lib/arm/cache.S
new file mode 100644
--- /dev/null
+++ b/lib/arm/cache.S
@@ -0,0 +1,89 @@
+/*
+ * Based on arch/arm64/mm/cache.S
+ *
+ * Copyright (C) 2001 Deep Blue Solutions Ltd.
+ * Copyright (C) 2012, 2022 ARM Ltd.
+ *
+ * This work is licensed under the terms of the GNU GPL, version 2.
+ */
+
+#include <asm/assembler.h>
+
+/*
+ * dcache_inval_poc(start, end)
+ *
+ * Ensure that any D-cache lines for the interval [start, end)
+ * are invalidated. Any partial lines at the ends of the interval are
+ * cleaned to PoC instead to prevent data loss.
+ *
+ * - start - start address of region
+ * - end - end address of region
+ */
+.global dcache_inval_poc
+dcache_inval_poc:
+ dmb sy
+ dcache_line_size r2, r3
+ sub r3, r2, #1
+ tst r1, r3 // end cache line aligned?
+ bic r1, r1, r3
+ beq 1f
+ // DCCIMVAC
+ mcr p15, 0, r1, c7, c14, 1 // clean + invalidate end cache line
+1: tst r0, r3 // start cache line aligned?
+ bic r0, r0, r3
+ beq 2f
+ mcr p15, 0, r0, c7, c14, 1 // clean + invalidate start cache line
+ b 3f
+ // DCIMVAC
+2: mcr p15, 0, r0, c7, c6, 1 // invalidate current cache line
+3: add r0, r0, r2
+ cmp r0, r1
+ blo 2b
+ dsb sy
+ mov pc, lr
+
+/*
+ * dcache_clean_poc(start, end)
+ *
+ * Ensure that any D-cache lines for the interval [start, end)
+ * are cleaned to the PoC.
+ *
+ * - start - start address of region
+ * - end - end address of region
+ */
+.global dcache_clean_poc
+dcache_clean_poc:
+ dmb sy
+ dcache_by_line_op dccmvac, sy, r0, r1, r2, r3
+ mov pc, lr
+
+/*
+ * dcache_clean_addr_poc(addr)
+ *
+ * Ensure that the D-cache line for address addr is cleaned to the PoC.
+ *
+ * - addr - the address
+ */
+.global dcache_clean_addr_poc
+dcache_clean_addr_poc:
+ dmb sy
+ // DCCMVAC
+ mcr p15, 0, r0, c7, c10, 1
+ dsb sy
+ mov pc, lr
+
+/*
+ * dcache_clean_inval_addr_poc(addr)
+ *
+ * Ensure that the D-cache line for address addr is cleaned and invalidated
+ * to the PoC.
+ *
+ * - addr - the address
+ */
+.global dcache_clean_inval_addr_poc
+dcache_clean_inval_addr_poc:
+ dmb sy
+ // DCCIMVAC
+ mcr p15, 0, r0, c7, c14, 1
+ dsb sy
+ mov pc, lr
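The invalidate routine is the subtle one: partial lines at either end must be cleaned as well, or invalidating them would discard neighbouring data sharing the cache line. A hedged C rendering of the same algorithm (clean_and_inval_line/inval_line are hypothetical stand-ins for DCCIMVAC/DCIMVAC, the dmb/dsb barriers are omitted, and the loop structure is lightly simplified from the assembly's do-while):

	/* hypothetical per-line primitives standing in for DCCIMVAC/DCIMVAC */
	extern void clean_and_inval_line(unsigned long addr);
	extern void inval_line(unsigned long addr);

	void dcache_inval_poc_sketch(unsigned long start, unsigned long end,
				     unsigned long line_size)
	{
		unsigned long mask = line_size - 1;

		if (end & mask)			/* partial line at the end? */
			clean_and_inval_line(end & ~mask);
		end &= ~mask;

		if (start & mask) {		/* partial line at the start? */
			start &= ~mask;
			clean_and_inval_line(start);
			start += line_size;
		}

		while (start < end) {		/* whole lines: plain invalidate */
			inval_line(start);
			start += line_size;
		}
	}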
diff --git a/lib/arm64/asm/cacheflush.h b/lib/arm64/asm/cacheflush.h
new file mode 100644
--- /dev/null
+++ b/lib/arm64/asm/cacheflush.h
@@ -0,0 +1,32 @@
+#ifndef _ASMARM64_CACHEFLUSH_H_
+#define _ASMARM64_CACHEFLUSH_H_
+/*
+ * Based on arch/arm64/include/asm/cacheflush.h
+ *
+ * Copyright (C) 1999-2002 Russell King.
+ * Copyright (C) 2012,2022 ARM Ltd.
+ *
+ * This work is licensed under the terms of the GNU GPL, version 2.
+ */
+
+#include <asm/page.h>
+
+extern void dcache_clean_addr_poc(unsigned long addr);
+extern void dcache_clean_inval_addr_poc(unsigned long addr);
+
+extern void dcache_inval_poc(unsigned long start, unsigned long end);
+extern void dcache_clean_poc(unsigned long start, unsigned long end);
+
+static inline void dcache_inval_page_poc(unsigned long page_addr)
+{
+ assert(PAGE_ALIGN(page_addr) == page_addr);
+ dcache_inval_poc(page_addr, page_addr + PAGE_SIZE);
+}
+
+static inline void dcache_clean_page_poc(unsigned long page_addr)
+{
+ assert(PAGE_ALIGN(page_addr) == page_addr);
+ dcache_clean_poc(page_addr, page_addr + PAGE_SIZE);
+}
+
+#endif /* _ASMARM64_CACHEFLUSH_H_ */
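A hedged usage sketch for the new header (the buffer and the device scenario are illustrative only): a test sharing a page with a non-coherent observer cleans its writes out to the PoC first, then invalidates before reading data written from the other side:

	#include <asm/cacheflush.h>
	#include <asm/page.h>

	static char buf[PAGE_SIZE] __attribute__((aligned(PAGE_SIZE)));

	void share_with_noncoherent_observer(void)
	{
		/* make our writes visible beyond the caches */
		dcache_clean_page_poc((unsigned long)buf);

		/* ... the other side writes to the page ... */

		/* drop stale lines before reading the other side's data */
		dcache_inval_page_poc((unsigned long)buf);
	}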
diff --git a/lib/arm64/asm/mmu.h b/lib/arm64/asm/mmu.h
--- a/lib/arm64/asm/mmu.h
+++ b/lib/arm64/asm/mmu.h
@@ -28,11 +28,6 @@ static inline void flush_tlb_page(unsigned long vaddr)
isb();
}
-static inline void flush_dcache_addr(unsigned long vaddr)
-{
- asm volatile("dc civac, %0" :: "r" (vaddr));
-}
-
#include <asm/mmu-api.h>
#endif /* _ASMARM64_MMU_H_ */
diff --git a/lib/arm64/cache.S b/lib/arm64/cache.S
new file mode 100644
--- /dev/null
+++ b/lib/arm64/cache.S
@@ -0,0 +1,85 @@
+/*
+ * Based on arch/arm64/mm/cache.S
+ *
+ * Copyright (C) 2001 Deep Blue Solutions Ltd.
+ * Copyright (C) 2012, 2022 ARM Ltd.
+ *
+ * This work is licensed under the terms of the GNU GPL, version 2.
+ */
+
+#include <asm/assembler.h>
+
+/*
+ * dcache_inval_poc(start, end)
+ *
+ * Ensure that any D-cache lines for the interval [start, end)
+ * are invalidated. Any partial lines at the ends of the interval are
+ * cleaned to PoC instead to prevent data loss.
+ *
+ * - start - start address of region
+ * - end - end address of region
+ */
+.global dcache_inval_poc
+dcache_inval_poc:
+ dmb sy
+ raw_dcache_line_size x2, x3
+ sub x3, x2, #1
+ tst x1, x3 // end cache line aligned?
+ bic x1, x1, x3
+ b.eq 1f
+ dc civac, x1 // clean + invalidate end cache line
+1: tst x0, x3 // start cache line aligned?
+ bic x0, x0, x3
+ b.eq 2f
+ dc civac, x0 // clean + invalidate start cache line
+ b 3f
+2: dc ivac, x0 // invalidate current cache line
+3: add x0, x0, x2
+ cmp x0, x1
+ b.lo 2b
+ dsb sy
+ ret
+
+/*
+ * dcache_clean_poc(start, end)
+ *
+ * Ensure that any D-cache lines for the interval [start, end)
+ * are cleaned to the PoC.
+ *
+ * - start - start address of region
+ * - end - end address of region
+ */
+.global dcache_clean_poc
+dcache_clean_poc:
+ dmb sy
+ dcache_by_line_op cvac, sy, x0, x1, x2, x3
+ ret
+
+/*
+ * dcache_clean_addr_poc(addr)
+ *
+ * Ensure that the D-cache line for address addr is cleaned to the PoC.
+ *
+ * - addr - the address
+ */
+.global dcache_clean_addr_poc
+dcache_clean_addr_poc:
+ dmb sy
+ dc cvac, x0
+ dsb sy
+ ret
+
+/*
+ * dcache_clean_inval_addr_poc(addr)
+ *
+ * Ensure that the D-cache line for address addr is cleaned and invalidated
+ * to the PoC.
+ *
+ * - addr - the address
+ */
+.global dcache_clean_inval_addr_poc
+dcache_clean_inval_addr_poc:
+ dmb sy
+ dc civac, x0
+ dsb sy
+ ret
Add C functions for doing cache maintenance, which will be used for
implementing the necessary maintenance when kvm-unit-tests boots.

Signed-off-by: Alexandru Elisei <alexandru.elisei@arm.com>
---
 arm/Makefile.arm           |  4 +-
 arm/Makefile.arm64         |  4 +-
 arm/Makefile.common        |  4 +-
 lib/arm/asm/assembler.h    |  4 +-
 lib/arm/asm/cacheflush.h   |  1 +
 lib/arm/asm/mmu.h          |  6 ---
 lib/arm/cache.S            | 89 ++++++++++++++++++++++++++++++++++++++
 lib/arm64/asm/cacheflush.h | 32 ++++++++++++++
 lib/arm64/asm/mmu.h        |  5 ---
 lib/arm64/cache.S          | 85 ++++++++++++++++++++++++++++++++++++
 10 files changed, 218 insertions(+), 16 deletions(-)
 create mode 100644 lib/arm/asm/cacheflush.h
 create mode 100644 lib/arm/cache.S
 create mode 100644 lib/arm64/asm/cacheflush.h
 create mode 100644 lib/arm64/cache.S
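As a hedged illustration of the boot-time use the commit message anticipates (the symbols and the helper are hypothetical, not part of this patch): data written while the MMU and caches were off must not be shadowed by stale clean lines once caching is enabled, so the covering range is invalidated first:

	#include <asm/cacheflush.h>

	/* hypothetical linker symbols bounding data written with caches off */
	extern char uncached_data_start[], uncached_data_end[];

	void prepare_for_mmu_enable(void)
	{
		dcache_inval_poc((unsigned long)uncached_data_start,
				 (unsigned long)uncached_data_end);
	}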