@@ -1,6 +1,6 @@
-obj-bin-y += head.o cbundle.o reloc-trampoline64.o
+obj-bin-y += head.o cbundle.o reloc-trampoline64.o setup-pages64.o
-head-bin-objs := cmdline.o reloc.o reloc-trampoline.o
+head-bin-objs := cmdline.o reloc.o reloc-trampoline.o setup-pages.o
nocov-y += $(head-bin-objs)
noubsan-y += $(head-bin-objs)
@@ -43,7 +43,7 @@ $(obj)/cbundle.o: $(head-bin-objs) $(obj)/build32.other.lds $(obj)/build32.final
$(PYTHON) $(srctree)/tools/make_output \
--script $(obj)/build32.final.lds \
--bin1 $@.1.bin --bin2 $@.2.bin \
- --map $(obj)/cbundle.map --exports cmdline_parse_early,reloc,reloc_trampoline32 \
+ --map $(obj)/cbundle.map --exports cmdline_parse_early,reloc,reloc_trampoline32,setup_pages32 \
--section-header '.section .init.text, "ax", @progbits' \
--output $(obj)/cbundle.s
$(CC) -c $(obj)/cbundle.s -o $@.tmp
@@ -47,6 +47,13 @@ SECTIONS
DECLARE_IMPORT(__trampoline_rel_stop);
DECLARE_IMPORT(__trampoline_seg_start);
DECLARE_IMPORT(__trampoline_seg_stop);
+ DECLARE_IMPORT(l2_xenmap);
+ DECLARE_IMPORT(l2_directmap);
+ DECLARE_IMPORT(l2_bootmap);
+ DECLARE_IMPORT(l3_bootmap);
+ DECLARE_IMPORT(_start);
+ DECLARE_IMPORT(_end);
+ DECLARE_IMPORT(xen_phys_start);
. = . + GAP;
*(.text)
*(.text.*)
@@ -763,48 +763,8 @@ trampoline_setup:
test $(1 << L2_PAGETABLE_SHIFT) - 1, %eax
jnz .Lnot_aligned
- /* Map Xen into the higher mappings using 2M superpages. */
- lea _PAGE_PSE + PAGE_HYPERVISOR_RWX + sym_esi(_start), %eax
- mov $sym_offs(_start), %ecx /* %eax = PTE to write ^ */
- mov $sym_offs(_end - 1), %edx
- shr $L2_PAGETABLE_SHIFT, %ecx /* %ecx = First slot to write */
- shr $L2_PAGETABLE_SHIFT, %edx /* %edx = Final slot to write */
-
-1: mov %eax, sym_offs(l2_xenmap)(%esi, %ecx, 8)
- add $1, %ecx
- add $1 << L2_PAGETABLE_SHIFT, %eax
-
- cmp %edx, %ecx
- jbe 1b
-
- /*
- * Map Xen into the directmap (needed for early-boot pagetable
- * handling/walking), and identity map Xen into bootmap (needed for
- * the transition into long mode), using 2M superpages.
- */
- lea sym_esi(_start), %ecx
- lea -1 + sym_esi(_end), %edx
- lea _PAGE_PSE + PAGE_HYPERVISOR_RWX(%ecx), %eax /* PTE to write. */
- shr $L2_PAGETABLE_SHIFT, %ecx /* First slot to write. */
- shr $L2_PAGETABLE_SHIFT, %edx /* Final slot to write. */
-
-1: mov %eax, sym_offs(l2_bootmap) (%esi, %ecx, 8)
- mov %eax, sym_offs(l2_directmap)(%esi, %ecx, 8)
- add $1, %ecx
- add $1 << L2_PAGETABLE_SHIFT, %eax
-
- cmp %edx, %ecx
- jbe 1b
-
- /* Map 4x l2_bootmap[] into l3_bootmap[0...3] */
- lea __PAGE_HYPERVISOR + sym_esi(l2_bootmap), %eax
- mov %eax, 0 + sym_esi(l3_bootmap)
- add $PAGE_SIZE, %eax
- mov %eax, 8 + sym_esi(l3_bootmap)
- add $PAGE_SIZE, %eax
- mov %eax, 16 + sym_esi(l3_bootmap)
- add $PAGE_SIZE, %eax
- mov %eax, 24 + sym_esi(l3_bootmap)
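+ /* Build l2_xenmap[], l2_directmap[], l2_bootmap[] and l3_bootmap[] in C. */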
+ call setup_pages32
/* Map l1_bootmap[] into l2_bootmap[0]. */
lea __PAGE_HYPERVISOR + sym_esi(l1_bootmap), %eax
new file mode 100644
@@ -0,0 +1,127 @@
+#include <xen/stdint.h>
+#include <xen/compiler.h>
+#include <xen/config.h>
+#ifndef __i386__
+#include <xen/mm.h>
+#endif
+
+#if defined(__i386__)
+
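+/*
+ * These symbols are resolved via the DECLARE_IMPORT() entries added to the
+ * 32-bit linker script; the hidden visibility presumably keeps the references
+ * direct rather than going through a GOT.
+ */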
+#pragma GCC visibility push(hidden)
+extern char _start[], _end[];
+extern uint64_t l2_xenmap[512], l3_bootmap[512];
+extern uint64_t l2_directmap[4 * 512], l2_bootmap[4 * 512];
+extern unsigned long xen_phys_start;
+#pragma GCC visibility pop
+
+#define _PAGE_PRESENT 0x001
+#define _PAGE_RW 0x002
+#define _PAGE_ACCESSED 0x020
+#define _PAGE_DIRTY 0x040
+#define _PAGE_PSE 0x080
+#define _PAGE_GLOBAL 0x100
+
+#define PAGE_HYPERVISOR PAGE_HYPERVISOR_RW
+#define PAGE_HYPERVISOR_RW (__PAGE_HYPERVISOR_RW | _PAGE_GLOBAL)
+#define __PAGE_HYPERVISOR_RW (__PAGE_HYPERVISOR_RO | _PAGE_DIRTY | _PAGE_RW)
+/* TODO: NX is not wired up in this early-boot environment yet; treat it as 0. */
+#define _PAGE_NX 0
+#define __PAGE_HYPERVISOR_RO (_PAGE_PRESENT | _PAGE_ACCESSED | _PAGE_NX)
+#define PAGE_HYPERVISOR_RWX (__PAGE_HYPERVISOR | _PAGE_GLOBAL)
+#define __PAGE_HYPERVISOR (__PAGE_HYPERVISOR_RX | _PAGE_DIRTY | _PAGE_RW)
+#define __PAGE_HYPERVISOR_RX (_PAGE_PRESENT | _PAGE_ACCESSED)
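+/*
+ * NB: with _PAGE_NX forced to 0 above, PAGE_HYPERVISOR_RWX and
+ * PAGE_HYPERVISOR both evaluate to 0x163, so the RWX vs RW split
+ * below is a no-op until NX support is added.
+ */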
+
+#define L2_PAGETABLE_SHIFT 21
+#define L2_PAGETABLE_ENTRIES 512
+#define PAGE_SIZE 4096
+#define l2_table_offset(a) (((a) >> L2_PAGETABLE_SHIFT) & (L2_PAGETABLE_ENTRIES - 1))
+#define l2e_from_paddr(a,f) ((a) | put_pte_flags(f))
+#define l3e_from_paddr(a,f) ((a) | put_pte_flags(f))
+#define l2e_add_flags(x, flags) ((x) |= put_pte_flags(flags))
+typedef uint64_t l2_pgentry_t;
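+/*
+ * Local stand-in for Xen's put_pte_flags(): the low 12 flag bits stay in
+ * place, while flag bits 12 and up shift into PTE bits 52 and up (a flag in
+ * bit 23 would become NX, bit 63).  E.g. put_pte_flags(0x81) == 0x81.
+ */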
+static inline uint64_t put_pte_flags(unsigned int x)
+{
+ return (((uint64_t)x & ~0xfff) << 40) | (x & 0xfff);
+}
+
+void setup_pages32(void)
+#elif defined (__x86_64__)
+void setup_pages64(void)
+#else
+#error Unknown architecture
+#endif
+{
+ unsigned int i;
+
+ /*
+ * Map Xen into the higher mappings, using 2M superpages.
+ *
+ * NB: We are currently in physical mode, so a RIP-relative relocation
+ * against _start/_end results in our arbitrary placement by the bootloader
+ * in memory, rather than the intended high mappings position. Subtract
+ * xen_phys_start to get the appropriate slots in l2_xenmap[].
+ */
+ for ( i = l2_table_offset((unsigned long)_start - xen_phys_start);
+ i <= l2_table_offset((unsigned long)_end - 1 - xen_phys_start); ++i )
+ l2_xenmap[i] =
+ l2e_from_paddr(xen_phys_start + (i << L2_PAGETABLE_SHIFT),
+ PAGE_HYPERVISOR_RWX | _PAGE_PSE);
+
+ /* Check that there is at least 4G of mapping space in l2_*map[] */
+#ifndef __i386__
+ BUILD_BUG_ON((sizeof(l2_bootmap) / L2_PAGETABLE_ENTRIES) < 4);
+ BUILD_BUG_ON((sizeof(l2_directmap) / L2_PAGETABLE_ENTRIES) < 4);
+#endif
+
+ /* Initialize L3 boot-map page directory entries. */
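+ /* Each slot covers 1GiB, so slots 0-3 of l3_bootmap[] span the low 4GiB. */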
+ for ( i = 0; i < 4; ++i )
+ l3_bootmap[i] = l3e_from_paddr((unsigned long)l2_bootmap + i * PAGE_SIZE,
+ __PAGE_HYPERVISOR);
+ /*
+ * Map Xen into the directmap (needed for early-boot pagetable
+ * handling/walking), and identity map Xen into bootmap (needed for the
+ * transition from the EFI pagetables to Xen), using 2M superpages.
+ *
+ * NB: We are currently in physical mode, so a RIP-relative relocation
+ * against _start/_end gives their real positions in memory, which correspond
+ * to the appropriate l2 slots to map.
+ */
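+ /*
+ * l2_4G_offset() treats the four consecutive l2 pages (4 * 512 entries) as
+ * one flat array of 2MiB slots covering 0-4GiB; e.g. address 0xc0200000
+ * (3GiB + 2MiB) selects slot 1537.
+ */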
+#define l2_4G_offset(a) \
+ (((a) >> L2_PAGETABLE_SHIFT) & (4 * L2_PAGETABLE_ENTRIES - 1))
+
+ for ( i = l2_4G_offset((unsigned long)_start);
+ i <= l2_4G_offset((unsigned long)_end - 1); ++i )
+ {
+ l2_pgentry_t pte = l2e_from_paddr(i << L2_PAGETABLE_SHIFT,
+ __PAGE_HYPERVISOR | _PAGE_PSE);
+
+ l2_bootmap[i] = pte;
+
+ /* Bootmap RWX/Non-global. Directmap RW/Global. */
+ l2e_add_flags(pte, PAGE_HYPERVISOR);
+
+ l2_directmap[i] = pte;
+ }
+#undef l2_4G_offset
+}
new file mode 120000
@@ -0,0 +1 @@
+setup-pages.c
\ No newline at end of file
@@ -615,9 +615,11 @@ static void __init efi_arch_edid(EFI_HANDLE gop_handle)
#endif
}
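+/* setup_pages64() is implemented in the shared setup-pages.c source. */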
+void setup_pages64(void);
+
static void __init efi_arch_memory_setup(void)
{
- unsigned int i;
EFI_STATUS status;
/* Allocate space for trampoline (in first Mb). */
@@ -641,54 +642,7 @@ static void __init efi_arch_memory_setup(void)
if ( !efi_enabled(EFI_LOADER) )
return;
- /*
- * Map Xen into the higher mappings, using 2M superpages.
- *
- * NB: We are currently in physical mode, so a RIP-relative relocation
- * against _start/_end result in our arbitrary placement by the bootloader
- * in memory, rather than the intended high mappings position. Subtract
- * xen_phys_start to get the appropriate slots in l2_xenmap[].
- */
- for ( i = l2_table_offset((UINTN)_start - xen_phys_start);
- i <= l2_table_offset((UINTN)_end - 1 - xen_phys_start); ++i )
- l2_xenmap[i] =
- l2e_from_paddr(xen_phys_start + (i << L2_PAGETABLE_SHIFT),
- PAGE_HYPERVISOR_RWX | _PAGE_PSE);
-
- /* Check that there is at least 4G of mapping space in l2_*map[] */
- BUILD_BUG_ON((sizeof(l2_bootmap) / L2_PAGETABLE_ENTRIES) < 4);
- BUILD_BUG_ON((sizeof(l2_directmap) / L2_PAGETABLE_ENTRIES) < 4);
-
- /* Initialize L3 boot-map page directory entries. */
- for ( i = 0; i < 4; ++i )
- l3_bootmap[i] = l3e_from_paddr((UINTN)l2_bootmap + i * PAGE_SIZE,
- __PAGE_HYPERVISOR);
- /*
- * Map Xen into the directmap (needed for early-boot pagetable
- * handling/walking), and identity map Xen into bootmap (needed for the
- * transition from the EFI pagetables to Xen), using 2M superpages.
- *
- * NB: We are currently in physical mode, so a RIP-relative relocation
- * against _start/_end gets their real position in memory, which are the
- * appropriate l2 slots to map.
- */
-#define l2_4G_offset(a) \
- (((a) >> L2_PAGETABLE_SHIFT) & (4 * L2_PAGETABLE_ENTRIES - 1))
-
- for ( i = l2_4G_offset((UINTN)_start);
- i <= l2_4G_offset((UINTN)_end - 1); ++i )
- {
- l2_pgentry_t pte = l2e_from_paddr(i << L2_PAGETABLE_SHIFT,
- __PAGE_HYPERVISOR | _PAGE_PSE);
-
- l2_bootmap[i] = pte;
-
- /* Bootmap RWX/Non-global. Directmap RW/Global. */
- l2e_add_flags(pte, PAGE_HYPERVISOR);
-
- l2_directmap[i] = pte;
- }
-#undef l2_4G_offset
+ setup_pages64();
}
static void __init efi_arch_handle_module(const struct file *file,