@@ -5,6 +5,10 @@
*
* This work is licensed under the terms of the GNU LGPL, version 2.
*/
+#define __ASSEMBLY__
+#include "asm/asm-offsets.h"
+#include "asm/ptrace.h"
+#include "asm/cp15.h"
.arm
@@ -17,6 +21,13 @@ start:
* See the kernel doc Documentation/arm/Booting
*/
ldr sp, =stacktop
+ push {r0-r3}
+
+ /* set up vector table and mode stacks */
+ bl exceptions_init
+
+ /* complete setup */
+ pop {r0-r3}
bl setup
/* run the test */
@@ -27,9 +38,172 @@ start:
bl exit
b halt
+
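+/*
+ * set_mode_stack: advance \stack by one S_FRAME_SIZE frame, switch
+ * to \mode (with IRQs and FIQs masked), and install the result as
+ * that mode's banked sp. The caller restores cpsr afterwards.
+ */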
+.macro set_mode_stack mode, stack
+ add \stack, #S_FRAME_SIZE
+ msr cpsr_c, #(\mode | PSR_I_BIT | PSR_F_BIT)
+ mov sp, \stack
+.endm
+
+exceptions_init:
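+	/*
+	 * SCTLR.V = 0 selects the VBAR vector base; this assumes the
+	 * CPU implements the Security Extensions, so VBAR exists.
+	 */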
+ mrc p15, 0, r2, c1, c0, 0 @ read SCTLR
+ bic r2, #CR_V @ SCTLR.V := 0
+ mcr p15, 0, r2, c1, c0, 0 @ write SCTLR
+ ldr r2, =vector_table
+ mcr p15, 0, r2, c12, c0, 0 @ write VBAR
+
+ mrs r2, cpsr
+ ldr r1, =exception_stacks
+
+	/* the first frame of exception_stacks is reserved for svc mode
+	 * (see vector_svc) */
+ set_mode_stack UND_MODE, r1
+ set_mode_stack ABT_MODE, r1
+ set_mode_stack IRQ_MODE, r1
+ set_mode_stack FIQ_MODE, r1
+
+ msr cpsr_cxsf, r2 @ back to svc mode
+ mov pc, lr
+
.text
.globl halt
halt:
1: wfi
b 1b
+
+/*
+ * Vector stubs
+ * A simplified version of the Linux kernel implementation in
+ * arch/arm/kernel/entry-armv.S
+ *
+ * Each mode has an S_FRAME_SIZE-sized stack, initialized in
+ * exceptions_init.
+ */
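+/*
+ * \mode documents the mode the CPU is in when vector \vec is taken
+ * (exceptions_init already pointed that mode's banked sp at its
+ * frame); the macro body doesn't otherwise use it. \correction
+ * undoes the lr offset applied on exception entry (+4 for
+ * pabt/irq/fiq, +8 for dabt) before lr is saved as the parent PC.
+ */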
+.macro vector_stub, name, vec, mode, correction=0
+.align 5
+vector_\name:
+.if \correction
+ sub lr, lr, #\correction
+.endif
+ /*
+ * Save r0, r1, lr_<exception> (parent PC)
+ * and spsr_<exception> (parent CPSR)
+ */
+ str r0, [sp, #S_R0]
+ str r1, [sp, #S_R1]
+ str lr, [sp, #S_PC]
+ mrs r0, spsr
+ str r0, [sp, #S_PSR]
+
+ /* Prepare for SVC32 mode. */
+ mrs r0, cpsr
+ bic r0, #MODE_MASK
+ orr r0, #SVC_MODE
+ msr spsr_cxsf, r0
+
+ /* Branch to handler in SVC mode */
+ mov r0, #\vec
+ mov r1, sp
+ ldr lr, =vector_common
+ movs pc, lr
+.endm
+
+vector_stub rst, 0, UND_MODE
+vector_stub und, 1, UND_MODE
+vector_stub pabt, 3, ABT_MODE, 4
+vector_stub dabt, 4, ABT_MODE, 8
+vector_stub irq, 6, IRQ_MODE, 4
+vector_stub fiq, 7, FIQ_MODE, 4
+
+.align 5
+vector_svc:
+	/*
+	 * Save r0, r1, lr_<exception> (parent PC) and spsr_<exception>
+	 * (parent CPSR). Unlike the other vectors, an svc leaves us in
+	 * svc mode, whose sp is already in use, so use the first
+	 * exception_stacks frame (reserved in exceptions_init) as
+	 * scratch space instead.
+	 */
+ push { r1 }
+ ldr r1, =exception_stacks
+ str r0, [r1, #S_R0]
+ pop { r0 }
+ str r0, [r1, #S_R1]
+ str lr, [r1, #S_PC]
+ mrs r0, spsr
+ str r0, [r1, #S_PSR]
+
+ /*
+ * Branch to handler, still in SVC mode.
+ * r0 := 2 is the svc vector number.
+ */
+ mov r0, #2
+ ldr lr, =vector_common
+ mov pc, lr
+
+vector_common:
+ /* make room for pt_regs */
+ sub sp, #S_FRAME_SIZE
+ tst sp, #4 @ check stack alignment
+ subne sp, #4
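+	@ (AAPCS requires 8-byte sp alignment; the flags set by the
+	@ tst above are reused by the addne further down)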
+
+ /* store registers r0-r12 */
+	stmia sp, { r0-r12 }	@ r0/r1 slots get vec and frame ptr; fixed below
+
+ /* get registers saved in the stub */
+ ldr r2, [r1, #S_R0] @ r0
+ ldr r3, [r1, #S_R1] @ r1
+ ldr r4, [r1, #S_PC] @ lr_<exception> (parent PC)
+ ldr r5, [r1, #S_PSR] @ spsr_<exception> (parent CPSR)
+
+ /* fix r0 and r1 */
+ str r2, [sp, #S_R0]
+ str r3, [sp, #S_R1]
+
+	/* store sp_svc; if we came from usr mode we'll fix this up below */
+ add r6, sp, #S_FRAME_SIZE
+ addne r6, #4 @ stack wasn't aligned
+ str r6, [sp, #S_SP]
+
+ str lr, [sp, #S_LR] @ store lr_svc, fix later for usr mode
+ str r4, [sp, #S_PC] @ store lr_<exception>
+ str r5, [sp, #S_PSR] @ store spsr_<exception>
+
+ /* set ORIG_r0 */
+ mov r2, #-1
+ str r2, [sp, #S_OLD_R0]
+
+ /* if we were in usr mode then we need sp_usr and lr_usr instead */
+ and r1, r5, #MODE_MASK
+ cmp r1, #USR_MODE
+ bne 1f
+ add r1, sp, #S_SP
+ stmia r1, { sp,lr }^
+
+ /* Call the handler. r0 is the vector number, r1 := pt_regs */
+1: mov r1, sp
+ bl do_handle_exception
+
+ /*
+ * make sure we restore sp_svc on mode change. No need to
+ * worry about lr_svc though, as that gets clobbered on
+ * exception entry anyway.
+ */
+ str r6, [sp, #S_SP]
+
+ /* return from exception */
+ msr spsr_cxsf, r5
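+	/* ^ with pc in the list also restores cpsr from spsr */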
+ ldmia sp, { r0-pc }^
+
+.align 5
+vector_addrexcptn:
+ b vector_addrexcptn
+
+.section .text.ex
+.align 5
+vector_table:
+ b vector_rst
+ b vector_und
+ b vector_svc
+ b vector_pabt
+ b vector_dabt
+ b vector_addrexcptn @ should never happen
+ b vector_irq
+ b vector_fiq
@@ -3,7 +3,12 @@ SECTIONS
{
.text : { *(.init) *(.text) *(.text.*) }
. = ALIGN(4K);
- .data : { *(.data) }
+ .data : {
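+		/* one S_FRAME_SIZE frame per exception mode, carved up
+		 * by exceptions_init (first frame: svc) */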
+ exception_stacks = .;
+ . += 4K;
+ exception_stacks_end = .;
+ *(.data)
+ }
. = ALIGN(16);
.rodata : { *(.rodata) }
. = ALIGN(16);
@@ -7,6 +7,9 @@
*/
#include "libcflat.h"
#include "asm/setup.h"
+#include "asm/ptrace.h"
+#include "asm/asm-offsets.h"
+#include "asm/processor.h"
#define TESTGRP "selftest"
@@ -76,14 +79,131 @@ static void check_setup(int argc, char **argv)
assert_args(nr_tests, 2);
}
+static struct pt_regs expected_regs;
+/*
+ * Capture the current register state and execute an instruction
+ * that causes an exception. The test handler will check that its
+ * capture of the current register state matches the capture done
+ * here.
+ *
+ * NOTE: update the clobber list if the passed insns need more than r0,r1
+ */
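+/*
+ * Within the asm, "add r1, pc, #8" computes the address of the
+ * instruction after excptn_insn (in ARM state, pc reads as the
+ * current instruction + 8). That address is both the value r1 holds
+ * when the exception is taken and the expected parent PC, assuming
+ * excptn_insn is a single 4-byte ARM instruction.
+ */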
+#define test_exception(pre_insns, excptn_insn, post_insns) \
+ asm volatile( \
+ pre_insns "\n" \
+ "mov r0, %0\n" \
+ "stmia r0, { r0-lr }\n" \
+ "mrs r1, cpsr\n" \
+ "str r1, [r0, #" xstr(S_PSR) "]\n" \
+ "mov r1, #-1\n" \
+ "str r1, [r0, #" xstr(S_OLD_R0) "]\n" \
+ "add r1, pc, #8\n" \
+ "str r1, [r0, #" xstr(S_R1) "]\n" \
+ "str r1, [r0, #" xstr(S_PC) "]\n" \
+ excptn_insn "\n" \
+ post_insns "\n" \
+ :: "r" (&expected_regs) : "r0", "r1")
+
+static bool check_regs(struct pt_regs *regs)
+{
+ unsigned i;
+
+ /* exception handlers should always run in svc mode */
+ if (current_mode() != SVC_MODE)
+ return false;
+
+ for (i = 0; i < ARRAY_SIZE(regs->uregs); ++i) {
+ if (regs->uregs[i] != expected_regs.uregs[i])
+ return false;
+ }
+
+ return true;
+}
+
+static bool und_works;
+static void und_handler(struct pt_regs *regs)
+{
+ und_works = check_regs(regs);
+}
+
+static bool check_und(void)
+{
+ install_exception_handler(EXCPTN_UND, und_handler);
+
+ /* issue an instruction to a coprocessor we don't have */
+ test_exception("", "mcr p2, 0, r0, c0, c0", "");
+
+ install_exception_handler(EXCPTN_UND, NULL);
+
+ return und_works;
+}
+
+static bool svc_works;
+static void svc_handler(struct pt_regs *regs)
+{
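+	/*
+	 * ARM_pc holds the address of the insn after the svc; read the
+	 * svc insn back and extract its 24-bit immediate.
+	 */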
+ u32 svc = *(u32 *)(regs->ARM_pc - 4) & 0xffffff;
+
+ if (processor_mode(regs) == SVC_MODE) {
+		/*
+		 * An svc issued from supervisor mode clobbers lr_svc,
+		 * so callers must save it on the stack before issuing
+		 * the svc. check_svc() pushes it at offset 4, just
+		 * above the saved r0.
+		 */
+ regs->ARM_lr = *(unsigned long *)(regs->ARM_sp + 4);
+ }
+
+ svc_works = check_regs(regs) && svc == 123;
+}
+
+static bool check_svc(void)
+{
+ install_exception_handler(EXCPTN_SVC, svc_handler);
+
+ if (current_mode() == SVC_MODE) {
+ /*
+ * An svc from supervisor mode will corrupt lr_svc and
+ * spsr_svc. We need to save/restore them separately.
+ */
+ test_exception(
+ "mrs r0, spsr\n"
+ "push { r0,lr }\n",
+ "svc #123\n",
+ "pop { r0,lr }\n"
+ "msr spsr_cxsf, r0\n"
+ );
+ } else {
+ test_exception("", "svc #123", "");
+ }
+
+ install_exception_handler(EXCPTN_SVC, NULL);
+
+ return svc_works;
+}
+
+static void check_vectors(void *arg __unused)
+{
+ report("%s", check_und() && check_svc(), testname);
+ exit(report_summary());
+}
+
int main(int argc, char **argv)
{
testname_set(NULL);
assert_args(argc, 1);
testname_set(argv[0]);
- if (strcmp(argv[0], "setup") == 0)
+	if (strcmp(argv[0], "setup") == 0) {
check_setup(argc-1, &argv[1]);
+	} else if (strcmp(argv[0], "vectors-svc") == 0) {
+		check_vectors(NULL);
+	} else if (strcmp(argv[0], "vectors-usr") == 0) {
+		phys_start_usr(2*PAGE_SIZE, check_vectors, NULL);
+	}
+
return report_summary();
}
@@ -16,3 +16,15 @@ file = selftest.flat
smp = 1
extra_params = -m 256 -append 'setup smp=1 mem=256'
groups = selftest
+
+# Test vector setup and exception handling (svc mode).
+[selftest::vectors-svc]
+file = selftest.flat
+extra_params = -append 'vectors-svc'
+groups = selftest
+
+# Test vector setup and exception handling (usr mode).
+[selftest::vectors-usr]
+file = selftest.flat
+extra_params = -append 'vectors-usr'
+groups = selftest
@@ -39,7 +39,8 @@ cflatobjs += \
lib/virtio-testdev.o \
lib/arm/io.o \
lib/arm/setup.o \
- lib/arm/spinlock.o
+ lib/arm/spinlock.o \
+ lib/arm/processor.o
libeabi = lib/arm/libeabi.a
eabiobjs = lib/arm/eabi_compat.o
new file mode 100644
@@ -0,0 +1,40 @@
+#ifndef _ASMARM_PROCESSOR_H_
+#define _ASMARM_PROCESSOR_H_
+/*
+ * Copyright (C) 2014, Red Hat Inc, Andrew Jones <drjones@redhat.com>
+ *
+ * This work is licensed under the terms of the GNU LGPL, version 2.
+ */
+#include "ptrace.h"
+
+enum vector {
+ EXCPTN_RST,
+ EXCPTN_UND,
+ EXCPTN_SVC,
+ EXCPTN_PABT,
+ EXCPTN_DABT,
+ EXCPTN_ADDREXCPTN,
+ EXCPTN_IRQ,
+ EXCPTN_FIQ,
+ EXCPTN_MAX,
+};
+
+typedef void (*exception_fn)(struct pt_regs *);
+extern void install_exception_handler(enum vector v, exception_fn fn);
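+/*
+ * Usage sketch (handler name is just an example):
+ *	static void my_und_handler(struct pt_regs *regs) { ... }
+ *	install_exception_handler(EXCPTN_UND, my_und_handler);
+ * Installing NULL removes a handler; unhandled exceptions abort.
+ */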
+
+extern void show_regs(struct pt_regs *regs);
+extern void *get_sp(void);
+
+static inline unsigned long current_cpsr(void)
+{
+ unsigned long cpsr;
+ asm volatile("mrs %0, cpsr" : "=r" (cpsr));
+ return cpsr;
+}
+
+#define current_mode() (current_cpsr() & MODE_MASK)
+
+extern void
+phys_start_usr(size_t stacksize, void (*func)(void *arg), void *arg);
+
+#endif /* _ASMARM_PROCESSOR_H_ */
new file mode 100644
@@ -0,0 +1,115 @@
+/*
+ * processor control and status functions
+ *
+ * Copyright (C) 2014, Red Hat Inc, Andrew Jones <drjones@redhat.com>
+ *
+ * This work is licensed under the terms of the GNU LGPL, version 2.
+ */
+#include "libcflat.h"
+#include "asm/setup.h"
+#include "asm/ptrace.h"
+#include "asm/processor.h"
+
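+/* indexed by the full 5-bit mode field of the cpsr */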
+static const char *processor_modes[] = {
+ "USER_26", "FIQ_26" , "IRQ_26" , "SVC_26" ,
+ "UK4_26" , "UK5_26" , "UK6_26" , "UK7_26" ,
+ "UK8_26" , "UK9_26" , "UK10_26", "UK11_26",
+ "UK12_26", "UK13_26", "UK14_26", "UK15_26",
+ "USER_32", "FIQ_32" , "IRQ_32" , "SVC_32" ,
+ "UK4_32" , "UK5_32" , "UK6_32" , "ABT_32" ,
+ "UK8_32" , "UK9_32" , "UK10_32", "UND_32" ,
+ "UK12_32", "UK13_32", "UK14_32", "SYS_32"
+};
+
+static const char *vector_names[] = {
+ "rst", "und", "svc", "pabt", "dabt", "addrexcptn", "irq", "fiq"
+};
+
+void show_regs(struct pt_regs *regs)
+{
+ unsigned long flags;
+ char buf[64];
+
+ printf("pc : [<%08lx>] lr : [<%08lx>] psr: %08lx\n"
+ "sp : %08lx ip : %08lx fp : %08lx\n",
+ regs->ARM_pc, regs->ARM_lr, regs->ARM_cpsr,
+ regs->ARM_sp, regs->ARM_ip, regs->ARM_fp);
+ printf("r10: %08lx r9 : %08lx r8 : %08lx\n",
+ regs->ARM_r10, regs->ARM_r9, regs->ARM_r8);
+ printf("r7 : %08lx r6 : %08lx r5 : %08lx r4 : %08lx\n",
+ regs->ARM_r7, regs->ARM_r6, regs->ARM_r5, regs->ARM_r4);
+ printf("r3 : %08lx r2 : %08lx r1 : %08lx r0 : %08lx\n",
+ regs->ARM_r3, regs->ARM_r2, regs->ARM_r1, regs->ARM_r0);
+
+ flags = regs->ARM_cpsr;
+ buf[0] = flags & PSR_N_BIT ? 'N' : 'n';
+ buf[1] = flags & PSR_Z_BIT ? 'Z' : 'z';
+ buf[2] = flags & PSR_C_BIT ? 'C' : 'c';
+ buf[3] = flags & PSR_V_BIT ? 'V' : 'v';
+ buf[4] = '\0';
+
+ printf("Flags: %s IRQs o%s FIQs o%s Mode %s\n",
+ buf, interrupts_enabled(regs) ? "n" : "ff",
+ fast_interrupts_enabled(regs) ? "n" : "ff",
+ processor_modes[processor_mode(regs)]);
+
+ if (!user_mode(regs)) {
+ unsigned int ctrl, transbase, dac;
+ asm volatile(
+ "mrc p15, 0, %0, c1, c0\n"
+ "mrc p15, 0, %1, c2, c0\n"
+ "mrc p15, 0, %2, c3, c0\n"
+ : "=r" (ctrl), "=r" (transbase), "=r" (dac));
+ printf("Control: %08x Table: %08x DAC: %08x\n",
+ ctrl, transbase, dac);
+ }
+}
+
+void *get_sp(void)
+{
+ register unsigned long sp asm("sp");
+ return (void *)sp;
+}
+
+static exception_fn exception_handlers[EXCPTN_MAX];
+
+void install_exception_handler(enum vector v, exception_fn fn)
+{
+ if (v < EXCPTN_MAX)
+ exception_handlers[v] = fn;
+}
+
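+/* called from vector_common with r0 = vector number, r1 = pt_regs pointer */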
+void do_handle_exception(enum vector v, struct pt_regs *regs)
+{
+ if (v < EXCPTN_MAX && exception_handlers[v]) {
+ exception_handlers[v](regs);
+ return;
+ }
+
+ if (v < EXCPTN_MAX)
+ printf("Unhandled exception %d (%s)\n", v, vector_names[v]);
+ else
+ printf("%s called with vector=%d\n", __func__, v);
+
+ printf("Exception frame registers:\n");
+ show_regs(regs);
+ abort();
+}
+
+void phys_start_usr(size_t stacksize, void (*func)(void *arg), void *arg)
+{
+ struct memregion *m = memregion_new(stacksize);
+ unsigned long sp_usr = (unsigned long)(m->addr + m->size);
+
+	sp_usr &= (~7UL);	/* AAPCS requires 8-byte stack alignment */
+
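+	/*
+	 * Switch to usr mode (msr cpsr_c leaves the flags intact),
+	 * install the new user stack, then jump to func. There is no
+	 * way back to the caller.
+	 */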
+ asm volatile(
+ "mrs r0, cpsr\n"
+ "bic r0, #" xstr(MODE_MASK) "\n"
+ "orr r0, #" xstr(USR_MODE) "\n"
+ "msr cpsr_c, r0\n"
+ "mov r0, %0\n"
+ "mov sp, %1\n"
+ "mov pc, %2\n"
+ :: "r" (arg), "r" (sp_usr), "r" (func) : "r0");
+}