@@ -15,6 +15,7 @@ typedef uint64_t u64;
#define __aligned(x) __attribute__((__aligned__(x)))
#define __packed __attribute__((packed))
+#define __used __attribute__((__used__))
/* Derived from asm-generic/bitsperlong.h. */
#if __x86_64__
diff --git a/tools/testing/selftests/x86/sgx/main.c b/tools/testing/selftests/x86/sgx/main.c
--- a/tools/testing/selftests/x86/sgx/main.c
+++ b/tools/testing/selftests/x86/sgx/main.c
@@ -342,14 +342,28 @@ static void test_sgx_vdso(struct sgx_secs *secs)
EXPECT_EQ(exception.leaf, ENCLU_EENTER);
}
-static int basic_exit_handler(long rdi, long rsi, long rdx, long ursp,
- long r8, long r9, void *tcs, int ret,
- struct sgx_enclave_exception *e)
+static int __used __basic_exit_handler(long rdi, long rsi, long rdx, long ursp,
+ long r8, long r9, void *tcs, int ret,
+ struct sgx_enclave_exception *e)
{
+ ASSERT_RAW(!(r9 & 0xf), "Pre-CALL RSP not 16-byte aligned: %lx\n", r9);
ASSERT_EQ(ret, 0);
return 0;
}
+extern void *basic_exit_handler;
+
+static void __used basic_exit_handler_trampoline(void)
+{
+ /* Load the pre-CALL %rsp into %r9 to verify correct alignment. */
+ asm volatile("1:\n\t"
+ "lea 0x8(%%rsp), %%r9\n\t"
+ "jmp __basic_exit_handler\n\t"
+ "basic_exit_handler: .quad 1b\n\t"
+ ".global basic_exit_handler"
+ ::: "memory");
+}
+
/*
* Test the vDSO API, __vdso_sgx_enter_enclave(), with an exit handler.
*/
Add an assembly trampoline to the basic exit handler to snapshot the
pre-CALL %rsp.  Use the snapshot to verify that the stack is 16-byte
aligned as required by the x86_64 ABI.

Signed-off-by: Sean Christopherson <sean.j.christopherson@intel.com>
---
 tools/testing/selftests/x86/sgx/defines.h |  1 +
 tools/testing/selftests/x86/sgx/main.c    | 20 +++++++++++++++++---
 2 files changed, 18 insertions(+), 3 deletions(-)
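
For readers unfamiliar with the trick, here is a minimal standalone
sketch of the same technique, separate from the patch itself; the names
(align_check.c, check_alignment, alignment_trampoline) are made up for
illustration and it assumes a Linux/ELF toolchain, e.g. "gcc -O2
align_check.c".  An assembly stub snapshots the caller's pre-CALL %rsp
and tail-jumps to a C function that checks the 16-byte alignment the
x86_64 SysV ABI requires at a CALL.

/* align_check.c - illustrative sketch, not part of this patch. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Receives the caller's pre-CALL %rsp via %rdi (first argument). */
void check_alignment(uint64_t pre_call_rsp)
{
	if (pre_call_rsp & 0xf) {
		fprintf(stderr, "Pre-CALL RSP not 16-byte aligned: 0x%lx\n",
			(unsigned long)pre_call_rsp);
		exit(1);
	}
	printf("Pre-CALL RSP is 16-byte aligned: 0x%lx\n",
	       (unsigned long)pre_call_rsp);
}

/*
 * Trampoline: on entry, %rsp points at the return address pushed by
 * CALL, so %rsp + 8 is the caller's pre-CALL %rsp.  Stash it in the
 * first argument register and tail-jump to the C handler, which then
 * returns directly to the original caller.
 */
asm(".text\n\t"
    ".global alignment_trampoline\n\t"
    ".type alignment_trampoline, @function\n"
    "alignment_trampoline:\n\t"
    "lea 0x8(%rsp), %rdi\n\t"
    "jmp check_alignment\n\t"
    ".size alignment_trampoline, .-alignment_trampoline\n");

void alignment_trampoline(void);

int main(void)
{
	/* A compiler-emitted CALL must satisfy the ABI's alignment rule. */
	alignment_trampoline();
	return 0;
}

The selftest cannot pass the snapshot in %rdi the way the sketch does,
because the exit handler's register arguments are dictated by the
__vdso_sgx_enter_enclave() ABI; the patch instead overwrites %r9 in the
trampoline so the snapshot shows up in the handler's r9 parameter.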