
[boot-wrapper,2/7] aarch64: Prepare for lower EL booting

Message ID 20240606133628.3330423-3-luca.fancellu@arm.com (mailing list archive)
State New, archived
Series Add Armv8-R AArch64 support

Commit Message

Luca Fancellu June 6, 2024, 1:36 p.m. UTC
Store the value of the initial SPSR into a variable during EL3
initialization and load it from that variable before dropping to a
lower EL. This is preparation work for being able to boot from a
different exception level.

Signed-off-by: Luca Fancellu <luca.fancellu@arm.com>
---
 arch/aarch64/boot.S | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)
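
The point of routing the SPSR value through a variable, rather than keeping
the #SPSR_KERNEL immediate at the point of use, is that a later entry path
starting from a lower exception level can store a different target state
before the common code runs. The fragment below is a minimal sketch of that
idea; the reset_at_el2 label, the SPSR_KERNEL_EL1 constant and the el2_init
label are hypothetical illustrations, not part of this patch:

	/*
	 * Hypothetical reset path entered at EL2: store an SPSR that
	 * targets EL1 instead of the value used by the EL3 path, then
	 * continue with common initialization. jump_kernel later picks
	 * this value up from spsr_to_elx.
	 */
reset_at_el2:
	mov	w0, #SPSR_KERNEL_EL1	// hypothetical EL1 target state
	ldr	x1, =spsr_to_elx	// variable introduced by this patch
	str	w0, [x1]
	b	el2_init		// hypothetical common EL2 init path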

Comments

Andre Przywara June 6, 2024, 4:30 p.m. UTC | #1
On Thu,  6 Jun 2024 14:36:23 +0100
Luca Fancellu <luca.fancellu@arm.com> wrote:

Hi,

> Store the value of the initial SPSR into a variable during EL3
> initialization and load it from that variable before dropping to a
> lower EL. This is preparation work for being able to boot from a
> different exception level.
> 
> Signed-off-by: Luca Fancellu <luca.fancellu@arm.com>

Reviewed-by: Andre Przywara <andre.przywara@arm.com>

Cheers,
Andre


Patch

diff --git a/arch/aarch64/boot.S b/arch/aarch64/boot.S
index 7727475925c1..211077af17c8 100644
--- a/arch/aarch64/boot.S
+++ b/arch/aarch64/boot.S
@@ -51,6 +51,10 @@ reset_at_el3:
 	b.eq	err_invalid_id
 	bl	setup_stack
 
+	mov	w0, #SPSR_KERNEL
+	ldr	x1, =spsr_to_elx
+	str	w0, [x1]
+
 	bl	cpu_init_bootwrapper
 
 	bl	cpu_init_el3
@@ -135,7 +139,7 @@ ASM_FUNC(jump_kernel)
 	b.eq	1f
 	br	x19			// Keep EL
 
-1:	mov	x4, #SPSR_KERNEL
+1:	ldr	w4, spsr_to_elx
 
 	/*
 	 * If bit 0 of the kernel address is set, we're entering in AArch32
@@ -153,3 +157,5 @@ ASM_FUNC(jump_kernel)
 	.align 3
 flag_keep_el:
 	.long 0
+spsr_to_elx:
+	.long 0
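
For reference, further down in jump_kernel (outside the context lines shown
in this hunk) the value loaded into w4 is used to program the saved program
state before leaving EL3. The sequence below is only an illustrative sketch
of that exit, assuming x19 still holds the kernel entry point as in the hunk
above; it is not quoted from boot.S:

	/*
	 * Illustrative EL3 exit: set the return address and the saved
	 * program state (the SPSR read from spsr_to_elx), then eret
	 * into the kernel at whatever EL/state the SPSR encodes.
	 */
	msr	elr_el3, x19		// kernel entry point
	msr	spsr_el3, x4		// SPSR value loaded from spsr_to_elx
	eret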