@@ -28,6 +28,9 @@
#include <asm-generic/memory_layout.h>
#include <asm/sections.h>
+unsigned long free_mem_ptr;
+unsigned long free_mem_end_ptr;
+
#ifdef CONFIG_IMAGE_COMPRESSION
void __naked __section(.text_head_entry) compressed_start(void)
{
@@ -158,6 +161,10 @@ void __naked __section(.text_ll_return) board_init_lowlevel_return(void)
/* clear bss */
memset(__bss_start, 0, __bss_stop - __bss_start);
+	/* reserve 128 KiB just below STACK_BASE - 16 for early malloc */
+ free_mem_ptr = STACK_BASE - 0x20000 - 16;
+ free_mem_end_ptr = STACK_BASE - 16;
+
/* flush I-cache before jumping to the copied binary */
__asm__ __volatile__("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
This is not needed by lzo, but by gunzip, xz and others.

Signed-off-by: Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
---
 arch/arm/cpu/start.c | 7 +++++++
 1 file changed, 7 insertions(+)
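
For reference, below is a minimal sketch of the kind of bump allocator the
decompressors (gunzip, unxz, ...) typically build on top of the two symbols
this patch exports. It follows the usual decompressor-mm convention; the
function names decomp_malloc/decomp_free and the error() hook are
illustrative placeholders, not the exact barebox implementation.

/* Early-malloc pool boundaries exported by arch/arm/cpu/start.c */
extern unsigned long free_mem_ptr;
extern unsigned long free_mem_end_ptr;

/* assumed decompressor error hook, name is illustrative */
extern void error(const char *msg);

static unsigned long malloc_ptr;

static void *decomp_malloc(int size)
{
	void *p;

	if (size < 0)
		error("malloc: negative size");

	/* lazily start handing out memory at the bottom of the pool */
	if (!malloc_ptr)
		malloc_ptr = free_mem_ptr;

	/* keep allocations word aligned */
	malloc_ptr = (malloc_ptr + 3) & ~3UL;
	p = (void *)malloc_ptr;
	malloc_ptr += size;

	if (free_mem_end_ptr && malloc_ptr >= free_mem_end_ptr)
		error("malloc: out of early malloc space");

	return p;
}

static void decomp_free(void *p)
{
	/*
	 * Bump allocator: individual frees are a no-op; the whole region
	 * is reclaimed once decompression has finished.
	 */
	(void)p;
}

With the pointers set up as in the second hunk above, the first
decomp_malloc() call would start handing out memory at
STACK_BASE - 0x20000 - 16 and report an error once allocations reach
STACK_BASE - 16, i.e. after the 128 KiB pool is exhausted.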