diff options
author | Lucas Stach <l.stach@pengutronix.de> | 2019-12-19 10:13:08 +0100 |
---|---|---|
committer | Sascha Hauer <s.hauer@pengutronix.de> | 2019-12-20 16:14:20 +0100 |
commit | f76dca195ecb19136c8e51399c94e7c0ea23b1d0 (patch) | |
tree | d688ec92acdfd4fdabdd7082abbc021fd6cf9166 /arch/arm/cpu | |
parent | d92e4b7e61215a0b7c609878bc294cb2b4b33f20 (diff) | |
download | barebox-f76dca195ecb19136c8e51399c94e7c0ea23b1d0.tar.gz barebox-f76dca195ecb19136c8e51399c94e7c0ea23b1d0.tar.xz |
ARM64: entry: save/restore potentially clobbered registers
While the comment is correct that currently arm_early_mmu_cache_invalidate()
is only a call to v8_invalidate_icache_all(), which doesn't clobber x0-x2,
this starts to fall apart as soon as we do something more in this function.
Make sure to properly save/restore the parameters passed to the entry function.
Signed-off-by: Lucas Stach <l.stach@pengutronix.de>
Signed-off-by: Sascha Hauer <s.hauer@pengutronix.de>
Diffstat (limited to 'arch/arm/cpu')
-rw-r--r-- | arch/arm/cpu/entry_ll_64.S | 12 |
1 files changed, 7 insertions, 5 deletions
diff --git a/arch/arm/cpu/entry_ll_64.S b/arch/arm/cpu/entry_ll_64.S index 37e0cb66b5..fb8645e0a0 100644 --- a/arch/arm/cpu/entry_ll_64.S +++ b/arch/arm/cpu/entry_ll_64.S @@ -10,14 +10,16 @@ .section .text.__barebox_arm_entry ENTRY(__barebox_arm_entry) mov sp, x3 - /* - * arm_early_mmu_cache_invalidate is jsut a call to - * v8_invalidate_icache_all() which doesn't clobber x0, x1 or x2 - */ + mov x19, x0 + mov x20, x1 + mov x21, x2 bl arm_early_mmu_cache_invalidate + mov x0, x19 + mov x1, x20 + mov x2, x21 #if IS_ENABLED(CONFIG_PBL_IMAGE) b barebox_pbl_start #else b barebox_non_pbl_start #endif -ENDPROC(__barebox_arm_entry)
\ No newline at end of file +ENDPROC(__barebox_arm_entry) |