summary refs log tree commit diff stats
path: root/MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S
diff options
context:
space:
mode:
authorLeif Lindholm <quic_llindhol@quicinc.com>2020-10-01 19:37:11 +0100
committermergify[bot] <37929162+mergify[bot]@users.noreply.github.com>2023-10-02 15:48:02 +0000
commit2b2705343a810538bc27f5e897da693984c82394 (patch)
tree07afdd14ebb09f72a715268cc2f72386b3af4afa /MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S
parentae79efb7bd436068156605ae7111b93dab710546 (diff)
downloadedk2-2b2705343a810538bc27f5e897da693984c82394.tar.gz
edk2-2b2705343a810538bc27f5e897da693984c82394.tar.bz2
edk2-2b2705343a810538bc27f5e897da693984c82394.zip
MdePkg/BaseLib: correct register sizes in AArch64 SetJump/LongJump
Both in SetJump and in InternalLongJump, 32-bit w register views were used for the UINTN return value. In SetJump, this did not cause errors; it was only counterintuitive. But in InternalLongJump, it meant the top 32 bits of Value were stripped off. Change all of these to use the 64-bit x register views. Signed-off-by: Leif Lindholm <quic_llindhol@quicinc.com> Reanimated-by: Andrei Warkentin <andrei.warkentin@intel.com> Cc: Ard Biesheuvel <ardb+tianocore@kernel.org> Cc: Sami Mujawar <sami.mujawar@arm.com> Reviewed-by: Sami Mujawar <sami.mujawar@arm.com> Reviewed-by: Andrei Warkentin <andrei.warkentin@intel.com>
Diffstat (limited to 'MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S')
-rw-r--r--MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S8
1 file changed, 4 insertions, 4 deletions
diff --git a/MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S b/MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S
index de79ad3a0a..3e58119b25 100644
--- a/MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S
+++ b/MdePkg/Library/BaseLib/AArch64/SetJumpLongJump.S
@@ -61,7 +61,7 @@ ASM_PFX(SetJump):
FPR_LAYOUT
#undef REG_PAIR
#undef REG_ONE
- mov w0, #0
+ mov x0, #0
ret
#/**
@@ -91,9 +91,9 @@ ASM_PFX(InternalLongJump):
#undef REG_PAIR
#undef REG_ONE
mov sp, x16
- cmp w1, #0
- mov w0, #1
- csel w0, w1, w0, ne
+ cmp x1, #0
+ mov x0, #1
+ csel x0, x1, x0, ne
ret
ASM_FUNCTION_REMOVE_IF_UNREFERENCED