author     Ard Biesheuvel <ard.biesheuvel@linaro.org>  2017-02-22 09:38:20 +0000
committer  Ard Biesheuvel <ard.biesheuvel@linaro.org>  2017-02-22 13:22:42 +0000
commit     97f0d01d5df07c7a81e4c505494f43b4b46ffe6c (patch)
tree       d8a6f4599231a33422badffd361ca9a9a6b7089e
parent     de2a78247a2955e20a014fd8c47eb9792d1b437b (diff)
ArmPkg/ArmLib: AARCH64: allow the stack alignment (SA) bit to be managed

In preparation for enabling stack alignment checking, which is mandated by
the UEFI spec for AARCH64, add the code to manage this bit to ArmLib.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Reviewed-by: Leif Lindholm <leif.lindholm@linaro.org>
-rw-r--r--  ArmPkg/Include/Chipset/AArch64.h                12
-rw-r--r--  ArmPkg/Library/ArmLib/AArch64/AArch64Support.S  34
2 files changed, 46 insertions, 0 deletions
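For context, a minimal C sketch of how a caller could use the new helpers. The
wrapper function name and the include choices below are illustrative only; the
patch itself contributes just the ArmEnableStackAlignmentCheck () and
ArmDisableStackAlignmentCheck () declarations and their AArch64 implementations.

    #include <Base.h>
    #include <Chipset/AArch64.h>   // declares the new Arm*StackAlignmentCheck
                                   // helpers, per the hunk below

    //
    // Hypothetical early-init hook. Once SCTLR_ELx.SA is set, any load or
    // store that uses SP as the base register faults unless SP is 16-byte
    // aligned, which is the alignment the UEFI spec requires on AARCH64.
    //
    VOID
    EFIAPI
    EnforceUefiStackAlignment (
      VOID
      )
    {
      ArmEnableStackAlignmentCheck ();
    }

Providing both an enable and a disable helper presumably lets callers toggle
the check around code that cannot yet guarantee a 16-byte aligned stack.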
diff --git a/ArmPkg/Include/Chipset/AArch64.h b/ArmPkg/Include/Chipset/AArch64.h
index 9aecb1df81..cebfc5da42 100644
--- a/ArmPkg/Include/Chipset/AArch64.h
+++ b/ArmPkg/Include/Chipset/AArch64.h
@@ -194,6 +194,18 @@ ArmEnableAlignmentCheck (
VOID
EFIAPI
+ArmDisableStackAlignmentCheck (
+ VOID
+ );
+
+VOID
+EFIAPI
+ArmEnableStackAlignmentCheck (
+ VOID
+ );
+
+VOID
+EFIAPI
ArmDisableAllExceptions (
VOID
);
diff --git a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
index 886c420962..6e8074a486 100644
--- a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
+++ b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
@@ -20,6 +20,7 @@
.set CTRL_M_BIT, (1 << 0)
.set CTRL_A_BIT, (1 << 1)
.set CTRL_C_BIT, (1 << 2)
+.set CTRL_SA_BIT, (1 << 3)
.set CTRL_I_BIT, (1 << 12)
.set CTRL_V_BIT, (1 << 12)
.set CPACR_VFP_BITS, (3 << 20)
@@ -259,6 +260,39 @@ ASM_FUNC(ArmDisableAlignmentCheck)
isb
ret
+ASM_FUNC(ArmEnableStackAlignmentCheck)
+ EL1_OR_EL2(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 3f
+2: mrs x0, sctlr_el2 // Get control register EL2
+3: orr x0, x0, #CTRL_SA_BIT // Set SA (stack alignment check) bit
+ EL1_OR_EL2(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 3f
+2: msr sctlr_el2, x0 // Write back control register
+3: dsb sy
+ isb
+ ret
+
+
+ASM_FUNC(ArmDisableStackAlignmentCheck)
+ EL1_OR_EL2_OR_EL3(x1)
+1: mrs x0, sctlr_el1 // Get control register EL1
+ b 4f
+2: mrs x0, sctlr_el2 // Get control register EL2
+ b 4f
+3: mrs x0, sctlr_el3 // Get control register EL3
+4: bic x0, x0, #CTRL_SA_BIT // Clear SA (stack alignment check) bit
+ EL1_OR_EL2_OR_EL3(x1)
+1: msr sctlr_el1, x0 // Write back control register
+ b 4f
+2: msr sctlr_el2, x0 // Write back control register
+ b 4f
+3: msr sctlr_el3, x0 // Write back control register
+4: dsb sy
+ isb
+ ret
+
// Always turned on in AArch64. Else implementation specific. Leave in for C compatibility for now
ASM_FUNC(ArmEnableBranchPrediction)