summaryrefslogtreecommitdiffstats
path: root/ArmPkg/Library/ArmLib
diff options
context:
space:
mode:
authorArd Biesheuvel <ard.biesheuvel@linaro.org>2016-04-11 15:47:24 +0200
committerArd Biesheuvel <ard.biesheuvel@linaro.org>2016-04-14 18:01:52 +0200
commit61b02ba1f2a3f80fa06f5006f0aea1572093a067 (patch)
tree5ec0390876e68b78879c59e5492b8247b8ba45db /ArmPkg/Library/ArmLib
parentd354963f0dc350771167fa5d3c28b9de8d632d9c (diff)
downloadedk2-61b02ba1f2a3f80fa06f5006f0aea1572093a067.tar.gz
edk2-61b02ba1f2a3f80fa06f5006f0aea1572093a067.tar.bz2
edk2-61b02ba1f2a3f80fa06f5006f0aea1572093a067.zip
ArmPkg/AArch64Mmu: disable MMU during page table manipulations
On ARM, manipulating live page tables is cumbersome since the architecture mandates the use of break-before-make, i.e., replacing a block entry with a table entry requires an intermediate step via an invalid entry, or TLB conflicts may occur. Since it is not generally feasible to decide in the page table manipulation routines whether such an invalid entry will result in those routines themselves becoming unavailable, use a function that is callable with the MMU off (i.e., a leaf function that does not access the stack) to perform the change of a block entry into a table entry. Note that the opposite should never occur, i.e., table entries are never coalesced into block entries. Contributed-under: TianoCore Contribution Agreement 1.0 Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Acked-by: Mark Rutland <mark.rutland@arm.com>
Diffstat (limited to 'ArmPkg/Library/ArmLib')
-rw-r--r--ArmPkg/Library/ArmLib/AArch64/AArch64Lib.inf5
-rw-r--r--ArmPkg/Library/ArmLib/AArch64/AArch64LibConstructor.c36
-rw-r--r--ArmPkg/Library/ArmLib/AArch64/AArch64Mmu.c17
-rw-r--r--ArmPkg/Library/ArmLib/AArch64/AArch64Support.S62
4 files changed, 118 insertions, 2 deletions
diff --git a/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.inf b/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.inf
index dd585dea91..58684e8492 100644
--- a/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.inf
+++ b/ArmPkg/Library/ArmLib/AArch64/AArch64Lib.inf
@@ -17,9 +17,10 @@
INF_VERSION = 0x00010005
BASE_NAME = AArch64Lib
FILE_GUID = ef20ddf5-b334-47b3-94cf-52ff44c29138
- MODULE_TYPE = DXE_DRIVER
+ MODULE_TYPE = BASE
VERSION_STRING = 1.0
LIBRARY_CLASS = ArmLib
+ CONSTRUCTOR = AArch64LibConstructor
[Sources.AARCH64]
AArch64Lib.c
@@ -31,6 +32,7 @@
../Common/AArch64/ArmLibSupport.S
../Common/ArmLib.c
+ AArch64LibConstructor.c
[Packages]
ArmPkg/ArmPkg.dec
@@ -38,6 +40,7 @@
[LibraryClasses]
MemoryAllocationLib
+ CacheMaintenanceLib
[Protocols]
gEfiCpuArchProtocolGuid
diff --git a/ArmPkg/Library/ArmLib/AArch64/AArch64LibConstructor.c b/ArmPkg/Library/ArmLib/AArch64/AArch64LibConstructor.c
new file mode 100644
index 0000000000..d2d0d3c15e
--- /dev/null
+++ b/ArmPkg/Library/ArmLib/AArch64/AArch64LibConstructor.c
@@ -0,0 +1,36 @@
+#/* @file
+#
+# Copyright (c) 2016, Linaro Limited. All rights reserved.
+#
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+#*/
+
+#include <Base.h>
+
+#include <Library/ArmLib.h>
+#include <Library/CacheMaintenanceLib.h>
+
+RETURN_STATUS
+EFIAPI
+AArch64LibConstructor (
+ VOID
+ )
+{
+ extern UINT32 ArmReplaceLiveTranslationEntrySize;
+
+ //
+ // The ArmReplaceLiveTranslationEntry () helper function may be invoked
+ // with the MMU off so we have to ensure that it gets cleaned to the PoC
+ //
+ WriteBackDataCacheRange (ArmReplaceLiveTranslationEntry,
+ ArmReplaceLiveTranslationEntrySize);
+
+ return RETURN_SUCCESS;
+}
diff --git a/ArmPkg/Library/ArmLib/AArch64/AArch64Mmu.c b/ArmPkg/Library/ArmLib/AArch64/AArch64Mmu.c
index 918957d4ff..48ca827184 100644
--- a/ArmPkg/Library/ArmLib/AArch64/AArch64Mmu.c
+++ b/ArmPkg/Library/ArmLib/AArch64/AArch64Mmu.c
@@ -169,6 +169,20 @@ GetRootTranslationTableInfo (
STATIC
VOID
+ReplaceLiveEntry (
+ IN UINT64 *Entry,
+ IN UINT64 Value
+ )
+{
+ if (!ArmMmuEnabled ()) {
+ *Entry = Value;
+ } else {
+ ArmReplaceLiveTranslationEntry (Entry, Value);
+ }
+}
+
+STATIC
+VOID
LookupAddresstoRootTable (
IN UINT64 MaxAddress,
OUT UINTN *T0SZ,
@@ -330,7 +344,8 @@ GetBlockEntryListFromAddress (
}
// Fill the BlockEntry with the new TranslationTable
- *BlockEntry = ((UINTN)TranslationTable & TT_ADDRESS_MASK_DESCRIPTION_TABLE) | TableAttributes | TT_TYPE_TABLE_ENTRY;
+ ReplaceLiveEntry (BlockEntry,
+ ((UINTN)TranslationTable & TT_ADDRESS_MASK_DESCRIPTION_TABLE) | TableAttributes | TT_TYPE_TABLE_ENTRY);
}
} else {
if (IndexLevel != PageLevel) {
diff --git a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
index 1a3023b794..43f7a795ac 100644
--- a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
+++ b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
@@ -56,6 +56,8 @@ GCC_ASM_EXPORT (ArmReadIdPfr1)
GCC_ASM_EXPORT (ArmWriteHcr)
GCC_ASM_EXPORT (ArmReadHcr)
GCC_ASM_EXPORT (ArmReadCurrentEL)
+GCC_ASM_EXPORT (ArmReplaceLiveTranslationEntry)
+GCC_ASM_EXPORT (ArmReplaceLiveTranslationEntrySize)
.set CTRL_M_BIT, (1 << 0)
.set CTRL_A_BIT, (1 << 1)
@@ -481,4 +483,64 @@ ASM_PFX(ArmReadCurrentEL):
mrs x0, CurrentEL
ret
+
  // Replace the translation table entry at x0 with the value in x1, with
  // the stage 1 MMU at EL\el disabled for the duration of the update.
  //
  //   x0 - address of the entry (also the target of the cache/TLB ops)
  //   x1 - new entry value
  //
  // Clobbers x8 and x9 (SCTLR save/scratch).
  //
  // NOTE(review): turning the MMU off sidesteps the architectural
  // break-before-make requirement for live entries — with translation
  // disabled, no TLB conflict can arise from the store below.
  .macro __replace_entry, el

  // disable the MMU
  mrs x8, sctlr_el\el
  bic x9, x8, #CTRL_M_BIT
  msr sctlr_el\el, x9
  isb

  // write updated entry
  str x1, [x0]

  // invalidate again to get rid of stale clean cachelines that may
  // have been filled speculatively since the last invalidate
  dmb sy
  dc ivac, x0

  // flush the TLBs
  .if \el == 1
  tlbi vmalle1
  .else
  tlbi alle\el
  .endif
  dsb sy

  // re-enable the MMU
  msr sctlr_el\el, x8
  isb
  .endm

//VOID
//ArmReplaceLiveTranslationEntry (
// IN UINT64 *Entry,
// IN UINT64 Value
// )
//
// MMU-off-callable leaf function (no stack access): must stay clean to the
// PoC — see AArch64LibConstructor, which uses the size word emitted below.
ASM_PFX(ArmReplaceLiveTranslationEntry):

  // disable interrupts
  // DAIF is saved in x2 and restored at label 4 below, so the whole
  // MMU-off window runs with interrupts/aborts masked.
  mrs x2, daif
  msr daifset, #0xf
  isb

  // clean and invalidate first so that we don't clobber
  // adjacent entries that are dirty in the caches
  dc civac, x0
  dsb ish

  // dispatch on the current exception level
  // (EL1_OR_EL2_OR_EL3 is defined elsewhere in this file; presumably it
  // branches to local labels 1/2/3 using x3 as scratch — confirm there)
  EL1_OR_EL2_OR_EL3(x3)
1:__replace_entry 1
  b 4f
2:__replace_entry 2
  b 4f
3:__replace_entry 3

  // restore the interrupt mask state saved on entry
4:msr daif, x2
  ret

// 32-bit data word holding the byte size of the helper above; read from C
// as 'extern UINT32 ArmReplaceLiveTranslationEntrySize'.
ASM_PFX(ArmReplaceLiveTranslationEntrySize):
  .long . - ArmReplaceLiveTranslationEntry
+
ASM_FUNCTION_REMOVE_IF_UNREFERENCED