Diffstat (limited to 'arch/powerpc')
-rw-r--r--  arch/powerpc/Kconfig                 11
-rw-r--r--  arch/powerpc/include/asm/head-64.h   18
-rw-r--r--  arch/powerpc/kernel/vmlinux.lds.S     5
3 files changed, 34 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/Kconfig b/arch/powerpc/Kconfig
index d090275ace44..0153275e9f75 100644
--- a/arch/powerpc/Kconfig
+++ b/arch/powerpc/Kconfig
@@ -455,6 +455,17 @@ config PPC_TRANSACTIONAL_MEM
 	---help---
 	  Support user-mode Transactional Memory on POWERPC.
 
+config LD_HEAD_STUB_CATCH
+	bool "Reserve 256 bytes to cope with linker stubs in HEAD text" if EXPERT
+	depends on PPC64
+	default n
+	help
+	  Very large kernels can cause linker branch stubs to be generated by
+	  code in head_64.S, which moves the head text sections out of their
+	  specified location. This option can work around the problem.
+
+	  If unsure, say "N".
+
 config DISABLE_MPROFILE_KERNEL
 	bool "Disable use of mprofile-kernel for kernel tracing"
 	depends on PPC64 && CPU_LITTLE_ENDIAN
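For context on how the new option is used, here is a minimal sketch of a .config fragment that turns the workaround on; the option name comes from the Kconfig entry above, while pairing it with CONFIG_EXPERT is only needed to make the prompt visible and is otherwise an assumption about the rest of the configuration:

# hypothetical .config fragment; CONFIG_EXPERT only gates the prompt visibility
CONFIG_EXPERT=y
CONFIG_LD_HEAD_STUB_CATCH=y

With this set, OPEN_TEXT_SECTION reserves 0x100 bytes ahead of start_text, as the head-64.h change below shows.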
diff --git a/arch/powerpc/include/asm/head-64.h b/arch/powerpc/include/asm/head-64.h
index 86eb87382031..68828aa6e056 100644
--- a/arch/powerpc/include/asm/head-64.h
+++ b/arch/powerpc/include/asm/head-64.h
@@ -63,11 +63,29 @@
 	. = 0x0;						\
 start_##sname:
 
+/*
+ * .linker_stub_catch section is used to catch linker stubs from being
+ * inserted in our .text section, above the start_text label (which breaks
+ * the ABS_ADDR calculation). See kernel/vmlinux.lds.S and tools/head_check.sh
+ * for more details. We would prefer to just keep a cacheline (0x80), but
+ * 0x100 seems to be how the linker aligns branch stub groups.
+ */
+#ifdef CONFIG_LD_HEAD_STUB_CATCH
+#define OPEN_TEXT_SECTION(start)				\
+	.section ".linker_stub_catch","ax",@progbits;		\
+linker_stub_catch:						\
+	. = 0x4;						\
+	text_start = (start) + 0x100;				\
+	.section ".text","ax",@progbits;			\
+	.balign	0x100;						\
+start_text:
+#else
 #define OPEN_TEXT_SECTION(start)				\
 	text_start = (start);					\
 	.section ".text","ax",@progbits;			\
 	. = 0x0;						\
 start_text:
+#endif
 
 #define ZERO_FIXED_SECTION(sname, start, end)			\
 	sname##_start = (start);				\
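To make the macro easier to read without the continuation backslashes, this is roughly what the CONFIG_LD_HEAD_STUB_CATCH variant of OPEN_TEXT_SECTION expands to; the argument value 0x100 is hypothetical and chosen only for illustration, the real value comes from the head assembly code that invokes the macro:

	/* sketch: OPEN_TEXT_SECTION(0x100) with CONFIG_LD_HEAD_STUB_CATCH=y */
	.section ".linker_stub_catch","ax",@progbits
linker_stub_catch:
	. = 0x4				/* token 4-byte body; linker branch stubs are caught here */
	text_start = (0x100) + 0x100	/* advertise the address start_text will really get */
	.section ".text","ax",@progbits
	.balign	0x100			/* 0x100 matches how the linker aligns branch stub groups */
start_text:

Because any stubs land in .linker_stub_catch rather than above start_text, the ABS_ADDR arithmetic mentioned in the comment, which relies on start_text actually sitting at the address text_start names, keeps working.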
diff --git a/arch/powerpc/kernel/vmlinux.lds.S b/arch/powerpc/kernel/vmlinux.lds.S
index bcfda21c3179..be8578e02ab6 100644
--- a/arch/powerpc/kernel/vmlinux.lds.S
+++ b/arch/powerpc/kernel/vmlinux.lds.S
@@ -103,6 +103,11 @@ SECTIONS
 	 * section placement to work.
 	 */
 	.text BLOCK(0) : AT(ADDR(.text) - LOAD_OFFSET) {
+#ifdef CONFIG_LD_HEAD_STUB_CATCH
+		*(.linker_stub_catch);
+		. = . ;
+#endif
+
 #else
 	.text : AT(ADDR(.text) - LOAD_OFFSET) {
 	ALIGN_FUNCTION();
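Taken together, the linker script and macro changes aim for roughly the following layout at the top of the output .text section when the option is enabled; the offsets are illustrative and assume the catch area plus any stubs stays under 0x100 bytes:

0x000  *(.linker_stub_catch)  linker_stub_catch:  4 reserved bytes plus any linker-generated branch stubs
0x100  head .text             start_text:         0x100-aligned head code, matching the text_start symbol

If the stubs ever outgrew the reserved area, the .balign would push start_text out to 0x200, which is presumably the situation tools/head_check.sh (referenced in the head-64.h comment) is there to catch.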