 include/asm-generic/vmlinux.lds.h | 9 +++++++++
 1 file changed, 9 insertions(+), 0 deletions(-)
diff --git a/include/asm-generic/vmlinux.lds.h b/include/asm-generic/vmlinux.lds.h
index b3bb326ae5b6..3fa94288aa93 100644
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -6,6 +6,9 @@
 #define VMLINUX_SYMBOL(_sym_) _sym_
 #endif
 
+/* Align . to an 8 byte boundary, i.e. the maximum function alignment. */
+#define ALIGN_FUNCTION()	. = ALIGN(8)
+
 #define RODATA								\
 	.rodata : AT(ADDR(.rodata) - LOAD_OFFSET) {			\
 		*(.rodata) *(.rodata.*)					\
@@ -79,12 +82,18 @@
 		VMLINUX_SYMBOL(__security_initcall_end) = .;		\
 	}
 
+/* sched.text is aligned to the function alignment so that the symbols get
+ * the same address even at the second ld pass when generating System.map */
 #define SCHED_TEXT							\
+		ALIGN_FUNCTION();					\
 		VMLINUX_SYMBOL(__sched_text_start) = .;			\
 		*(.sched.text)						\
 		VMLINUX_SYMBOL(__sched_text_end) = .;
 
+/* spinlock.text is aligned to the function alignment so that the symbols get
+ * the same address even at the second ld pass when generating System.map */
 #define LOCK_TEXT							\
+		ALIGN_FUNCTION();					\
 		VMLINUX_SYMBOL(__lock_text_start) = .;			\
 		*(.spinlock.text)					\
 		VMLINUX_SYMBOL(__lock_text_end) = .;
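
For context, a minimal sketch of how an architecture linker script (an arch/*/kernel/vmlinux.lds.S) typically consumes these macros. The start address, the LOAD_OFFSET value and the surrounding input sections below are illustrative assumptions, not part of this patch; only SCHED_TEXT, LOCK_TEXT and RODATA come from the header changed above.

/* Illustrative fragment of an arch vmlinux.lds.S, not taken from this patch. */
#define LOAD_OFFSET 0			/* assumption: many arch scripts use 0 */
#include <asm-generic/vmlinux.lds.h>

SECTIONS
{
	. = 0xC0000000;			/* hypothetical kernel start address */
	.text : {
		*(.text)
		SCHED_TEXT		/* ALIGN_FUNCTION(); __sched_text_start = .; *(.sched.text) ... */
		LOCK_TEXT		/* likewise aligned before __lock_text_start */
		*(.fixup)
	}

	RODATA
}

Because both macros now begin with ALIGN_FUNCTION(), __sched_text_start and __lock_text_start always land on an 8 byte boundary, which is what keeps their addresses identical across the two ld passes used when generating System.map.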