Diffstat (limited to 'arch/blackfin/kernel/vmlinux.lds.S'):
 arch/blackfin/kernel/vmlinux.lds.S | 70 +++++++++++++++++++++++++++++++++-----
 1 file changed, 65 insertions(+), 5 deletions(-)
diff --git a/arch/blackfin/kernel/vmlinux.lds.S b/arch/blackfin/kernel/vmlinux.lds.S
index 66799e763dc9..984c78172397 100644
--- a/arch/blackfin/kernel/vmlinux.lds.S
+++ b/arch/blackfin/kernel/vmlinux.lds.S
@@ -15,7 +15,12 @@ _jiffies = _jiffies_64;
 
 SECTIONS
 {
+#ifdef CONFIG_RAMKERNEL
 	. = CONFIG_BOOT_LOAD;
+#else
+	. = CONFIG_ROM_BASE;
+#endif
+
 	/* Neither the text, ro_data or bss section need to be aligned
 	 * So pack them back to back
 	 */
@@ -31,6 +36,12 @@ SECTIONS
 		LOCK_TEXT
 		IRQENTRY_TEXT
 		KPROBES_TEXT
+#ifdef CONFIG_ROMKERNEL
+		__sinittext = .;
+		INIT_TEXT
+		__einittext = .;
+		EXIT_TEXT
+#endif
 		*(.text.*)
 		*(.fixup)
 
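With CONFIG_ROMKERNEL, INIT_TEXT and EXIT_TEXT are folded into the resident .text output section instead of a discardable init section: code that executes in place from flash can never be freed back to RAM, so all the new __sinittext/__einittext symbols can do is bracket the region. A hedged sketch of the kind of address classification those brackets allow (is_init_text is a hypothetical helper, not part of this patch):

	extern char __sinittext[], __einittext[];

	/* True if addr falls inside the (unfreeable) init text bracketed
	 * by the linker script above.  Illustrative helper only.
	 */
	static int is_init_text(unsigned long addr)
	{
		return addr >= (unsigned long)__sinittext &&
		       addr <  (unsigned long)__einittext;
	}
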
@@ -50,8 +61,14 @@ SECTIONS
 
 	/* Just in case the first read only is a 32-bit access */
 	RO_DATA(4)
+	__rodata_end = .;
 
+#ifdef CONFIG_ROMKERNEL
+	. = CONFIG_BOOT_LOAD;
+	.bss : AT(__rodata_end)
+#else
 	.bss :
+#endif
 	{
 		. = ALIGN(4);
 		___bss_start = .;
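For the ROM kernel the location counter jumps to CONFIG_BOOT_LOAD (the RAM base) before .bss, while AT(__rodata_end) keeps the load-address cursor parked at the end of the read-only data in flash; .bss contributes no bytes to the image, so the AT() mainly anchors LOADADDR(.bss) for the .data section that follows. In both configurations .bss has no stored contents and must be zeroed at runtime. A minimal sketch of how early boot code typically consumes the ___bss_start/___bss_stop symbols defined here (the real work happens in the Blackfin head assembly; this loop is illustrative):

	extern char ___bss_start[], ___bss_stop[];

	/* Zero BSS before any C code assumes static storage reads as 0. */
	static void clear_bss(void)
	{
		char *p;

		for (p = ___bss_start; p < ___bss_stop; p++)
			*p = 0;
	}
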
@@ -67,7 +84,11 @@ SECTIONS
 		___bss_stop = .;
 	}
 
+#if defined(CONFIG_ROMKERNEL)
+	.data : AT(LOADADDR(.bss) + SIZEOF(.bss))
+#else
 	.data :
+#endif
 	{
 		__sdata = .;
 		/* This gets done first, so the glob doesn't suck it in */
@@ -94,6 +115,8 @@ SECTIONS
 
 		__edata = .;
 	}
+	__data_lma = LOADADDR(.data);
+	__data_len = SIZEOF(.data);
 
 	/* The init section should be last, so when we free it, it goes into
 	 * the general memory pool, and (hopefully) will decrease fragmentation
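__data_lma and __data_len record where .data is stored in the image and how large it is. For a RAM kernel the load and run addresses coincide, but with CONFIG_ROMKERNEL the .data section is stored in flash right behind .bss's load address and must be copied to its RAM address before any initialized variable is used. A hedged sketch of that copy using only symbols the script defines (memcpy stands in for whatever the real startup code does):

	#include <linux/string.h>

	extern char __sdata[];      /* run-time (VMA) start of .data   */
	extern char __data_lma[];   /* where .data is stored in flash  */
	extern char __data_len[];   /* SIZEOF(.data) as an abs symbol  */

	static void copy_data_from_rom(void)
	{
		memcpy(__sdata, __data_lma, (unsigned long)__data_len);
	}
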
@@ -103,25 +126,58 @@ SECTIONS
 	. = ALIGN(PAGE_SIZE);
 	___init_begin = .;
 
+#ifdef CONFIG_RAMKERNEL
 	INIT_TEXT_SECTION(PAGE_SIZE)
-	. = ALIGN(16);
-	INIT_DATA_SECTION(16)
-	PERCPU(4)
 
-	/* we have to discard exit text and such at runtime, not link time, to
+	/* We have to discard exit text and such at runtime, not link time, to
 	 * handle embedded cross-section references (alt instructions, bug
-	 * table, eh_frame, etc...)
+	 * table, eh_frame, etc...). We need all of our .text up front and
+	 * .data after it for PCREL call issues.
 	 */
 	.exit.text :
 	{
 		EXIT_TEXT
 	}
+
+	. = ALIGN(16);
+	INIT_DATA_SECTION(16)
+	PERCPU(4)
+
 	.exit.data :
 	{
 		EXIT_DATA
 	}
 
 	.text_l1 L1_CODE_START : AT(LOADADDR(.exit.data) + SIZEOF(.exit.data))
+#else
+	.init.data : AT(__data_lma + __data_len)
+	{
+		__sinitdata = .;
+		INIT_DATA
+		INIT_SETUP(16)
+		INIT_CALLS
+		CON_INITCALL
+		SECURITY_INITCALL
+		INIT_RAM_FS
+
+		. = ALIGN(4);
+		___per_cpu_load = .;
+		___per_cpu_start = .;
+		*(.data.percpu.first)
+		*(.data.percpu.page_aligned)
+		*(.data.percpu)
+		*(.data.percpu.shared_aligned)
+		___per_cpu_end = .;
+
+		EXIT_DATA
+		__einitdata = .;
+	}
+	__init_data_lma = LOADADDR(.init.data);
+	__init_data_len = SIZEOF(.init.data);
+	__init_data_end = .;
+
+	.text_l1 L1_CODE_START : AT(__init_data_lma + __init_data_len)
+#endif
 	{
 		. = ALIGN(4);
 		__stext_l1 = .;
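In the ROM-kernel branch the entire init payload (init data, setup records, initcall tables, console and security initcalls, the initramfs, the per-cpu areas, and exit data) is gathered into a single .init.data section whose load address chains directly behind .data via AT(__data_lma + __data_len). Like .data it is copied into RAM once at boot, and unlike the flash-resident init text this RAM copy can later be freed. A sketch of that second copy, in the same spirit as the .data copy above (illustrative only; the symbols are the ones defined here):

	#include <linux/string.h>

	extern char __sinitdata[], __einitdata[];
	extern char __init_data_lma[];

	static void copy_initdata_from_rom(void)
	{
		memcpy(__sinitdata, __init_data_lma, __einitdata - __sinitdata);
	}
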
@@ -202,7 +258,11 @@ SECTIONS
 	/* Force trailing alignment of our init section so that when we
 	 * free our init memory, we don't leave behind a partial page.
 	 */
+#ifdef CONFIG_RAMKERNEL
 	. = __l2_lma + __l2_len;
+#else
+	. = __init_data_end;
+#endif
 	. = ALIGN(PAGE_SIZE);
 	___init_end = .;
 
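Both branches close the init region on a page boundary because everything between ___init_begin and ___init_end is returned to the page allocator after boot; a partial trailing page could not be handed back. A rough sketch of that freeing loop, assuming a hypothetical free_init_page() helper in place of the arch's real free_initmem() internals:

	#include <asm/page.h>

	extern char ___init_begin[], ___init_end[];

	static void free_initmem_sketch(void)
	{
		unsigned long addr;

		/* The ALIGN(PAGE_SIZE) bracketing above guarantees whole pages. */
		for (addr = (unsigned long)___init_begin;
		     addr < (unsigned long)___init_end; addr += PAGE_SIZE)
			free_init_page(addr);   /* hypothetical helper */
	}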