author     Nicholas Piggin <npiggin@gmail.com>      2016-09-27 21:31:48 -0400
committer  Michael Ellerman <mpe@ellerman.id.au>    2016-10-03 22:06:56 -0400
commit     57f266497d81e16141bd2c9009e91dad34ea5f70 (patch)
tree       cfe7f69e2945f26758dcb6a81c1a40b4ecfb73e9
parent     573819e3433278cde17e7b93e101e59e96f19ccf (diff)
powerpc: Use gas sections for arranging exception vectors
Use assembler sections of fixed size and location to arrange the 64-bit
Book3S exception vector code (64-bit Book3E also uses it in head_64.S
for 0x0..0x100).

This allows better flexibility in arranging exception code and hiding
unimportant details behind macros.

Gas sections can be a bit painful to use this way, mainly because the
assembler does not know where they will finally be linked. Taking
absolute addresses requires a bit of trickery, for example, but it can
be hidden behind macros for the most part.

Generated code is mostly the same except for locations, offsets, and
alignments.

The "+ 0x2" is only required for the trap number / kvm exit number,
which gets loaded as a constant into a register. Previously, code also
used + 0x2 for label names, but we changed to using "H" to distinguish
the HV case for that. Remove the last vestiges of that.

__after_prom_start takes the absolute address of a label in another
fixed section. Newer toolchains seemed to compile this okay, but older
ones do not. FIXED_SYMBOL_ABS_ADDR is more foolproof; it just takes an
additional line to define.

Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
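The absolute-address trickery mentioned above is what the new DEFINE_FIXED_SYMBOL / FIXED_SYMBOL_ABS_ADDR / ABS_ADDR macros in the diff below hide. A minimal sketch of the idea, using a hypothetical label name (my_handler); the real definitions are in arch/powerpc/include/asm/head-64.h:

    USE_FIXED_SECTION(real_trampolines)
    my_handler:                          /* hypothetical label inside a fixed section */
    DEFINE_FIXED_SYMBOL(my_handler)      /* my_handler_absolute = my_handler - fs_label + fs_start */

        /*
         * From any other section, FIXED_SYMBOL_ABS_ADDR(my_handler) is a
         * link-time constant usable as an immediate, e.g. the LOAD_HANDLER
         * pattern (the target must stay within the first 64K of physical 0 /
         * virtual 0xc00... for @l to be sufficient):
         */
        ld      r10,PACAKBASE(r13)
        ori     r10,r10,(FIXED_SYMBOL_ABS_ADDR(my_handler))@l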
-rw-r--r--  arch/powerpc/include/asm/exception-64s.h     2
-rw-r--r--  arch/powerpc/include/asm/head-64.h          261
-rw-r--r--  arch/powerpc/kernel/exceptions-64s.S        111
-rw-r--r--  arch/powerpc/kernel/head_64.S                44
-rw-r--r--  arch/powerpc/kernel/vmlinux.lds.S            53
5 files changed, 404 insertions, 67 deletions
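Before the diff itself, a sketch of how a vector lands at its fixed address under the new scheme (the vector name and range here are hypothetical; the real OPEN/CLOSE calls and macro definitions appear in the hunks below):

    OPEN_FIXED_SECTION(real_vectors, 0x0100, 0x1900)    /* as done in exceptions-64s.S */

    EXC_REAL_BEGIN(example_vec, 0x700, 0x780)           /* vector ends up at 0x700 once the section is linked at 0x100 */
        /* exception entry code ... */
    EXC_REAL_END(example_vec, 0x700, 0x780)             /* checks the entry fits, advances . to the 0x780 boundary */

    CLOSE_FIXED_SECTION(real_vectors)                   /* pads the section out to its full length; overruns fail to assemble */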
diff --git a/arch/powerpc/include/asm/exception-64s.h b/arch/powerpc/include/asm/exception-64s.h
index 72f2b1e3f343..2e4e7d878c8e 100644
--- a/arch/powerpc/include/asm/exception-64s.h
+++ b/arch/powerpc/include/asm/exception-64s.h
@@ -91,7 +91,7 @@
91 */ 91 */
92#define LOAD_HANDLER(reg, label) \ 92#define LOAD_HANDLER(reg, label) \
93 ld reg,PACAKBASE(r13); /* get high part of &label */ \ 93 ld reg,PACAKBASE(r13); /* get high part of &label */ \
94 ori reg,reg,((label)-_stext)@l; /* virt addr of handler ... */ 94 ori reg,reg,(FIXED_SYMBOL_ABS_ADDR(label))@l;
95 95
96/* Exception register prefixes */ 96/* Exception register prefixes */
97#define EXC_HV H 97#define EXC_HV H
diff --git a/arch/powerpc/include/asm/head-64.h b/arch/powerpc/include/asm/head-64.h
index 613f743e91aa..ab90c2fa1ea6 100644
--- a/arch/powerpc/include/asm/head-64.h
+++ b/arch/powerpc/include/asm/head-64.h
@@ -3,28 +3,218 @@
3 3
4#include <asm/cache.h> 4#include <asm/cache.h>
5 5
6/*
7 * We can't do CPP stringification and concatenation directly into the section
8 * name for some reason, so these macros can do it for us.
9 */
10.macro define_ftsec name
11 .section ".head.text.\name\()","ax",@progbits
12.endm
13.macro define_data_ftsec name
14 .section ".head.data.\name\()","a",@progbits
15.endm
16.macro use_ftsec name
17 .section ".head.text.\name\()"
18.endm
19
20/*
21 * Fixed (location) sections are used by opening fixed sections and emitting
22 * fixed section entries into them before closing them. Multiple fixed sections
23 * can be open at any time.
24 *
25 * Each fixed section created in a .S file must have corresponding linkage
26 * directives including location, added to arch/powerpc/kernel/vmlinux.lds.S
27 *
28 * For each fixed section, code is generated into it in the order in which it
29 * appears in the source. Fixed section entries can be placed at a fixed
30 * location within the section using _LOCATION postfix variants. These must
31 * be ordered according to their relative placements within the section.
32 *
33 * OPEN_FIXED_SECTION(section_name, start_address, end_address)
34 * FIXED_SECTION_ENTRY_BEGIN(section_name, label1)
35 *
36 * USE_FIXED_SECTION(section_name)
37 * label3:
38 * li r10,128
39 * mr r11,r10
40
41 * FIXED_SECTION_ENTRY_BEGIN_LOCATION(section_name, label2, start_address)
42 * FIXED_SECTION_ENTRY_END_LOCATION(section_name, label2, end_address)
43 * CLOSE_FIXED_SECTION(section_name)
44 *
45 * ZERO_FIXED_SECTION can be used to emit zeroed data.
46 *
47 * Troubleshooting:
48 * - If the build dies with "Error: attempt to move .org backwards" at
49 * CLOSE_FIXED_SECTION() or elsewhere, there may be something
50 * unexpected being added there. Remove the '. = x_len' line, rebuild, and
51 * check what is pushing the section down.
52 * - If the build dies in linking, check arch/powerpc/kernel/vmlinux.lds.S
53 * for instructions.
54 * - If the kernel crashes or hangs in very early boot, it could be linker
55 * stubs at the start of the main text.
56 */
57
58#define OPEN_FIXED_SECTION(sname, start, end) \
59 sname##_start = (start); \
60 sname##_end = (end); \
61 sname##_len = (end) - (start); \
62 define_ftsec sname; \
63 . = 0x0; \
64start_##sname:
65
66#define OPEN_TEXT_SECTION(start) \
67 text_start = (start); \
68 .section ".text","ax",@progbits; \
69 . = 0x0; \
70start_text:
71
72#define ZERO_FIXED_SECTION(sname, start, end) \
73 sname##_start = (start); \
74 sname##_end = (end); \
75 sname##_len = (end) - (start); \
76 define_data_ftsec sname; \
77 . = 0x0; \
78 . = sname##_len;
79
80#define USE_FIXED_SECTION(sname) \
81 fs_label = start_##sname; \
82 fs_start = sname##_start; \
83 use_ftsec sname;
84
85#define USE_TEXT_SECTION() \
86 fs_label = start_text; \
87 fs_start = text_start; \
88 .text
89
90#define CLOSE_FIXED_SECTION(sname) \
91 USE_FIXED_SECTION(sname); \
92 . = sname##_len; \
93end_##sname:
94
95
96#define __FIXED_SECTION_ENTRY_BEGIN(sname, name, __align) \
97 USE_FIXED_SECTION(sname); \
98 .align __align; \
99 .global name; \
100name:
101
102#define FIXED_SECTION_ENTRY_BEGIN(sname, name) \
103 __FIXED_SECTION_ENTRY_BEGIN(sname, name, 0)
104
105#define FIXED_SECTION_ENTRY_BEGIN_LOCATION(sname, name, start) \
106 USE_FIXED_SECTION(sname); \
107 name##_start = (start); \
108 .if (start) < sname##_start; \
109 .error "Fixed section underflow"; \
110 .abort; \
111 .endif; \
112 . = (start) - sname##_start; \
113 .global name; \
114name:
115
116#define FIXED_SECTION_ENTRY_END_LOCATION(sname, name, end) \
117 .if (end) > sname##_end; \
118 .error "Fixed section overflow"; \
119 .abort; \
120 .endif; \
121 .if (. - name > end - name##_start); \
122 .error "Fixed entry overflow"; \
123 .abort; \
124 .endif; \
125 . = ((end) - sname##_start); \
126
127
128/*
129 * These macros are used to change symbols in other fixed sections to be
130 * absolute or related to our current fixed section.
131 *
132 * - DEFINE_FIXED_SYMBOL / FIXED_SYMBOL_ABS_ADDR is used to find the
133 * absolute address of a symbol within a fixed section, from any section.
134 *
135 * - ABS_ADDR is used to find the absolute address of any symbol, from within
136 * a fixed section.
137 */
138#define DEFINE_FIXED_SYMBOL(label) \
139 label##_absolute = (label - fs_label + fs_start)
140
141#define FIXED_SYMBOL_ABS_ADDR(label) \
142 (label##_absolute)
143
144#define ABS_ADDR(label) (label - fs_label + fs_start)
145
146/*
147 * Following are the BOOK3S exception handler helper macros.
148 * Handlers come in a number of types, and each type has a number of varieties.
149 *
150 * EXC_REAL_* - real, unrelocated exception vectors
151 * EXC_VIRT_* - virt (AIL), unrelocated exception vectors
152 * TRAMP_REAL_* - real, unrelocated helpers (virt can call these)
153 * TRAMP_VIRT_* - virt, unreloc helpers (in practice, real can use)
154 * TRAMP_KVM - KVM handlers that get put into real, unrelocated
155 * EXC_COMMON_* - virt, relocated common handlers
156 *
157 * The EXC handlers are given a name, and branch to name_common, or the
158 * appropriate KVM or masking function. Vector handler varieties are as
159 * follows:
160 *
161 * EXC_{REAL|VIRT}_BEGIN/END - used to open-code the exception
162 *
163 * EXC_{REAL|VIRT} - standard exception
164 *
165 * EXC_{REAL|VIRT}_suffix
166 * where _suffix is:
167 * - _MASKABLE - maskable exception
168 * - _OOL - out of line with trampoline to common handler
169 * - _HV - HV exception
170 *
171 * There can be combinations, e.g., EXC_VIRT_OOL_MASKABLE_HV
172 *
173 * The one unusual case is __EXC_REAL_OOL_HV_DIRECT, which is
174 * an OOL vector that branches to a specified handler rather than the usual
175 * trampoline that goes to common. It, and other underscore macros, should
176 * be used with care.
177 *
178 * KVM handlers come in the following varieties:
179 * TRAMP_KVM
180 * TRAMP_KVM_SKIP
181 * TRAMP_KVM_HV
182 * TRAMP_KVM_HV_SKIP
183 *
184 * COMMON handlers come in the following varieties:
185 * EXC_COMMON_BEGIN/END - used to open-code the handler
186 * EXC_COMMON
187 * EXC_COMMON_ASYNC
188 * EXC_COMMON_HV
189 *
190 * TRAMP_REAL and TRAMP_VIRT can be used with BEGIN/END. KVM
191 * and OOL handlers are implemented as types of TRAMP and TRAMP_VIRT handlers.
192 */
193
6#define EXC_REAL_BEGIN(name, start, end) \ 194#define EXC_REAL_BEGIN(name, start, end) \
7 . = start ; \ 195 FIXED_SECTION_ENTRY_BEGIN_LOCATION(real_vectors, exc_real_##start##_##name, start)
8 .global exc_real_##start##_##name ; \
9exc_real_##start##_##name:
10 196
11#define EXC_REAL_END(name, start, end) 197#define EXC_REAL_END(name, start, end) \
198 FIXED_SECTION_ENTRY_END_LOCATION(real_vectors, exc_real_##start##_##name, end)
12 199
13#define EXC_VIRT_BEGIN(name, start, end) \ 200#define EXC_VIRT_BEGIN(name, start, end) \
14 . = start ; \ 201 FIXED_SECTION_ENTRY_BEGIN_LOCATION(virt_vectors, exc_virt_##start##_##name, start)
15 .global exc_virt_##start##_##name ; \
16exc_virt_##start##_##name:
17 202
18#define EXC_VIRT_END(name, start, end) 203#define EXC_VIRT_END(name, start, end) \
204 FIXED_SECTION_ENTRY_END_LOCATION(virt_vectors, exc_virt_##start##_##name, end)
19 205
20#define EXC_COMMON_BEGIN(name) \ 206#define EXC_COMMON_BEGIN(name) \
207 USE_TEXT_SECTION(); \
21 .align 7; \ 208 .align 7; \
22 .global name; \ 209 .global name; \
210 DEFINE_FIXED_SYMBOL(name); \
23name: 211name:
24 212
25#define TRAMP_REAL_BEGIN(name) \ 213#define TRAMP_REAL_BEGIN(name) \
26 .global name ; \ 214 FIXED_SECTION_ENTRY_BEGIN(real_trampolines, name)
27name: 215
216#define TRAMP_VIRT_BEGIN(name) \
217 FIXED_SECTION_ENTRY_BEGIN(virt_trampolines, name)
28 218
29#ifdef CONFIG_KVM_BOOK3S_64_HANDLER 219#ifdef CONFIG_KVM_BOOK3S_64_HANDLER
30#define TRAMP_KVM_BEGIN(name) \ 220#define TRAMP_KVM_BEGIN(name) \
@@ -33,9 +223,13 @@ name:
33#define TRAMP_KVM_BEGIN(name) 223#define TRAMP_KVM_BEGIN(name)
34#endif 224#endif
35 225
36#define EXC_REAL_NONE(start, end) 226#define EXC_REAL_NONE(start, end) \
227 FIXED_SECTION_ENTRY_BEGIN_LOCATION(real_vectors, exc_real_##start##_##unused, start); \
228 FIXED_SECTION_ENTRY_END_LOCATION(real_vectors, exc_real_##start##_##unused, end)
37 229
38#define EXC_VIRT_NONE(start, end) 230#define EXC_VIRT_NONE(start, end) \
231 FIXED_SECTION_ENTRY_BEGIN_LOCATION(virt_vectors, exc_virt_##start##_##unused, start); \
232 FIXED_SECTION_ENTRY_END_LOCATION(virt_vectors, exc_virt_##start##_##unused, end);
39 233
40 234
41#define EXC_REAL(name, start, end) \ 235#define EXC_REAL(name, start, end) \
@@ -77,6 +271,10 @@ name:
77 TRAMP_REAL_BEGIN(tramp_real_##name); \ 271 TRAMP_REAL_BEGIN(tramp_real_##name); \
78 STD_EXCEPTION_PSERIES_OOL(vec, name##_common); \ 272 STD_EXCEPTION_PSERIES_OOL(vec, name##_common); \
79 273
274#define EXC_REAL_OOL(name, start, end) \
275 __EXC_REAL_OOL(name, start, end); \
276 __TRAMP_REAL_REAL_OOL(name, start);
277
80#define __EXC_REAL_OOL_MASKABLE(name, start, end) \ 278#define __EXC_REAL_OOL_MASKABLE(name, start, end) \
81 __EXC_REAL_OOL(name, start, end); 279 __EXC_REAL_OOL(name, start, end);
82 280
@@ -84,6 +282,10 @@ name:
84 TRAMP_REAL_BEGIN(tramp_real_##name); \ 282 TRAMP_REAL_BEGIN(tramp_real_##name); \
85 MASKABLE_EXCEPTION_PSERIES_OOL(vec, name##_common); \ 283 MASKABLE_EXCEPTION_PSERIES_OOL(vec, name##_common); \
86 284
285#define EXC_REAL_OOL_MASKABLE(name, start, end) \
286 __EXC_REAL_OOL_MASKABLE(name, start, end); \
287 __TRAMP_REAL_REAL_OOL_MASKABLE(name, start);
288
87#define __EXC_REAL_OOL_HV_DIRECT(name, start, end, handler) \ 289#define __EXC_REAL_OOL_HV_DIRECT(name, start, end, handler) \
88 EXC_REAL_BEGIN(name, start, end); \ 290 EXC_REAL_BEGIN(name, start, end); \
89 __OOL_EXCEPTION(start, label, handler); \ 291 __OOL_EXCEPTION(start, label, handler); \
@@ -96,6 +298,10 @@ name:
96 TRAMP_REAL_BEGIN(tramp_real_##name); \ 298 TRAMP_REAL_BEGIN(tramp_real_##name); \
97 STD_EXCEPTION_HV_OOL(vec, name##_common); \ 299 STD_EXCEPTION_HV_OOL(vec, name##_common); \
98 300
301#define EXC_REAL_OOL_HV(name, start, end) \
302 __EXC_REAL_OOL_HV(name, start, end); \
303 __TRAMP_REAL_REAL_OOL_HV(name, start);
304
99#define __EXC_REAL_OOL_MASKABLE_HV(name, start, end) \ 305#define __EXC_REAL_OOL_MASKABLE_HV(name, start, end) \
100 __EXC_REAL_OOL(name, start, end); 306 __EXC_REAL_OOL(name, start, end);
101 307
@@ -103,36 +309,56 @@ name:
103 TRAMP_REAL_BEGIN(tramp_real_##name); \ 309 TRAMP_REAL_BEGIN(tramp_real_##name); \
104 MASKABLE_EXCEPTION_HV_OOL(vec, name##_common); \ 310 MASKABLE_EXCEPTION_HV_OOL(vec, name##_common); \
105 311
312#define EXC_REAL_OOL_MASKABLE_HV(name, start, end) \
313 __EXC_REAL_OOL_MASKABLE_HV(name, start, end); \
314 __TRAMP_REAL_REAL_OOL_MASKABLE_HV(name, start);
315
106#define __EXC_VIRT_OOL(name, start, end) \ 316#define __EXC_VIRT_OOL(name, start, end) \
107 EXC_VIRT_BEGIN(name, start, end); \ 317 EXC_VIRT_BEGIN(name, start, end); \
108 __OOL_EXCEPTION(start, label, tramp_virt_##name); \ 318 __OOL_EXCEPTION(start, label, tramp_virt_##name); \
109 EXC_VIRT_END(name, start, end); 319 EXC_VIRT_END(name, start, end);
110 320
111#define __TRAMP_REAL_VIRT_OOL(name, realvec) \ 321#define __TRAMP_REAL_VIRT_OOL(name, realvec) \
112 TRAMP_REAL_BEGIN(tramp_virt_##name); \ 322 TRAMP_VIRT_BEGIN(tramp_virt_##name); \
113 STD_RELON_EXCEPTION_PSERIES_OOL(realvec, name##_common); \ 323 STD_RELON_EXCEPTION_PSERIES_OOL(realvec, name##_common); \
114 324
325#define EXC_VIRT_OOL(name, start, end, realvec) \
326 __EXC_VIRT_OOL(name, start, end); \
327 __TRAMP_REAL_VIRT_OOL(name, realvec);
328
115#define __EXC_VIRT_OOL_MASKABLE(name, start, end) \ 329#define __EXC_VIRT_OOL_MASKABLE(name, start, end) \
116 __EXC_VIRT_OOL(name, start, end); 330 __EXC_VIRT_OOL(name, start, end);
117 331
118#define __TRAMP_REAL_VIRT_OOL_MASKABLE(name, realvec) \ 332#define __TRAMP_REAL_VIRT_OOL_MASKABLE(name, realvec) \
119 TRAMP_REAL_BEGIN(tramp_virt_##name); \ 333 TRAMP_VIRT_BEGIN(tramp_virt_##name); \
120 MASKABLE_RELON_EXCEPTION_PSERIES_OOL(realvec, name##_common); \ 334 MASKABLE_RELON_EXCEPTION_PSERIES_OOL(realvec, name##_common); \
121 335
336#define EXC_VIRT_OOL_MASKABLE(name, start, end, realvec) \
337 __EXC_VIRT_OOL_MASKABLE(name, start, end); \
338 __TRAMP_REAL_VIRT_OOL_MASKABLE(name, realvec);
339
122#define __EXC_VIRT_OOL_HV(name, start, end) \ 340#define __EXC_VIRT_OOL_HV(name, start, end) \
123 __EXC_VIRT_OOL(name, start, end); 341 __EXC_VIRT_OOL(name, start, end);
124 342
125#define __TRAMP_REAL_VIRT_OOL_HV(name, realvec) \ 343#define __TRAMP_REAL_VIRT_OOL_HV(name, realvec) \
126 TRAMP_REAL_BEGIN(tramp_virt_##name); \ 344 TRAMP_VIRT_BEGIN(tramp_virt_##name); \
127 STD_RELON_EXCEPTION_HV_OOL(realvec, name##_common); \ 345 STD_RELON_EXCEPTION_HV_OOL(realvec, name##_common); \
128 346
347#define EXC_VIRT_OOL_HV(name, start, end, realvec) \
348 __EXC_VIRT_OOL_HV(name, start, end); \
349 __TRAMP_REAL_VIRT_OOL_HV(name, realvec);
350
129#define __EXC_VIRT_OOL_MASKABLE_HV(name, start, end) \ 351#define __EXC_VIRT_OOL_MASKABLE_HV(name, start, end) \
130 __EXC_VIRT_OOL(name, start, end); 352 __EXC_VIRT_OOL(name, start, end);
131 353
132#define __TRAMP_REAL_VIRT_OOL_MASKABLE_HV(name, realvec) \ 354#define __TRAMP_REAL_VIRT_OOL_MASKABLE_HV(name, realvec) \
133 TRAMP_REAL_BEGIN(tramp_virt_##name); \ 355 TRAMP_VIRT_BEGIN(tramp_virt_##name); \
134 MASKABLE_RELON_EXCEPTION_HV_OOL(realvec, name##_common); \ 356 MASKABLE_RELON_EXCEPTION_HV_OOL(realvec, name##_common); \
135 357
358#define EXC_VIRT_OOL_MASKABLE_HV(name, start, end, realvec) \
359 __EXC_VIRT_OOL_MASKABLE_HV(name, start, end); \
360 __TRAMP_REAL_VIRT_OOL_MASKABLE_HV(name, realvec);
361
136#define TRAMP_KVM(area, n) \ 362#define TRAMP_KVM(area, n) \
137 TRAMP_KVM_BEGIN(do_kvm_##n); \ 363 TRAMP_KVM_BEGIN(do_kvm_##n); \
138 KVM_HANDLER(area, EXC_STD, n); \ 364 KVM_HANDLER(area, EXC_STD, n); \
@@ -141,6 +367,9 @@ name:
141 TRAMP_KVM_BEGIN(do_kvm_##n); \ 367 TRAMP_KVM_BEGIN(do_kvm_##n); \
142 KVM_HANDLER_SKIP(area, EXC_STD, n); \ 368 KVM_HANDLER_SKIP(area, EXC_STD, n); \
143 369
370/*
371 * HV variant exceptions get the 0x2 bit added to their trap number.
372 */
144#define TRAMP_KVM_HV(area, n) \ 373#define TRAMP_KVM_HV(area, n) \
145 TRAMP_KVM_BEGIN(do_kvm_H##n); \ 374 TRAMP_KVM_BEGIN(do_kvm_H##n); \
146 KVM_HANDLER(area, EXC_HV, n + 0x2); \ 375 KVM_HANDLER(area, EXC_HV, n + 0x2); \
diff --git a/arch/powerpc/kernel/exceptions-64s.S b/arch/powerpc/kernel/exceptions-64s.S
index d398e8716ef8..6ea330a3c51a 100644
--- a/arch/powerpc/kernel/exceptions-64s.S
+++ b/arch/powerpc/kernel/exceptions-64s.S
@@ -19,16 +19,68 @@
19#include <asm/head-64.h> 19#include <asm/head-64.h>
20 20
21/* 21/*
22 * There are a few constraints to be concerned with.
23 * - Real mode exceptions code/data must be located at their physical location.
24 * - Virtual mode exceptions must be mapped at their 0xc000... location.
25 * - Fixed location code must not call directly beyond the __end_interrupts
26 * area when built with CONFIG_RELOCATABLE. LOAD_HANDLER / bctr sequence
27 * must be used.
28 * - LOAD_HANDLER targets must be within first 64K of physical 0 /
29 * virtual 0xc00...
30 * - Conditional branch targets must be within +/-32K of caller.
31 *
32 * "Virtual exceptions" run with relocation on (MSR_IR=1, MSR_DR=1), and
33 * therefore don't have to run in physically located code or rfid to
34 * virtual mode kernel code. However on relocatable kernels they do have
35 * to branch to KERNELBASE offset because the rest of the kernel (outside
36 * the exception vectors) may be located elsewhere.
37 *
38 * Virtual exceptions correspond with physical, except their entry points
39 * are offset by 0xc000000000000000 and also tend to get an added 0x4000
40 * offset applied. Virtual exceptions are enabled with the Alternate
41 * Interrupt Location (AIL) bit set in the LPCR. However this does not
42 * guarantee they will be delivered virtually. Some conditions (see the ISA)
43 * cause exceptions to be delivered in real mode.
44 *
45 * It's impossible to receive interrupts below 0x300 via AIL.
46 *
47 * KVM: None of the virtual exceptions are from the guest. Anything that
48 * escalated to HV=1 from HV=0 is delivered via real mode handlers.
49 *
50 *
22 * We layout physical memory as follows: 51 * We layout physical memory as follows:
23 * 0x0000 - 0x00ff : Secondary processor spin code 52 * 0x0000 - 0x00ff : Secondary processor spin code
24 * 0x0100 - 0x17ff : pSeries Interrupt prologs 53 * 0x0100 - 0x18ff : Real mode pSeries interrupt vectors
25 * 0x1800 - 0x4000 : interrupt support common interrupt prologs 54 * 0x1900 - 0x3fff : Real mode trampolines
26 * 0x4000 - 0x5fff : pSeries interrupts with IR=1,DR=1 55 * 0x4000 - 0x58ff : Relon (IR=1,DR=1) mode pSeries interrupt vectors
27 * 0x6000 - 0x6fff : more interrupt support including for IR=1,DR=1 56 * 0x5900 - 0x6fff : Relon mode trampolines
28 * 0x7000 - 0x7fff : FWNMI data area 57 * 0x7000 - 0x7fff : FWNMI data area
29 * 0x8000 - 0x8fff : Initial (CPU0) segment table 58 * 0x8000 - .... : Common interrupt handlers, remaining early
30 * 0x9000 - : Early init and support code 59 * setup code, rest of kernel.
60 */
61OPEN_FIXED_SECTION(real_vectors, 0x0100, 0x1900)
62OPEN_FIXED_SECTION(real_trampolines, 0x1900, 0x4000)
63OPEN_FIXED_SECTION(virt_vectors, 0x4000, 0x5900)
64OPEN_FIXED_SECTION(virt_trampolines, 0x5900, 0x7000)
65#if defined(CONFIG_PPC_PSERIES) || defined(CONFIG_PPC_POWERNV)
66/*
67 * Data area reserved for FWNMI option.
68 * This address (0x7000) is fixed by the RPA.
69 * pseries and powernv need to keep the whole page from
70 * 0x7000 to 0x8000 free for use by the firmware
31 */ 71 */
72ZERO_FIXED_SECTION(fwnmi_page, 0x7000, 0x8000)
73OPEN_TEXT_SECTION(0x8000)
74#else
75OPEN_TEXT_SECTION(0x7000)
76#endif
77
78USE_FIXED_SECTION(real_vectors)
79
80#define LOAD_SYSCALL_HANDLER(reg) \
81 ld reg,PACAKBASE(r13); \
82 ori reg,reg,(ABS_ADDR(system_call_common))@l;
83
32 /* Syscall routine is used twice, in reloc-off and reloc-on paths */ 84 /* Syscall routine is used twice, in reloc-off and reloc-on paths */
33#define SYSCALL_PSERIES_1 \ 85#define SYSCALL_PSERIES_1 \
34BEGIN_FTR_SECTION \ 86BEGIN_FTR_SECTION \
@@ -42,7 +94,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_REAL_LE) \
42 94
43#define SYSCALL_PSERIES_2_RFID \ 95#define SYSCALL_PSERIES_2_RFID \
44 mfspr r12,SPRN_SRR1 ; \ 96 mfspr r12,SPRN_SRR1 ; \
45 LOAD_HANDLER(r10, system_call_common) ; \ 97 LOAD_SYSCALL_HANDLER(r10) ; \
46 mtspr SPRN_SRR0,r10 ; \ 98 mtspr SPRN_SRR0,r10 ; \
47 ld r10,PACAKMSR(r13) ; \ 99 ld r10,PACAKMSR(r13) ; \
48 mtspr SPRN_SRR1,r10 ; \ 100 mtspr SPRN_SRR1,r10 ; \
@@ -63,7 +115,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_REAL_LE) \
63 * is volatile across system calls. 115 * is volatile across system calls.
64 */ 116 */
65#define SYSCALL_PSERIES_2_DIRECT \ 117#define SYSCALL_PSERIES_2_DIRECT \
66 LOAD_HANDLER(r12, system_call_common) ; \ 118 LOAD_SYSCALL_HANDLER(r12) ; \
67 mtctr r12 ; \ 119 mtctr r12 ; \
68 mfspr r12,SPRN_SRR1 ; \ 120 mfspr r12,SPRN_SRR1 ; \
69 li r10,MSR_RI ; \ 121 li r10,MSR_RI ; \
@@ -86,7 +138,6 @@ END_FTR_SECTION_IFSET(CPU_FTR_REAL_LE) \
86 * Therefore any relative branches in this section must only 138 * Therefore any relative branches in this section must only
87 * branch to labels in this section. 139 * branch to labels in this section.
88 */ 140 */
89 . = 0x100
90 .globl __start_interrupts 141 .globl __start_interrupts
91__start_interrupts: 142__start_interrupts:
92 143
@@ -200,9 +251,6 @@ EXC_REAL_BEGIN(instruction_access_slb, 0x480, 0x500)
200#endif 251#endif
201EXC_REAL_END(instruction_access_slb, 0x480, 0x500) 252EXC_REAL_END(instruction_access_slb, 0x480, 0x500)
202 253
203 /* We open code these as we can't have a ". = x" (even with
204 * x = "." within a feature section
205 */
206EXC_REAL_BEGIN(hardware_interrupt, 0x500, 0x600) 254EXC_REAL_BEGIN(hardware_interrupt, 0x500, 0x600)
207 .globl hardware_interrupt_hv; 255 .globl hardware_interrupt_hv;
208hardware_interrupt_hv: 256hardware_interrupt_hv:
@@ -306,7 +354,6 @@ __EXC_REAL_OOL_HV(h_facility_unavailable, 0xf80, 0xfa0)
306 354
307EXC_REAL_NONE(0xfa0, 0x1200) 355EXC_REAL_NONE(0xfa0, 0x1200)
308 356
309
310#ifdef CONFIG_CBE_RAS 357#ifdef CONFIG_CBE_RAS
311EXC_REAL_HV(cbe_system_error, 0x1200, 0x1300) 358EXC_REAL_HV(cbe_system_error, 0x1200, 0x1300)
312 359
@@ -359,7 +406,6 @@ TRAMP_KVM_HV_SKIP(PACA_EXGEN, 0x1800)
359 406
360#else /* CONFIG_CBE_RAS */ 407#else /* CONFIG_CBE_RAS */
361EXC_REAL_NONE(0x1800, 0x1900) 408EXC_REAL_NONE(0x1800, 0x1900)
362 . = 0x1800
363#endif 409#endif
364 410
365 411
@@ -606,7 +652,13 @@ masked_##_H##interrupt: \
606 GET_SCRATCH0(r13); \ 652 GET_SCRATCH0(r13); \
607 ##_H##rfid; \ 653 ##_H##rfid; \
608 b . 654 b .
609 655
656/*
657 * Real mode exceptions actually use this too, but alternate
658 * instruction code patches (which end up in the common .text area)
659 * cannot reach these if they are put there.
660 */
661USE_FIXED_SECTION(virt_trampolines)
610 MASKED_INTERRUPT() 662 MASKED_INTERRUPT()
611 MASKED_INTERRUPT(H) 663 MASKED_INTERRUPT(H)
612 664
@@ -620,6 +672,7 @@ masked_##_H##interrupt: \
620 * in the generated frame has EE set to 1 or the exception 672 * in the generated frame has EE set to 1 or the exception
621 * handler will not properly re-enable them. 673 * handler will not properly re-enable them.
622 */ 674 */
675USE_TEXT_SECTION()
623_GLOBAL(__replay_interrupt) 676_GLOBAL(__replay_interrupt)
624 /* We are going to jump to the exception common code which 677 /* We are going to jump to the exception common code which
625 * will retrieve various register values from the PACA which 678 * will retrieve various register values from the PACA which
@@ -862,7 +915,7 @@ EXC_VIRT(altivec_assist, 0x5700, 0x5800, 0x1700)
862 915
863EXC_VIRT_NONE(0x5800, 0x5900) 916EXC_VIRT_NONE(0x5800, 0x5900)
864 917
865TRAMP_REAL_BEGIN(ppc64_runlatch_on_trampoline) 918EXC_COMMON_BEGIN(ppc64_runlatch_on_trampoline)
866 b __ppc64_runlatch_on 919 b __ppc64_runlatch_on
867 920
868/* 921/*
@@ -1070,6 +1123,7 @@ __TRAMP_REAL_VIRT_OOL(vsx_unavailable, 0xf40)
1070__TRAMP_REAL_VIRT_OOL(facility_unavailable, 0xf60) 1123__TRAMP_REAL_VIRT_OOL(facility_unavailable, 0xf60)
1071__TRAMP_REAL_VIRT_OOL_HV(h_facility_unavailable, 0xf80) 1124__TRAMP_REAL_VIRT_OOL_HV(h_facility_unavailable, 0xf80)
1072 1125
1126USE_FIXED_SECTION(virt_trampolines)
1073 /* 1127 /*
1074 * The __end_interrupts marker must be past the out-of-line (OOL) 1128 * The __end_interrupts marker must be past the out-of-line (OOL)
1075 * handlers, so that they are copied to real address 0x100 when running 1129 * handlers, so that they are copied to real address 0x100 when running
@@ -1080,21 +1134,7 @@ __TRAMP_REAL_VIRT_OOL_HV(h_facility_unavailable, 0xf80)
1080 .align 7 1134 .align 7
1081 .globl __end_interrupts 1135 .globl __end_interrupts
1082__end_interrupts: 1136__end_interrupts:
1083 1137DEFINE_FIXED_SYMBOL(__end_interrupts)
1084#if defined(CONFIG_PPC_PSERIES) || defined(CONFIG_PPC_POWERNV)
1085/*
1086 * Data area reserved for FWNMI option.
1087 * This address (0x7000) is fixed by the RPA.
1088 */
1089 .= 0x7000
1090 .globl fwnmi_data_area
1091fwnmi_data_area:
1092
1093 /* pseries and powernv need to keep the whole page from
1094 * 0x7000 to 0x8000 free for use by the firmware
1095 */
1096 . = 0x8000
1097#endif /* defined(CONFIG_PPC_PSERIES) || defined(CONFIG_PPC_POWERNV) */
1098 1138
1099EXC_COMMON(facility_unavailable_common, 0xf60, facility_unavailable_exception) 1139EXC_COMMON(facility_unavailable_common, 0xf60, facility_unavailable_exception)
1100EXC_COMMON(h_facility_unavailable_common, 0xf80, facility_unavailable_exception) 1140EXC_COMMON(h_facility_unavailable_common, 0xf80, facility_unavailable_exception)
@@ -1106,7 +1146,7 @@ EXC_COMMON(cbe_thermal_common, 0x1800, cbe_thermal_exception)
1106#endif /* CONFIG_CBE_RAS */ 1146#endif /* CONFIG_CBE_RAS */
1107 1147
1108 1148
1109EXC_COMMON_BEGIN(hmi_exception_early) 1149TRAMP_REAL_BEGIN(hmi_exception_early)
1110 EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_HV, 0xe60) 1150 EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_HV, 0xe60)
1111 mr r10,r1 /* Save r1 */ 1151 mr r10,r1 /* Save r1 */
1112 ld r1,PACAEMERGSP(r13) /* Use emergency stack */ 1152 ld r1,PACAEMERGSP(r13) /* Use emergency stack */
@@ -1430,6 +1470,13 @@ TRAMP_REAL_BEGIN(power4_fixup_nap)
1430 blr 1470 blr
1431#endif 1471#endif
1432 1472
1473CLOSE_FIXED_SECTION(real_vectors);
1474CLOSE_FIXED_SECTION(real_trampolines);
1475CLOSE_FIXED_SECTION(virt_vectors);
1476CLOSE_FIXED_SECTION(virt_trampolines);
1477
1478USE_TEXT_SECTION()
1479
1433/* 1480/*
1434 * Hash table stuff 1481 * Hash table stuff
1435 */ 1482 */
diff --git a/arch/powerpc/kernel/head_64.S b/arch/powerpc/kernel/head_64.S
index 6e21812ee672..79da0641bae2 100644
--- a/arch/powerpc/kernel/head_64.S
+++ b/arch/powerpc/kernel/head_64.S
@@ -28,6 +28,7 @@
28#include <asm/page.h> 28#include <asm/page.h>
29#include <asm/mmu.h> 29#include <asm/mmu.h>
30#include <asm/ppc_asm.h> 30#include <asm/ppc_asm.h>
31#include <asm/head-64.h>
31#include <asm/asm-offsets.h> 32#include <asm/asm-offsets.h>
32#include <asm/bug.h> 33#include <asm/bug.h>
33#include <asm/cputable.h> 34#include <asm/cputable.h>
@@ -65,9 +66,14 @@
65 * 2. The kernel is entered at __start 66 * 2. The kernel is entered at __start
66 */ 67 */
67 68
68 .text 69OPEN_FIXED_SECTION(first_256B, 0x0, 0x100)
69 .globl _stext 70USE_FIXED_SECTION(first_256B)
70_stext: 71 /*
72 * Offsets are relative from the start of fixed section, and
73 * first_256B starts at 0. Offsets are a bit easier to use here
74 * than the fixed section entry macros.
75 */
76 . = 0x0
71_GLOBAL(__start) 77_GLOBAL(__start)
72 /* NOP this out unconditionally */ 78 /* NOP this out unconditionally */
73BEGIN_FTR_SECTION 79BEGIN_FTR_SECTION
@@ -104,6 +110,7 @@ __secondary_hold_acknowledge:
104 . = 0x5c 110 . = 0x5c
105 .globl __run_at_load 111 .globl __run_at_load
106__run_at_load: 112__run_at_load:
113DEFINE_FIXED_SYMBOL(__run_at_load)
107 .long 0x72756e30 /* "run0" -- relocate to 0 by default */ 114 .long 0x72756e30 /* "run0" -- relocate to 0 by default */
108#endif 115#endif
109 116
@@ -133,7 +140,7 @@ __secondary_hold:
133 /* Tell the master cpu we're here */ 140 /* Tell the master cpu we're here */
134 /* Relocation is off & we are located at an address less */ 141 /* Relocation is off & we are located at an address less */
135 /* than 0x100, so only need to grab low order offset. */ 142 /* than 0x100, so only need to grab low order offset. */
136 std r24,__secondary_hold_acknowledge-_stext(0) 143 std r24,(ABS_ADDR(__secondary_hold_acknowledge))(0)
137 sync 144 sync
138 145
139 li r26,0 146 li r26,0
@@ -141,7 +148,7 @@ __secondary_hold:
141 tovirt(r26,r26) 148 tovirt(r26,r26)
142#endif 149#endif
143 /* All secondary cpus wait here until told to start. */ 150 /* All secondary cpus wait here until told to start. */
144100: ld r12,__secondary_hold_spinloop-_stext(r26) 151100: ld r12,(ABS_ADDR(__secondary_hold_spinloop))(r26)
145 cmpdi 0,r12,0 152 cmpdi 0,r12,0
146 beq 100b 153 beq 100b
147 154
@@ -166,12 +173,13 @@ __secondary_hold:
166#else 173#else
167 BUG_OPCODE 174 BUG_OPCODE
168#endif 175#endif
176CLOSE_FIXED_SECTION(first_256B)
169 177
170/* This value is used to mark exception frames on the stack. */ 178/* This value is used to mark exception frames on the stack. */
171 .section ".toc","aw" 179 .section ".toc","aw"
172exception_marker: 180exception_marker:
173 .tc ID_72656773_68657265[TC],0x7265677368657265 181 .tc ID_72656773_68657265[TC],0x7265677368657265
174 .text 182 .previous
175 183
176/* 184/*
177 * On server, we include the exception vectors code here as it 185 * On server, we include the exception vectors code here as it
@@ -180,8 +188,12 @@ exception_marker:
180 */ 188 */
181#ifdef CONFIG_PPC_BOOK3S 189#ifdef CONFIG_PPC_BOOK3S
182#include "exceptions-64s.S" 190#include "exceptions-64s.S"
191#else
192OPEN_TEXT_SECTION(0x100)
183#endif 193#endif
184 194
195USE_TEXT_SECTION()
196
185#ifdef CONFIG_PPC_BOOK3E 197#ifdef CONFIG_PPC_BOOK3E
186/* 198/*
187 * The booting_thread_hwid holds the thread id we want to boot in cpu 199 * The booting_thread_hwid holds the thread id we want to boot in cpu
@@ -558,7 +570,7 @@ __after_prom_start:
558#if defined(CONFIG_PPC_BOOK3E) 570#if defined(CONFIG_PPC_BOOK3E)
559 tovirt(r26,r26) /* on booke, we already run at PAGE_OFFSET */ 571 tovirt(r26,r26) /* on booke, we already run at PAGE_OFFSET */
560#endif 572#endif
561 lwz r7,__run_at_load-_stext(r26) 573 lwz r7,(FIXED_SYMBOL_ABS_ADDR(__run_at_load))(r26)
562#if defined(CONFIG_PPC_BOOK3E) 574#if defined(CONFIG_PPC_BOOK3E)
563 tophys(r26,r26) 575 tophys(r26,r26)
564#endif 576#endif
@@ -601,7 +613,7 @@ __after_prom_start:
601#if defined(CONFIG_PPC_BOOK3E) 613#if defined(CONFIG_PPC_BOOK3E)
602 tovirt(r26,r26) /* on booke, we already run at PAGE_OFFSET */ 614 tovirt(r26,r26) /* on booke, we already run at PAGE_OFFSET */
603#endif 615#endif
604 lwz r7,__run_at_load-_stext(r26) 616 lwz r7,(FIXED_SYMBOL_ABS_ADDR(__run_at_load))(r26)
605 cmplwi cr0,r7,1 617 cmplwi cr0,r7,1
606 bne 3f 618 bne 3f
607 619
@@ -611,19 +623,21 @@ __after_prom_start:
611 sub r5,r5,r11 623 sub r5,r5,r11
612#else 624#else
613 /* just copy interrupts */ 625 /* just copy interrupts */
614 LOAD_REG_IMMEDIATE(r5, __end_interrupts - _stext) 626 LOAD_REG_IMMEDIATE(r5, FIXED_SYMBOL_ABS_ADDR(__end_interrupts))
615#endif 627#endif
616 b 5f 628 b 5f
6173: 6293:
618#endif 630#endif
619 lis r5,(copy_to_here - _stext)@ha 631 /* # bytes of memory to copy */
620 addi r5,r5,(copy_to_here - _stext)@l /* # bytes of memory to copy */ 632 lis r5,(ABS_ADDR(copy_to_here))@ha
633 addi r5,r5,(ABS_ADDR(copy_to_here))@l
621 634
622 bl copy_and_flush /* copy the first n bytes */ 635 bl copy_and_flush /* copy the first n bytes */
623 /* this includes the code being */ 636 /* this includes the code being */
624 /* executed here. */ 637 /* executed here. */
625 addis r8,r3,(4f - _stext)@ha /* Jump to the copy of this code */ 638 /* Jump to the copy of this code that we just made */
626 addi r12,r8,(4f - _stext)@l /* that we just made */ 639 addis r8,r3,(ABS_ADDR(4f))@ha
640 addi r12,r8,(ABS_ADDR(4f))@l
627 mtctr r12 641 mtctr r12
628 bctr 642 bctr
629 643
@@ -635,8 +649,8 @@ p_end: .llong _end - copy_to_here
635 * Now copy the rest of the kernel up to _end, add 649 * Now copy the rest of the kernel up to _end, add
636 * _end - copy_to_here to the copy limit and run again. 650 * _end - copy_to_here to the copy limit and run again.
637 */ 651 */
638 addis r8,r26,(p_end - _stext)@ha 652 addis r8,r26,(ABS_ADDR(p_end))@ha
639 ld r8,(p_end - _stext)@l(r8) 653 ld r8,(ABS_ADDR(p_end))@l(r8)
640 add r5,r5,r8 654 add r5,r5,r8
6415: bl copy_and_flush /* copy the rest */ 6555: bl copy_and_flush /* copy the rest */
642 656
diff --git a/arch/powerpc/kernel/vmlinux.lds.S b/arch/powerpc/kernel/vmlinux.lds.S
index b59d75e194a5..2d1cfafd1404 100644
--- a/arch/powerpc/kernel/vmlinux.lds.S
+++ b/arch/powerpc/kernel/vmlinux.lds.S
@@ -44,11 +44,58 @@ SECTIONS
44 * Text, read only data and other permanent read-only sections 44 * Text, read only data and other permanent read-only sections
45 */ 45 */
46 46
47 /* Text and gots */ 47 _text = .;
48 _stext = .;
49
50 /*
51 * Head text.
52 * This needs to be in its own output section to avoid ld placing
53 * branch trampoline stubs randomly throughout the fixed sections,
54 * which it will do (even if the branch comes from another section)
55 * in order to optimize stub generation.
56 */
57 .head.text : AT(ADDR(.head.text) - LOAD_OFFSET) {
58#ifdef CONFIG_PPC64
59 KEEP(*(.head.text.first_256B));
60#ifdef CONFIG_PPC_BOOK3E
61# define END_FIXED 0x100
62#else
63 KEEP(*(.head.text.real_vectors));
64 *(.head.text.real_trampolines);
65 KEEP(*(.head.text.virt_vectors));
66 *(.head.text.virt_trampolines);
67# if defined(CONFIG_PPC_PSERIES) || defined(CONFIG_PPC_POWERNV)
68 KEEP(*(.head.data.fwnmi_page));
69# define END_FIXED 0x8000
70# else
71# define END_FIXED 0x7000
72# endif
73#endif
74 ASSERT((. == END_FIXED), "vmlinux.lds.S: fixed section overflow error");
75#else /* !CONFIG_PPC64 */
76 HEAD_TEXT
77#endif
78 } :kernel
79
80 /*
81 * If the build dies here, it's likely code in head_64.S is referencing
82 * labels it can't reach, and the linker inserting stubs without the
83 * assembler's knowledge. To debug, remove the above assert and
84 * rebuild. Look for branch stubs in the fixed section region.
85 *
86 * Linker stub generation could be allowed in "trampoline"
87 * sections if absolutely necessary, but this would require
88 * some rework of the fixed sections. Before resorting to this,
89 * consider references that have sufficient addressing range,
90 * (e.g., hand coded trampolines) so the linker does not have
91 * to add stubs.
92 *
93 * Linker stubs at the top of the main text section are currently not
94 * detected, and will result in a crash at boot due to offsets being
95 * wrong.
96 */
48 .text : AT(ADDR(.text) - LOAD_OFFSET) { 97 .text : AT(ADDR(.text) - LOAD_OFFSET) {
49 ALIGN_FUNCTION(); 98 ALIGN_FUNCTION();
50 HEAD_TEXT
51 _text = .;
52 /* careful! __ftr_alt_* sections need to be close to .text */ 99 /* careful! __ftr_alt_* sections need to be close to .text */
53 *(.text .fixup __ftr_alt_* .ref.text) 100 *(.text .fixup __ftr_alt_* .ref.text)
54 SCHED_TEXT 101 SCHED_TEXT