-rw-r--r--  arch/powerpc/kernel/cputable.c            | 11
-rw-r--r--  arch/powerpc/kernel/setup_32.c            |  2
-rw-r--r--  arch/powerpc/kernel/setup_64.c            |  4
-rw-r--r--  arch/powerpc/kernel/vdso.c                | 43
-rw-r--r--  arch/powerpc/kernel/vdso32/vdso32.lds.S   | 12
-rw-r--r--  arch/powerpc/kernel/vdso64/vdso64.lds.S   | 10
-rw-r--r--  arch/ppc/kernel/setup.c                   |  2
-rw-r--r--  include/asm-powerpc/asm-compat.h          | 52
-rw-r--r--  include/asm-powerpc/cputable.h            | 31
-rw-r--r--  include/asm-powerpc/firmware.h            | 15
-rw-r--r--  include/asm-powerpc/timex.h               |  8
11 files changed, 141 insertions(+), 49 deletions(-)
diff --git a/arch/powerpc/kernel/cputable.c b/arch/powerpc/kernel/cputable.c
index 6fdfaa4a82b8..bfd499ee3753 100644
--- a/arch/powerpc/kernel/cputable.c
+++ b/arch/powerpc/kernel/cputable.c
@@ -1202,14 +1202,13 @@ struct cpu_spec *identify_cpu(unsigned long offset)
 	return NULL;
 }
 
-void do_feature_fixups(unsigned long offset, unsigned long value,
-		       void *fixup_start, void *fixup_end)
+void do_feature_fixups(unsigned long value, void *fixup_start, void *fixup_end)
 {
 	struct fixup_entry {
 		unsigned long mask;
 		unsigned long value;
-		unsigned int *start;
-		unsigned int *end;
+		long start_off;
+		long end_off;
 	} *fcur, *fend;
 
 	fcur = fixup_start;
@@ -1224,8 +1223,8 @@ void do_feature_fixups(unsigned long offset, unsigned long value,
 		/* These PTRRELOCs will disappear once the new scheme for
 		 * modules and vdso is implemented
 		 */
-		pstart = PTRRELOC(fcur->start);
-		pend = PTRRELOC(fcur->end);
+		pstart = ((unsigned int *)fcur) + (fcur->start_off / 4);
+		pend = ((unsigned int *)fcur) + (fcur->end_off / 4);
 
 		for (p = pstart; p < pend; p++) {
 			*p = 0x60000000u;
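
The heart of the change is visible in the hunk above: each fixup entry now stores signed byte offsets from the entry itself to the code range it guards, instead of absolute (PTRRELOC'd) pointers, so the same walker can be run over kernel text and over the vdso images. Below is a minimal user-space sketch of that walk. It is not kernel code: struct fixup_entry mirrors the patched layout, while PPC_NOP, model_do_feature_fixups() and the demo table in main() are illustrative names invented for this example.

#include <stdint.h>
#include <stdio.h>

#define PPC_NOP 0x60000000u             /* "ori 0,0,0", the nop the fixup writes */

struct fixup_entry {
        unsigned long mask;
        unsigned long value;
        long start_off;                 /* byte offset from this entry to the first insn */
        long end_off;                   /* byte offset from this entry to one past the last insn */
};

/* Same walk as the patched do_feature_fixups(): keep a guarded range when
 * (features & mask) == value, otherwise overwrite it with nops. */
static void model_do_feature_fixups(unsigned long value,
                                    void *fixup_start, void *fixup_end)
{
        struct fixup_entry *fcur = fixup_start, *fend = fixup_end;

        for (; fcur < fend; fcur++) {
                unsigned int *pstart, *pend, *p;

                if ((value & fcur->mask) == fcur->value)
                        continue;

                /* Offsets are relative to the entry itself, so no PTRRELOC is
                 * needed and the table can describe code in a separate image. */
                pstart = (unsigned int *)fcur + fcur->start_off / 4;
                pend   = (unsigned int *)fcur + fcur->end_off / 4;

                for (p = pstart; p < pend; p++)
                        *p = PPC_NOP;
        }
}

int main(void)
{
        static unsigned int code[4] = { 1, 2, 3, 4 };   /* stand-in "instructions" */
        static struct fixup_entry table[1];

        table[0].mask  = 0x1;                           /* pretend feature bit */
        table[0].value = 0x1;                           /* keep code only if the bit is set */
        table[0].start_off = (long)((intptr_t)&code[1] - (intptr_t)&table[0]);
        table[0].end_off   = (long)((intptr_t)&code[3] - (intptr_t)&table[0]);

        model_do_feature_fixups(0, table, table + 1);   /* feature absent: nop the range */

        for (int i = 0; i < 4; i++)
                printf("code[%d] = 0x%08x\n", i, code[i]);
        return 0;
}

Running the sketch prints the two guarded words as 0x60000000, the PowerPC nop, which is what do_feature_fixups() writes over code the current CPU or firmware does not need.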
diff --git a/arch/powerpc/kernel/setup_32.c b/arch/powerpc/kernel/setup_32.c
index 769e511783b0..a4c2964a3ca6 100644
--- a/arch/powerpc/kernel/setup_32.c
+++ b/arch/powerpc/kernel/setup_32.c
@@ -103,7 +103,7 @@ unsigned long __init early_init(unsigned long dt_ptr)
 	 */
 	spec = identify_cpu(offset);
 
-	do_feature_fixups(offset, spec->cpu_features,
+	do_feature_fixups(spec->cpu_features,
 			  PTRRELOC(&__start___ftr_fixup),
 			  PTRRELOC(&__stop___ftr_fixup));
 
diff --git a/arch/powerpc/kernel/setup_64.c b/arch/powerpc/kernel/setup_64.c
index 1969b5686eee..16278968dab6 100644
--- a/arch/powerpc/kernel/setup_64.c
+++ b/arch/powerpc/kernel/setup_64.c
@@ -354,9 +354,9 @@ void __init setup_system(void)
 	/* Apply the CPUs-specific and firmware specific fixups to kernel
 	 * text (nop out sections not relevant to this CPU or this firmware)
 	 */
-	do_feature_fixups(0, cur_cpu_spec->cpu_features,
+	do_feature_fixups(cur_cpu_spec->cpu_features,
 			  &__start___ftr_fixup, &__stop___ftr_fixup);
-	do_feature_fixups(0, powerpc_firmware_features,
+	do_feature_fixups(powerpc_firmware_features,
 			  &__start___fw_ftr_fixup, &__stop___fw_ftr_fixup);
 
 	/*
diff --git a/arch/powerpc/kernel/vdso.c b/arch/powerpc/kernel/vdso.c
index 1a7e19cdab39..c913ad5cad29 100644
--- a/arch/powerpc/kernel/vdso.c
+++ b/arch/powerpc/kernel/vdso.c
@@ -36,6 +36,8 @@
 #include <asm/vdso.h>
 #include <asm/vdso_datapage.h>
 
+#include "setup.h"
+
 #undef DEBUG
 
 #ifdef DEBUG
@@ -586,6 +588,43 @@ static __init int vdso_fixup_datapage(struct lib32_elfinfo *v32,
 	return 0;
 }
 
+
+static __init int vdso_fixup_features(struct lib32_elfinfo *v32,
+				      struct lib64_elfinfo *v64)
+{
+	void *start32;
+	unsigned long size32;
+
+#ifdef CONFIG_PPC64
+	void *start64;
+	unsigned long size64;
+
+	start64 = find_section64(v64->hdr, "__ftr_fixup", &size64);
+	if (start64)
+		do_feature_fixups(cur_cpu_spec->cpu_features,
+				  start64, start64 + size64);
+
+	start64 = find_section64(v64->hdr, "__fw_ftr_fixup", &size64);
+	if (start64)
+		do_feature_fixups(powerpc_firmware_features,
+				  start64, start64 + size64);
+#endif /* CONFIG_PPC64 */
+
+	start32 = find_section32(v32->hdr, "__ftr_fixup", &size32);
+	if (start32)
+		do_feature_fixups(cur_cpu_spec->cpu_features,
+				  start32, start32 + size32);
+
+#ifdef CONFIG_PPC64
+	start32 = find_section32(v32->hdr, "__fw_ftr_fixup", &size32);
+	if (start32)
+		do_feature_fixups(powerpc_firmware_features,
+				  start32, start32 + size32);
+#endif /* CONFIG_PPC64 */
+
+	return 0;
+}
+
 static __init int vdso_fixup_alt_funcs(struct lib32_elfinfo *v32,
 					struct lib64_elfinfo *v64)
 {
@@ -634,6 +673,9 @@ static __init int vdso_setup(void)
 	if (vdso_fixup_datapage(&v32, &v64))
 		return -1;
 
+	if (vdso_fixup_features(&v32, &v64))
+		return -1;
+
 	if (vdso_fixup_alt_funcs(&v32, &v64))
 		return -1;
 
@@ -714,6 +756,7 @@ void __init vdso_init(void)
 	 * Setup the syscall map in the vDOS
 	 */
 	vdso_setup_syscall_map();
+
 	/*
 	 * Initialize the vDSO images in memory, that is do necessary
 	 * fixups of vDSO symbols, locate trampolines, etc...
diff --git a/arch/powerpc/kernel/vdso32/vdso32.lds.S b/arch/powerpc/kernel/vdso32/vdso32.lds.S
index 6187af2d54c3..26e138c4ce17 100644
--- a/arch/powerpc/kernel/vdso32/vdso32.lds.S
+++ b/arch/powerpc/kernel/vdso32/vdso32.lds.S
@@ -32,6 +32,18 @@ SECTIONS
 	PROVIDE (_etext = .);
 	PROVIDE (etext = .);
 
+	. = ALIGN(8);
+	__ftr_fixup : {
+		*(__ftr_fixup)
+	}
+
+#ifdef CONFIG_PPC64
+	. = ALIGN(8);
+	__fw_ftr_fixup : {
+		*(__fw_ftr_fixup)
+	}
+#endif
+
 	/* Other stuff is appended to the text segment: */
 	.rodata : { *(.rodata .rodata.* .gnu.linkonce.r.*) }
 	.rodata1 : { *(.rodata1) }
diff --git a/arch/powerpc/kernel/vdso64/vdso64.lds.S b/arch/powerpc/kernel/vdso64/vdso64.lds.S
index 4a2b6dc0960c..2d70f35d50b5 100644
--- a/arch/powerpc/kernel/vdso64/vdso64.lds.S
+++ b/arch/powerpc/kernel/vdso64/vdso64.lds.S
@@ -31,6 +31,16 @@ SECTIONS
 	PROVIDE (_etext = .);
 	PROVIDE (etext = .);
 
+	. = ALIGN(8);
+	__ftr_fixup : {
+		*(__ftr_fixup)
+	}
+
+	. = ALIGN(8);
+	__fw_ftr_fixup : {
+		*(__fw_ftr_fixup)
+	}
+
 	/* Other stuff is appended to the text segment: */
 	.rodata : { *(.rodata .rodata.* .gnu.linkonce.r.*) }
 	.rodata1 : { *(.rodata1) }
diff --git a/arch/ppc/kernel/setup.c b/arch/ppc/kernel/setup.c
index 41a640f16bdd..27faeca2c7a2 100644
--- a/arch/ppc/kernel/setup.c
+++ b/arch/ppc/kernel/setup.c
@@ -314,7 +314,7 @@ early_init(int r3, int r4, int r5)
 	 * that depend on which cpu we have.
 	 */
 	spec = identify_cpu(offset);
-	do_feature_fixups(offset, spec->cpu_features,
+	do_feature_fixups(spec->cpu_features,
 			  PTRRELOC(&__start___ftr_fixup),
 			  PTRRELOC(&__stop___ftr_fixup));
 
diff --git a/include/asm-powerpc/asm-compat.h b/include/asm-powerpc/asm-compat.h
index 8e64be0cc47d..c89bd58ee283 100644
--- a/include/asm-powerpc/asm-compat.h
+++ b/include/asm-powerpc/asm-compat.h
@@ -14,6 +14,58 @@
 # define ASM_CONST(x) __ASM_CONST(x)
 #endif
 
+
+/*
+ * Feature section common macros
+ *
+ * Note that the entries now contain offsets between the table entry
+ * and the code rather than absolute code pointers in order to be
+ * useable with the vdso shared library. There is also an assumption
+ * that values will be negative, that is, the fixup table has to be
+ * located after the code it fixes up.
+ */
+#ifdef CONFIG_PPC64
+#ifdef __powerpc64__
+/* 64 bits kernel, 64 bits code */
+#define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect)	\
+99:							\
+	.section sect,"a";				\
+	.align 3;					\
+98:							\
+	.llong msk;					\
+	.llong val;					\
+	.llong label##b-98b;				\
+	.llong 99b-98b;					\
+	.previous
+#else /* __powerpc64__ */
+/* 64 bits kernel, 32 bits code (ie. vdso32) */
+#define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect)	\
+99:							\
+	.section sect,"a";				\
+	.align 3;					\
+98:							\
+	.llong msk;					\
+	.llong val;					\
+	.long 0xffffffff;				\
+	.long label##b-98b;				\
+	.long 0xffffffff;				\
+	.long 99b-98b;					\
+	.previous
+#endif /* !__powerpc64__ */
+#else /* CONFIG_PPC64 */
+/* 32 bits kernel, 32 bits code */
+#define MAKE_FTR_SECTION_ENTRY(msk, val, label, sect)	\
+99:							\
+	.section sect,"a";				\
+	.align 2;					\
+98:							\
+	.long msk;					\
+	.long val;					\
+	.long label##b-98b;				\
+	.long 99b-98b;					\
+	.previous
+#endif /* !CONFIG_PPC64 */
+
 #ifdef __powerpc64__
 
 /* operations for longs and pointers */
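
On a 64-bit kernel, both the kernel/vdso64 variant and the vdso32 variant of MAKE_FTR_SECTION_ENTRY have to decode into the same struct fixup_entry (two unsigned longs followed by two long offsets) that do_feature_fixups() reads. The 0xffffffff pad words in the 32-bit-code variant make each 32-bit difference read back, on big-endian powerpc, as a sign-extended 64-bit value, which only works when the offsets are negative, matching the comment's requirement that the table sit after the code it fixes up. The declarations below are an illustrative sketch of the three resulting in-memory layouts; none of these type names exist in the patch.

#include <stdint.h>

/* 64-bit kernel, 64-bit code (kernel text, vdso64): four doubleword fields,
 * read directly as the kernel's struct fixup_entry. */
struct ftr_entry_ppc64 {
        uint64_t mask;
        uint64_t value;
        int64_t  start_off;     /* label##b - 98b: start of guarded code minus entry address */
        int64_t  end_off;       /* 99b - 98b: end of guarded code minus entry address */
};

/* 64-bit kernel, 32-bit code (vdso32): each offset is emitted as a 0xffffffff
 * pad word followed by a 32-bit difference; read as one big-endian 64-bit
 * long, that is the sign extension of a negative 32-bit offset. */
struct ftr_entry_vdso32 {
        uint64_t mask;
        uint64_t value;
        uint32_t start_pad;     /* 0xffffffff */
        int32_t  start_off;     /* label##b - 98b, negative */
        uint32_t end_pad;       /* 0xffffffff */
        int32_t  end_off;       /* 99b - 98b, negative */
};

/* 32-bit kernel, 32-bit code: four word fields. */
struct ftr_entry_ppc32 {
        uint32_t mask;
        uint32_t value;
        int32_t  start_off;
        int32_t  end_off;
};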
diff --git a/include/asm-powerpc/cputable.h b/include/asm-powerpc/cputable.h
index 65faf322ace0..02e52d68cbbe 100644
--- a/include/asm-powerpc/cputable.h
+++ b/include/asm-powerpc/cputable.h
@@ -92,8 +92,8 @@ extern struct cpu_spec *cur_cpu_spec;
 extern unsigned int __start___ftr_fixup, __stop___ftr_fixup;
 
 extern struct cpu_spec *identify_cpu(unsigned long offset);
-extern void do_feature_fixups(unsigned long offset, unsigned long value,
-			      void *fixup_start, void *fixup_end);
+extern void do_feature_fixups(unsigned long value, void *fixup_start,
+			      void *fixup_end);
 
 #endif /* __ASSEMBLY__ */
 
99 99
@@ -435,32 +435,11 @@ static inline int cpu_has_feature(unsigned long feature)
 #ifdef __ASSEMBLY__
 
 #define BEGIN_FTR_SECTION_NESTED(label)	label:
-#define BEGIN_FTR_SECTION		BEGIN_FTR_SECTION_NESTED(98)
-
-#ifndef __powerpc64__
-#define END_FTR_SECTION_NESTED(msk, val, label) \
-99:						\
-	.section __ftr_fixup,"a";		\
-	.align 2;				\
-	.long msk;				\
-	.long val;				\
-	.long label##b;				\
-	.long 99b;				\
-	.previous
-#else /* __powerpc64__ */
-#define END_FTR_SECTION_NESTED(msk, val, label) \
-99:						\
-	.section __ftr_fixup,"a";		\
-	.align 3;				\
-	.llong msk;				\
-	.llong val;				\
-	.llong label##b;			\
-	.llong 99b;				\
-	.previous
-#endif /* __powerpc64__ */
-
+#define BEGIN_FTR_SECTION		BEGIN_FTR_SECTION_NESTED(97)
+#define END_FTR_SECTION_NESTED(msk, val, label)	\
+	MAKE_FTR_SECTION_ENTRY(msk, val, label, __ftr_fixup)
 #define END_FTR_SECTION(msk, val)	\
-	END_FTR_SECTION_NESTED(msk, val, 98)
+	END_FTR_SECTION_NESTED(msk, val, 97)
 
 #define END_FTR_SECTION_IFSET(msk)	END_FTR_SECTION((msk), (msk))
 #define END_FTR_SECTION_IFCLR(msk)	END_FTR_SECTION((msk), 0)
diff --git a/include/asm-powerpc/firmware.h b/include/asm-powerpc/firmware.h
index c16e0a6b9dab..fdf9aff71150 100644
--- a/include/asm-powerpc/firmware.h
+++ b/include/asm-powerpc/firmware.h
@@ -100,17 +100,12 @@ extern unsigned int __start___fw_ftr_fixup, __stop___fw_ftr_fixup;
 
 #else /* __ASSEMBLY__ */
 
-#define BEGIN_FW_FTR_SECTION		96:
-
+#define BEGIN_FW_FTR_SECTION_NESTED(label)	label:
+#define BEGIN_FW_FTR_SECTION		BEGIN_FW_FTR_SECTION_NESTED(97)
+#define END_FW_FTR_SECTION_NESTED(msk, val, label) \
+	MAKE_FTR_SECTION_ENTRY(msk, val, label, __fw_ftr_fixup)
 #define END_FW_FTR_SECTION(msk, val)	\
-97:						\
-	.section __fw_ftr_fixup,"a";		\
-	.align 3;				\
-	.llong msk;				\
-	.llong val;				\
-	.llong 96b;				\
-	.llong 97b;				\
-	.previous
+	END_FW_FTR_SECTION_NESTED(msk, val, 97)
 
 #define END_FW_FTR_SECTION_IFSET(msk)	END_FW_FTR_SECTION((msk), (msk))
 #define END_FW_FTR_SECTION_IFCLR(msk)	END_FW_FTR_SECTION((msk), 0)
diff --git a/include/asm-powerpc/timex.h b/include/asm-powerpc/timex.h
index 3b9a8e786806..e3f08cf91486 100644
--- a/include/asm-powerpc/timex.h
+++ b/include/asm-powerpc/timex.h
@@ -30,13 +30,15 @@ static inline cycles_t get_cycles(void)
 	ret = 0;
 
 	__asm__ __volatile__(
-		"98:	mftb %0\n"
+		"97:	mftb %0\n"
 		"99:\n"
 		".section __ftr_fixup,\"a\"\n"
+		".align 2\n"
+		"98:\n"
 		"	.long %1\n"
 		"	.long 0\n"
-		"	.long 98b\n"
-		"	.long 99b\n"
+		"	.long 97b-98b\n"
+		"	.long 99b-98b\n"
 		".previous"
 		: "=r" (ret) : "i" (CPU_FTR_601));
 #endif
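
timex.h carries an open-coded fixup record rather than one built with the macros, so it is converted by hand: a new local label 98 marks the entry itself inside __ftr_fixup, and the last two words become 97b-98b and 99b-98b, i.e. entry-relative offsets. For reference, a sketch of how the non-PPC64 branch of get_cycles() reads after this hunk (powerpc-only inline asm; CPU_FTR_601 comes from the kernel's cputable.h, cycles_t is simplified to unsigned long, and the function is renamed to mark it as a reconstruction rather than the file's actual code):

static inline unsigned long get_cycles_sketch(void)
{
        unsigned long ret;

        ret = 0;
        __asm__ __volatile__(
                "97:	mftb %0\n"              /* nopped out at boot on a 601 */
                "99:\n"
                ".section __ftr_fixup,\"a\"\n"
                ".align 2\n"
                "98:\n"                         /* start of the fixup entry */
                "	.long %1\n"             /* mask: CPU_FTR_601 */
                "	.long 0\n"              /* value: keep mftb only if the bit is clear */
                "	.long 97b-98b\n"        /* start_off, relative to the entry */
                "	.long 99b-98b\n"        /* end_off, relative to the entry */
                ".previous"
                : "=r" (ret) : "i" (CPU_FTR_601));
        return ret;
}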