author    | Nicholas Piggin <npiggin@gmail.com>   | 2018-07-05 04:47:00 -0400
committer | Michael Ellerman <mpe@ellerman.id.au> | 2018-07-15 21:37:21 -0400
commit    | 2bf1071a8d50928a4ae366bb3108833166c2b70c
tree      | ebffef07f7ebbb9bb1ba231c4cb8ff00cc6fd795 /arch/powerpc/perf/isa207-common.c
parent    | ce397d215ccd07b8ae3f71db689aedb85d56ab40
powerpc/64s: Remove POWER9 DD1 support
POWER9 DD1 was never a product. It is no longer supported by upstream
firmware, and it is not effectively supported in Linux due to lack of
testing.
Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Reviewed-by: Michael Ellerman <mpe@ellerman.id.au>
[mpe: Remove arch_make_huge_pte() entirely]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'arch/powerpc/perf/isa207-common.c')
-rw-r--r-- | arch/powerpc/perf/isa207-common.c | 12
1 file changed, 5 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/perf/isa207-common.c b/arch/powerpc/perf/isa207-common.c
index 2efee3f196f5..177de814286f 100644
--- a/arch/powerpc/perf/isa207-common.c
+++ b/arch/powerpc/perf/isa207-common.c
@@ -59,7 +59,7 @@ static bool is_event_valid(u64 event)
 {
 	u64 valid_mask = EVENT_VALID_MASK;
 
-	if (cpu_has_feature(CPU_FTR_ARCH_300) && !cpu_has_feature(CPU_FTR_POWER9_DD1))
+	if (cpu_has_feature(CPU_FTR_ARCH_300))
 		valid_mask = p9_EVENT_VALID_MASK;
 
 	return !(event & ~valid_mask);
@@ -86,8 +86,6 @@ static void mmcra_sdar_mode(u64 event, unsigned long *mmcra)
 	 * Incase of Power9:
 	 * Marked event: MMCRA[SDAR_MODE] will be set to 0b00 ('No Updates'),
 	 * or if group already have any marked events.
-	 * Non-Marked events (for DD1):
-	 * MMCRA[SDAR_MODE] will be set to 0b01
 	 * For rest
 	 * MMCRA[SDAR_MODE] will be set from event code.
 	 * If sdar_mode from event is zero, default to 0b01. Hardware
@@ -96,7 +94,7 @@ static void mmcra_sdar_mode(u64 event, unsigned long *mmcra)
 	if (cpu_has_feature(CPU_FTR_ARCH_300)) {
 		if (is_event_marked(event) || (*mmcra & MMCRA_SAMPLE_ENABLE))
 			*mmcra &= MMCRA_SDAR_MODE_NO_UPDATES;
-		else if (!cpu_has_feature(CPU_FTR_POWER9_DD1) && p9_SDAR_MODE(event))
+		else if (p9_SDAR_MODE(event))
 			*mmcra |= p9_SDAR_MODE(event) << MMCRA_SDAR_MODE_SHIFT;
 		else
 			*mmcra |= MMCRA_SDAR_MODE_DCACHE;
@@ -106,7 +104,7 @@ static void mmcra_sdar_mode(u64 event, unsigned long *mmcra)
 
 static u64 thresh_cmp_val(u64 value)
 {
-	if (cpu_has_feature(CPU_FTR_ARCH_300) && !cpu_has_feature(CPU_FTR_POWER9_DD1))
+	if (cpu_has_feature(CPU_FTR_ARCH_300))
 		return value << p9_MMCRA_THR_CMP_SHIFT;
 
 	return value << MMCRA_THR_CMP_SHIFT;
@@ -114,7 +112,7 @@ static u64 thresh_cmp_val(u64 value)
 
 static unsigned long combine_from_event(u64 event)
 {
-	if (cpu_has_feature(CPU_FTR_ARCH_300) && !cpu_has_feature(CPU_FTR_POWER9_DD1))
+	if (cpu_has_feature(CPU_FTR_ARCH_300))
 		return p9_EVENT_COMBINE(event);
 
 	return EVENT_COMBINE(event);
@@ -122,7 +120,7 @@ static unsigned long combine_from_event(u64 event)
 
 static unsigned long combine_shift(unsigned long pmc)
 {
-	if (cpu_has_feature(CPU_FTR_ARCH_300) && !cpu_has_feature(CPU_FTR_POWER9_DD1))
+	if (cpu_has_feature(CPU_FTR_ARCH_300))
 		return p9_MMCR1_COMBINE_SHIFT(pmc);
 
 	return MMCR1_COMBINE_SHIFT(pmc);
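For reference, below is a minimal, self-contained userspace sketch of the SDAR_MODE selection that mmcra_sdar_mode() performs once the DD1 check is dropped. The constant values, the cpu_has_arch_300()/is_event_marked() helpers, and main() are illustrative stand-ins, not the kernel's definitions (those live in arch/powerpc/perf/isa207-common.h and the CPU feature machinery); only the branch structure mirrors the post-patch code.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins; the real values come from isa207-common.h. */
#define MMCRA_SAMPLE_ENABLE		(1ULL << 63)
#define MMCRA_SDAR_MODE_SHIFT		42
#define MMCRA_SDAR_MODE_NO_UPDATES	~(0x3ULL << MMCRA_SDAR_MODE_SHIFT)
#define MMCRA_SDAR_MODE_DCACHE		(0x1ULL << MMCRA_SDAR_MODE_SHIFT)
#define p9_SDAR_MODE(event)		(((event) >> 50) & 0x3ULL)

/* Hypothetical stand-ins for the kernel's feature and event tests. */
static bool cpu_has_arch_300(void) { return true; }		/* ISA v3.00 CPU */
static bool is_event_marked(uint64_t event) { return event & (1ULL << 60); }

/*
 * Mirrors the post-patch branch structure: marked or sampling events get
 * "No Updates"; otherwise SDAR_MODE comes from the event code, defaulting
 * to 0b01 (D-cache mode here) when the event encodes zero.
 */
static void mmcra_sdar_mode(uint64_t event, uint64_t *mmcra)
{
	if (cpu_has_arch_300()) {
		if (is_event_marked(event) || (*mmcra & MMCRA_SAMPLE_ENABLE))
			*mmcra &= MMCRA_SDAR_MODE_NO_UPDATES;
		else if (p9_SDAR_MODE(event))
			*mmcra |= p9_SDAR_MODE(event) << MMCRA_SDAR_MODE_SHIFT;
		else
			*mmcra |= MMCRA_SDAR_MODE_DCACHE;
	}
	/* The pre-ISA-3.00 path is omitted from this sketch. */
}

int main(void)
{
	uint64_t mmcra = 0;

	/* Non-marked event whose code selects SDAR mode 0b10. */
	mmcra_sdar_mode(0x2ULL << 50, &mmcra);
	printf("mmcra = 0x%016llx\n", (unsigned long long)mmcra);
	return 0;
}
```

The point of the patch is visible in the condition: with the CPU_FTR_POWER9_DD1 feature bit gone, every `cpu_has_feature(CPU_FTR_ARCH_300) && !cpu_has_feature(CPU_FTR_POWER9_DD1)` guard collapses to a plain ARCH_300 check, and the DD1-only SDAR_MODE special case disappears.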