diff options
| -rw-r--r-- | arch/x86/kernel/cpu/mtrr/generic.c | 84 |
1 file changed, 77 insertions, 7 deletions
diff --git a/arch/x86/kernel/cpu/mtrr/generic.c b/arch/x86/kernel/cpu/mtrr/generic.c index 14f4f0c0329a..9f27228ceffd 100644 --- a/arch/x86/kernel/cpu/mtrr/generic.c +++ b/arch/x86/kernel/cpu/mtrr/generic.c | |||
| @@ -64,6 +64,18 @@ static inline void k8_check_syscfg_dram_mod_en(void) | |||
| 64 | } | 64 | } |
| 65 | } | 65 | } |
| 66 | 66 | ||
| 67 | /* Get the size of contiguous MTRR range */ | ||
| 68 | static u64 get_mtrr_size(u64 mask) | ||
| 69 | { | ||
| 70 | u64 size; | ||
| 71 | |||
| 72 | mask >>= PAGE_SHIFT; | ||
| 73 | mask |= size_or_mask; | ||
| 74 | size = -mask; | ||
| 75 | size <<= PAGE_SHIFT; | ||
| 76 | return size; | ||
| 77 | } | ||
| 78 | |||
| 67 | /* | 79 | /* |
| 68 | * Check and return the effective type for MTRR-MTRR type overlap. | 80 | * Check and return the effective type for MTRR-MTRR type overlap. |
| 69 | * Returns 1 if the effective type is UNCACHEABLE, else returns 0 | 81 | * Returns 1 if the effective type is UNCACHEABLE, else returns 0 |
| @@ -92,17 +104,19 @@ static int check_type_overlap(u8 *prev, u8 *curr) | |||
| 92 | } | 104 | } |
| 93 | 105 | ||
| 94 | /* | 106 | /* |
| 95 | * Returns the effective MTRR type for the region | 107 | * Error/Semi-error returns: |
| 96 | * Error returns: | 108 | * 0xFF - when MTRR is not enabled |
| 97 | * - 0xFE - when the range is "not entirely covered" by _any_ var range MTRR | 109 | * *repeat == 1 implies [start:end] spanned across MTRR range and type returned |
| 98 | * - 0xFF - when MTRR is not enabled | 110 | * corresponds only to [start:*partial_end]. |
| 111 | * Caller has to lookup again for [*partial_end:end]. | ||
| 99 | */ | 112 | */ |
| 100 | u8 mtrr_type_lookup(u64 start, u64 end) | 113 | static u8 __mtrr_type_lookup(u64 start, u64 end, u64 *partial_end, int *repeat) |
| 101 | { | 114 | { |
| 102 | int i; | 115 | int i; |
| 103 | u64 base, mask; | 116 | u64 base, mask; |
| 104 | u8 prev_match, curr_match; | 117 | u8 prev_match, curr_match; |
| 105 | 118 | ||
| 119 | *repeat = 0; | ||
| 106 | if (!mtrr_state_set) | 120 | if (!mtrr_state_set) |
| 107 | return 0xFF; | 121 | return 0xFF; |
| 108 | 122 | ||
| @@ -153,8 +167,34 @@ u8 mtrr_type_lookup(u64 start, u64 end) | |||
| 153 | 167 | ||
| 154 | start_state = ((start & mask) == (base & mask)); | 168 | start_state = ((start & mask) == (base & mask)); |
| 155 | end_state = ((end & mask) == (base & mask)); | 169 | end_state = ((end & mask) == (base & mask)); |
| 156 | if (start_state != end_state) | 170 | |
| 157 | return 0xFE; | 171 | if (start_state != end_state) { |
| 172 | /* | ||
| 173 | * We have start:end spanning across an MTRR. | ||
| 174 | * We split the region into | ||
| 175 | * either | ||
| 176 | * (start:mtrr_end) (mtrr_end:end) | ||
| 177 | * or | ||
| 178 | * (start:mtrr_start) (mtrr_start:end) | ||
| 179 | * depending on kind of overlap. | ||
| 180 | * Return the type for first region and a pointer to | ||
| 181 | * the start of second region so that caller will | ||
| 182 | * lookup again on the second region. | ||
| 183 | * Note: This way we handle multiple overlaps as well. | ||
| 184 | */ | ||
| 185 | if (start_state) | ||
| 186 | *partial_end = base + get_mtrr_size(mask); | ||
| 187 | else | ||
| 188 | *partial_end = base; | ||
| 189 | |||
| 190 | if (unlikely(*partial_end <= start)) { | ||
| 191 | WARN_ON(1); | ||
| 192 | *partial_end = start + PAGE_SIZE; | ||
| 193 | } | ||
| 194 | |||
| 195 | end = *partial_end - 1; /* end is inclusive */ | ||
| 196 | *repeat = 1; | ||
| 197 | } | ||
| 158 | 198 | ||
| 159 | if ((start & mask) != (base & mask)) | 199 | if ((start & mask) != (base & mask)) |
| 160 | continue; | 200 | continue; |
| @@ -180,6 +220,36 @@ u8 mtrr_type_lookup(u64 start, u64 end) | |||
| 180 | return mtrr_state.def_type; | 220 | return mtrr_state.def_type; |
| 181 | } | 221 | } |
| 182 | 222 | ||
| 223 | /* | ||
| 224 | * Returns the effective MTRR type for the region | ||
| 225 | * Error return: | ||
| 226 | * 0xFF - when MTRR is not enabled | ||
| 227 | */ | ||
| 228 | u8 mtrr_type_lookup(u64 start, u64 end) | ||
| 229 | { | ||
| 230 | u8 type, prev_type; | ||
| 231 | int repeat; | ||
| 232 | u64 partial_end; | ||
| 233 | |||
| 234 | type = __mtrr_type_lookup(start, end, &partial_end, &repeat); | ||
| 235 | |||
| 236 | /* | ||
| 237 | * Common path is with repeat = 0. | ||
| 238 | * However, we can have cases where [start:end] spans across some | ||
| 239 | * MTRR range. Do repeated lookups for that case here. | ||
| 240 | */ | ||
| 241 | while (repeat) { | ||
| 242 | prev_type = type; | ||
| 243 | start = partial_end; | ||
| 244 | type = __mtrr_type_lookup(start, end, &partial_end, &repeat); | ||
| 245 | |||
| 246 | if (check_type_overlap(&prev_type, &type)) | ||
| 247 | return type; | ||
| 248 | } | ||
| 249 | |||
| 250 | return type; | ||
| 251 | } | ||
| 252 | |||
| 183 | /* Get the MSR pair relating to a var range */ | 253 | /* Get the MSR pair relating to a var range */ |
| 184 | static void | 254 | static void |
| 185 | get_mtrr_var_range(unsigned int index, struct mtrr_var_range *vr) | 255 | get_mtrr_var_range(unsigned int index, struct mtrr_var_range *vr) |
