7 #include <linux/module.h>
13 #include <asm/cpufeature.h>
14 #include <asm/tlbflush.h>
/* Bitmask accumulating which MTRR classes differed between CPUs at boot. */
33 static unsigned long smp_changes_mask;
/* Non-zero once the cached mtrr_state has been populated from hardware. */
34 static int mtrr_state_set;
/*
 * AMD K8 quirk: warn and clear the SYSCFG DRAM-modify-enable bit if the
 * BIOS left it set.  NOTE(review): most of this function's body is
 * missing from this excerpt; only the warning-message tail and the
 * bit-clear are visible.
 */
48 static inline void k8_check_syscfg_dram_mod_en(
void)
59 " not cleared by BIOS, clearing this bit\n",
/* Clear the DRAM-modify bit in the (presumed) SYSCFG MSR low word. */
61 lo &= ~K8_MTRRFIXRANGE_DRAM_MODIFY;
/*
 * Core MTRR type lookup for a [start, end] physical range: fixed ranges
 * first (below 1MiB), then a scan of the variable ranges.
 * NOTE(review): this excerpt omits many original lines (signature,
 * locals, returns); comments describe only the visible fragments.
 */
116 u8 prev_match, curr_match;
/* Fixed-range MTRRs only cover the first 1MiB of physical memory. */
129 if (
mtrr_state.have_fixed && (start < 0x100000)) {
/* 0x00000-0x7FFFF: 64KiB-granularity fixed ranges. */
132 if (start < 0x80000) {
134 idx += (start >> 16);
136 }
/* 0x80000-0xBFFFF: 16KiB-granularity fixed ranges. */
else if (start < 0xC0000) {
138 idx += ((start - 0x80000) >> 14);
140 }
/* 0xC0000-0xFFFFF: 4KiB-granularity fixed ranges. */
else if (start < 0x1000000) {
142 idx += ((start - 0xC0000) >> 12);
/* Skip variable ranges whose valid bit (bit 11 of mask_lo) is clear. */
159 if (!(
mtrr_state.var_ranges[i].mask_lo & (1 << 11)))
165 (
mtrr_state.var_ranges[i].mask_lo & PAGE_MASK);
/* Does each endpoint of the request fall inside this variable range? */
167 start_state = ((start &
mask) == (base & mask));
168 end_state = ((end &
mask) == (base & mask));
/* Request straddles the range boundary: compute where it splits... */
170 if (start_state != end_state) {
185 *partial_end = base + get_mtrr_size(mask);
/* Defensive clamp in case the computed boundary did not advance. */
189 if (
unlikely(*partial_end <= start)) {
/* ...and shrink this lookup to the portion before the boundary. */
194 end = *partial_end - 1;
198 if ((start & mask) != (base & mask))
/* Memory type of this variable range lives in the low byte of base_lo. */
201 curr_match =
mtrr_state.var_ranges[
i].base_lo & 0xff;
/* 0xFF is used as the "no match yet" sentinel. */
202 if (prev_match == 0xFF) {
203 prev_match = curr_match;
207 if (check_type_overlap(&prev_match, &curr_match))
/* Above 4GiB and below mtrr_tom2 — presumably the AMD Tom2 WB region; confirm. */
212 if (start >= (1ULL<<32) && (end <
mtrr_tom2))
216 if (prev_match != 0xFF)
/*
 * Wrapper around __mtrr_type_lookup: when a range only partially
 * matched, repeats the lookup for the remainder and merges the types.
 * NOTE(review): the surrounding control flow is missing from this excerpt.
 */
233 type = __mtrr_type_lookup(start, end, &partial_end, &repeat);
/* Second leg of a split range lookup. */
243 type = __mtrr_type_lookup(start, end, &partial_end, &repeat);
245 if (check_type_overlap(&prev_type, &type))
/*
 * Read the fixed-range MTRR MSRs into *frs, viewed as 32-bit words.
 * NOTE(review): the loop bodies (the actual rdmsr calls) are missing
 * from this excerpt.
 */
274 static void get_fixed_ranges(
mtrr_type *frs)
276 unsigned int *
p = (
unsigned int *)frs;
/* K8 BIOS quirk check must precede any fixed-range MSR access. */
279 k8_check_syscfg_dram_mod_en();
/* Two MSRs of one granularity, then eight of another (bodies not visible). */
283 for (i = 0; i < 2; i++)
285 for (i = 0; i < 8; i++)
/* Flush the pending coalesced fixed-range line to the debug log. */
299 static void __init print_fixed_last(
void)
304 pr_debug(
" %05X-%05X %s\n", last_fixed_start,
/* Record the currently-open coalesced fixed range and its memory type. */
310 static void __init update_fixed_last(
unsigned base,
unsigned end,
313 last_fixed_start = base;
314 last_fixed_end =
end;
315 last_fixed_type =
type;
/*
 * Print eight consecutive fixed ranges of 'step' bytes starting at
 * 'base', coalescing adjacent same-type ranges via the last_fixed_*
 * trackers.  NOTE(review): some branches are missing from this excerpt.
 */
319 print_fixed(
unsigned base,
unsigned step,
const mtrr_type *types)
323 for (i = 0; i < 8; ++
i, ++types, base +=
step) {
/* No range currently open: start one. */
324 if (last_fixed_end == 0) {
325 update_fixed_last(base, base + step, *types);
/* Contiguous and same type: extend the open range. */
328 if (last_fixed_end == base && last_fixed_type == *types) {
329 last_fixed_end = base +
step;
/* Otherwise start a new coalesced range at this entry. */
334 update_fixed_last(base, base + step, *types);
/* Forward declarations: MTRR-update critical-section entry/exit helpers. */
338 static void prepare_set(
void);
339 static void post_set(
void);
/*
 * Dump the cached MTRR state (fixed and variable ranges) to the debug
 * log.  NOTE(review): many original lines are missing from this excerpt.
 */
341 static void __init print_mtrr_state(
void)
349 pr_debug(
"MTRR fixed ranges %sabled:\n",
/* One 64KiB-granularity block covering 0x00000-0x7FFFF. */
351 print_fixed(0x00000, 0x10000,
mtrr_state.fixed_ranges + 0);
/* Two 16KiB-granularity blocks covering 0x80000-0xBFFFF. */
352 for (i = 0; i < 2; ++
i)
353 print_fixed(0x80000 + i * 0x20000, 0x04000,
/* Eight 4KiB-granularity blocks covering 0xC0000-0xFFFFF. */
355 for (i = 0; i < 8; ++
i)
356 print_fixed(0xC0000 + i * 0x08000, 0x01000,
362 pr_debug(
"MTRR variable ranges %sabled:\n",
/* Only print variable ranges whose valid bit (bit 11 of mask_lo) is set. */
367 if (
mtrr_state.var_ranges[i].mask_lo & (1 << 11))
368 pr_debug(
" %u base %0*X%05X000 mask %0*X%05X000 %s\n",
/* Fragment of the state-caching routine: read each variable range into vrs[]. */
398 get_mtrr_var_range(i, &vrs[i]);
/*
 * Warn about CPUs that booted with inconsistent MTRR settings, keyed
 * off the class bits accumulated in smp_changes_mask.
 */
434 unsigned long mask = smp_changes_mask;
439 pr_warning(
"mtrr: your CPUs had inconsistent fixed MTRR settings\n");
441 pr_warning(
"mtrr: your CPUs had inconsistent variable MTRR settings\n");
443 pr_warning(
"mtrr: your CPUs had inconsistent MTRRdefType settings\n");
445 printk(
KERN_INFO "mtrr: probably your BIOS does not setup all CPUs.\n");
/* Checked MSR write: log (rather than oops) when the WRMSR faults. */
456 if (wrmsr_safe(msr, a, b) < 0) {
458 "MTRR: CPU %u: Writing MSR %x to %x:%x failed\n",
/*
 * Write one fixed-range MTRR MSR from msrwords[0..1], flagging *changed
 * if the on-CPU value differed.  NOTE(review): the rdmsr/wrmsr lines
 * are missing from this excerpt.
 */
470 static void set_fixed_range(
int msr,
bool *
changed,
unsigned int *msrwords)
/* Only write (and record a change) when the cached words differ. */
476 if (lo != msrwords[0] || hi != msrwords[1]) {
/*
 * Fragment of generic_get_free_region: scan the variable MTRRs for an
 * unused slot (lsize == 0), or honour an explicit replace_reg request.
 * NOTE(review): surrounding lines are missing from this excerpt.
 */
493 unsigned long lbase, lsize;
/* Caller asked to reuse a specific register and it is within range. */
498 if (replace_reg >= 0 && replace_reg < max)
501 for (i = 0; i <
max; ++
i) {
/* Fixed: "&ltype" had been mangled to "<ype" by an HTML-entity decode. */
502 mtrr_if->get(i, &lbase, &lsize, &ltype);
/*
 * Read variable MTRR 'reg' back into base/size/type.  NOTE(review):
 * most of the body (the MSR reads and size computation) is missing
 * from this excerpt.
 */
510 static void generic_get_mtrr(
unsigned int reg,
unsigned long *base,
514 unsigned int tmp, hi;
/* Valid bit (bit 11) clear: this register is disabled. */
524 if ((mask_lo & 0x800) == 0) {
/* Set all mask bits above the highest implemented address bit. */
541 tmp |= ~((1<<(hi - 1)) - 1);
543 if (tmp != mask_lo) {
544 printk(
KERN_WARNING "mtrr: your BIOS has configured an incorrect mask, fixing it.\n");
/* The low byte of base_lo holds the memory type. */
556 *type = base_lo & 0xff;
/*
 * Program the fixed-range MTRR MSRs from *frs; the return value
 * (presumably "any changed") is not visible in this excerpt.
 */
567 static int set_fixed_ranges(
mtrr_type *frs)
/* View the fixed-range table as 64-bit MSR-sized chunks. */
569 unsigned long long *saved = (
unsigned long long *)frs;
570 bool changed =
false;
/* K8 BIOS quirk check must precede any fixed-range MSR access. */
573 k8_check_syscfg_dram_mod_en();
/* Walk the fixed_range_blocks table, one MSR (two 32-bit words) at a time. */
575 while (fixed_range_blocks[++block].ranges) {
577 set_fixed_range(fixed_range_blocks[block].base_msr +
range,
578 &changed, (
unsigned int *)saved++);
/*
 * Program variable MTRR 'index' from *vr, returning whether the on-CPU
 * base or mask actually changed.  NOTE(review): the MSR read/write
 * lines are missing from this excerpt.
 */
588 static bool set_mtrr_var_ranges(
unsigned int index,
struct mtrr_var_range *vr)
591 bool changed =
false;
/* Compare only the defined base bits (type byte + address bits). */
594 if ((vr->
base_lo & 0xfffff0ffUL) != (lo & 0xfffff0ffUL)
/* Compare only the defined mask bits (valid bit + address bits). */
604 if ((vr->
mask_lo & 0xfffff800UL) != (lo & 0xfffff800UL)
/* Cached MTRRdefType MSR contents (low/high 32-bit halves). */
613 static u32 deftype_lo, deftype_hi;
/*
 * Push the cached mtrr_state to this CPU's MSRs; returns a bitmask of
 * which MTRR classes changed (presumably MTRR_CHANGE_MASK_* — confirm).
 */
621 static unsigned long set_mtrr_state(
void)
623 unsigned long change_mask = 0;
627 if (set_mtrr_var_ranges(i, &
mtrr_state.var_ranges[i]))
/* Default type (bits 0-7) or enable bits (10-11) differ from the cache. */
638 if ((deftype_lo & 0xff) !=
mtrr_state.def_type
639 || ((deftype_lo & 0xc00) >> 10) !=
mtrr_state.enabled) {
/* Rebuild the low word: keep reserved bits, replace type and enables. */
641 deftype_lo = (deftype_lo & ~0xcff) |
mtrr_state.def_type |
/* CR4 value saved across the MTRR-update critical section (presumably; confirm). */
650 static unsigned long cr4;
/*
 * Enter the MTRR-update critical section; acquires set_atomicity_lock
 * (sparse annotation).  NOTE(review): the body is missing from this excerpt.
 */
660 static void prepare_set(
void)
__acquires(set_atomicity_lock)
/*
 * Leave the MTRR-update critical section; releases set_atomicity_lock
 * (sparse annotation).  NOTE(review): most of the body is missing here.
 */
695 static void post_set(
void)
__releases(set_atomicity_lock)
/* Re-enable caching: clear CR0.CD (bit 30); 0xbfffffff == ~(1<<30). */
704 write_cr0(read_cr0() & 0xbfffffff);
/*
 * Reprogram all MTRRs on this CPU from the cached state, folding any
 * detected differences into the global smp_changes_mask.
 */
712 static void generic_set_all(
void)
721 mask = set_mtrr_state();
/* Record each changed-class bit atomically in the shared mask. */
730 for (count = 0; count <
sizeof mask * 8; ++
count) {
732 set_bit(count, &smp_changes_mask);
/*
 * Program one variable MTRR (signature fragment only; the body is
 * missing from this excerpt).
 */
748 static void generic_set_mtrr(
unsigned int reg,
unsigned long base,
/*
 * Fragment of the add-page validator: sanity-checks a requested MTRR
 * base/size (alignment and errata windows) before programming.
 */
783 unsigned long lbase,
last;
/* Alignment restriction (presumably the AMD pre-K8 4MiB rule — confirm). */
793 pr_warning(
"mtrr: base(0x%lx000) is not 4 MiB aligned\n", base);
/* Errata window: a writable MTRR over 0x70000000-0x7003FFFF can hang the CPU. */
796 if (!(base + size < 0x70000 || base > 0x7003F) &&
799 pr_warning(
"mtrr: writable mtrr between 0x70000000 and 0x7003FFFF may hang the CPU.\n");
808 last = base + size - 1;
/* Shift base and last together while base stays even and last stays odd... */
809 for (lbase = base; !(lbase & 1) && (last & 1);
810 lbase = lbase >> 1, last = last >> 1)
/* ...a premature stop means base is not aligned to the region size. */
813 pr_warning(
"mtrr: base(0x%lx000) is not aligned on a size(0x%lx000) boundary\n", base, size);
/*
 * Report write-combining support by testing bit 10 of 'config'
 * (presumably the MTRRcap MSR value — the rdmsr is missing from this
 * excerpt; confirm).
 */
819 static int generic_have_wrcomb(
void)
823 return config & (1 << 10);
/* Generic MTRR driver operations table (fragment; designated initializers). */
836 .set_all = generic_set_all,
837 .get = generic_get_mtrr,
839 .set = generic_set_mtrr,
841 .have_wrcomb = generic_have_wrcomb,