Merge branch 'x86-mtrr-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip

* 'x86-mtrr-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip:
x86, mtrr: Support mtrr lookup for range spanning across MTRR range
x86, mtrr: Refactor MTRR type overlap check code

+106 -22
arch/x86/kernel/cpu/mtrr/generic.c
···
 	}
 }
 
+/* Get the size of contiguous MTRR range */
+static u64 get_mtrr_size(u64 mask)
+{
+	u64 size;
+
+	mask >>= PAGE_SHIFT;
+	mask |= size_or_mask;
+	size = -mask;
+	size <<= PAGE_SHIFT;
+	return size;
+}
+
 /*
- * Returns the effective MTRR type for the region
- * Error returns:
- * - 0xFE - when the range is "not entirely covered" by _any_ var range MTRR
- * - 0xFF - when MTRR is not enabled
+ * Check and return the effective type for MTRR-MTRR type overlap.
+ * Returns 1 if the effective type is UNCACHEABLE, else returns 0
  */
-u8 mtrr_type_lookup(u64 start, u64 end)
+static int check_type_overlap(u8 *prev, u8 *curr)
+{
+	if (*prev == MTRR_TYPE_UNCACHABLE || *curr == MTRR_TYPE_UNCACHABLE) {
+		*prev = MTRR_TYPE_UNCACHABLE;
+		*curr = MTRR_TYPE_UNCACHABLE;
+		return 1;
+	}
+
+	if ((*prev == MTRR_TYPE_WRBACK && *curr == MTRR_TYPE_WRTHROUGH) ||
+	    (*prev == MTRR_TYPE_WRTHROUGH && *curr == MTRR_TYPE_WRBACK)) {
+		*prev = MTRR_TYPE_WRTHROUGH;
+		*curr = MTRR_TYPE_WRTHROUGH;
+	}
+
+	if (*prev != *curr) {
+		*prev = MTRR_TYPE_UNCACHABLE;
+		*curr = MTRR_TYPE_UNCACHABLE;
+		return 1;
+	}
+
+	return 0;
+}
+
+/*
+ * Error/Semi-error returns:
+ * 0xFF - when MTRR is not enabled
+ * *repeat == 1 implies [start:end] spanned across MTRR range and type returned
+ *		corresponds only to [start:*partial_end].
+ *		Caller has to lookup again for [*partial_end:end].
+ */
+static u8 __mtrr_type_lookup(u64 start, u64 end, u64 *partial_end, int *repeat)
 {
 	int i;
 	u64 base, mask;
 	u8 prev_match, curr_match;
 
+	*repeat = 0;
 	if (!mtrr_state_set)
 		return 0xFF;
 
···
 
 		start_state = ((start & mask) == (base & mask));
 		end_state = ((end & mask) == (base & mask));
-		if (start_state != end_state)
-			return 0xFE;
+
+		if (start_state != end_state) {
+			/*
+			 * We have start:end spanning across an MTRR.
+			 * We split the region into
+			 * either
+			 * (start:mtrr_end) (mtrr_end:end)
+			 * or
+			 * (start:mtrr_start) (mtrr_start:end)
+			 * depending on kind of overlap.
+			 * Return the type for first region and a pointer to
+			 * the start of second region so that caller will
+			 * lookup again on the second region.
+			 * Note: This way we handle multiple overlaps as well.
+			 */
+			if (start_state)
+				*partial_end = base + get_mtrr_size(mask);
+			else
+				*partial_end = base;
+
+			if (unlikely(*partial_end <= start)) {
+				WARN_ON(1);
+				*partial_end = start + PAGE_SIZE;
+			}
+
+			end = *partial_end - 1; /* end is inclusive */
+			*repeat = 1;
+		}
 
 		if ((start & mask) != (base & mask))
 			continue;
···
 			continue;
 		}
 
-		if (prev_match == MTRR_TYPE_UNCACHABLE ||
-		    curr_match == MTRR_TYPE_UNCACHABLE) {
-			return MTRR_TYPE_UNCACHABLE;
-		}
-
-		if ((prev_match == MTRR_TYPE_WRBACK &&
-		     curr_match == MTRR_TYPE_WRTHROUGH) ||
-		    (prev_match == MTRR_TYPE_WRTHROUGH &&
-		     curr_match == MTRR_TYPE_WRBACK)) {
-			prev_match = MTRR_TYPE_WRTHROUGH;
-			curr_match = MTRR_TYPE_WRTHROUGH;
-		}
-
-		if (prev_match != curr_match)
-			return MTRR_TYPE_UNCACHABLE;
+		if (check_type_overlap(&prev_match, &curr_match))
+			return curr_match;
 	}
 
 	if (mtrr_tom2) {
···
 		return prev_match;
 
 	return mtrr_state.def_type;
+}
+
+/*
+ * Returns the effective MTRR type for the region
+ * Error return:
+ * 0xFF - when MTRR is not enabled
+ */
+u8 mtrr_type_lookup(u64 start, u64 end)
+{
+	u8 type, prev_type;
+	int repeat;
+	u64 partial_end;
+
+	type = __mtrr_type_lookup(start, end, &partial_end, &repeat);
+
+	/*
+	 * Common path is with repeat = 0.
+	 * However, we can have cases where [start:end] spans across some
+	 * MTRR range. Do repeated lookups for that case here.
+	 */
+	while (repeat) {
+		prev_type = type;
+		start = partial_end;
+		type = __mtrr_type_lookup(start, end, &partial_end, &repeat);
+
+		if (check_type_overlap(&prev_type, &type))
+			return type;
+	}
+
+	return type;
 }
 
 /* Get the MSR pair relating to a var range */
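For illustration only, not part of the merged patches: the type-combining rules that check_type_overlap() centralizes can be exercised stand-alone in user space. The MTRR_TYPE_* values below are assumed to mirror the kernel's <asm/mtrr.h> definitions; the helper body is lifted from the diff above, while main() is a hypothetical driver showing that WB combined with WT degrades to WT (lookup may continue, return 0), and that any overlap involving UC, or any other mismatch, degrades to UC and ends the lookup (return 1).

/* Stand-alone sketch; MTRR_TYPE_* values assumed to match <asm/mtrr.h>. */
#include <stdio.h>

#define MTRR_TYPE_UNCACHABLE	0
#define MTRR_TYPE_WRTHROUGH	4
#define MTRR_TYPE_WRBACK	6

typedef unsigned char u8;

/* Same body as the kernel helper introduced in the diff above */
static int check_type_overlap(u8 *prev, u8 *curr)
{
	if (*prev == MTRR_TYPE_UNCACHABLE || *curr == MTRR_TYPE_UNCACHABLE) {
		*prev = MTRR_TYPE_UNCACHABLE;
		*curr = MTRR_TYPE_UNCACHABLE;
		return 1;
	}

	if ((*prev == MTRR_TYPE_WRBACK && *curr == MTRR_TYPE_WRTHROUGH) ||
	    (*prev == MTRR_TYPE_WRTHROUGH && *curr == MTRR_TYPE_WRBACK)) {
		*prev = MTRR_TYPE_WRTHROUGH;
		*curr = MTRR_TYPE_WRTHROUGH;
	}

	if (*prev != *curr) {
		*prev = MTRR_TYPE_UNCACHABLE;
		*curr = MTRR_TYPE_UNCACHABLE;
		return 1;
	}

	return 0;
}

int main(void)
{
	u8 prev, curr;
	int done;

	/* WB vs WT degrades to WT; lookup may continue: prints done=0 type=4 */
	prev = MTRR_TYPE_WRBACK;
	curr = MTRR_TYPE_WRTHROUGH;
	done = check_type_overlap(&prev, &curr);
	printf("done=%d type=%d\n", done, curr);

	/* Any overlap with UC is UC; lookup can stop: prints done=1 type=0 */
	prev = MTRR_TYPE_WRBACK;
	curr = MTRR_TYPE_UNCACHABLE;
	done = check_type_overlap(&prev, &curr);
	printf("done=%d type=%d\n", done, curr);

	return 0;
}

The same helper is what makes the spanning-range support cheap: mtrr_type_lookup()'s repeat loop simply feeds each partial-range result through check_type_overlap() until no MTRR boundary remains to cross.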