These changes are the raw update of the kernel sources to linux-4.4.6-rt14.
[kvmfornfv.git] kernel/arch/x86/kernel/cpu/mtrr/generic.c
index 7d74f7b..3b533cf 100644
@@ -102,59 +102,76 @@ static int check_type_overlap(u8 *prev, u8 *curr)
        return 0;
 }
 
-/*
- * Error/Semi-error returns:
- * 0xFF - when MTRR is not enabled
- * *repeat == 1 implies [start:end] spanned across MTRR range and type returned
- *             corresponds only to [start:*partial_end].
- *             Caller has to lookup again for [*partial_end:end].
+/**
+ * mtrr_type_lookup_fixed - look up memory type in MTRR fixed entries
+ *
+ * Return the MTRR fixed memory type of 'start'.
+ *
+ * The MTRR fixed entries cover the first 1MB and are laid out as follows:
+ *  0x00000 - 0x7FFFF : This range is divided into eight 64KB sub-ranges
+ *  0x80000 - 0xBFFFF : This range is divided into sixteen 16KB sub-ranges
+ *  0xC0000 - 0xFFFFF : This range is divided into sixty-four 4KB sub-ranges
+ *
+ * Return Values:
+ * MTRR_TYPE_(type)  - Matched memory type
+ * MTRR_TYPE_INVALID - Unmatched
+ */
+static u8 mtrr_type_lookup_fixed(u64 start, u64 end)
+{
+       int idx;
+
+       if (start >= 0x100000)
+               return MTRR_TYPE_INVALID;
+
+       /* 0x0 - 0x7FFFF */
+       if (start < 0x80000) {
+               idx = 0;
+               idx += (start >> 16);
+               return mtrr_state.fixed_ranges[idx];
+       /* 0x80000 - 0xBFFFF */
+       } else if (start < 0xC0000) {
+               idx = 1 * 8;
+               idx += ((start - 0x80000) >> 14);
+               return mtrr_state.fixed_ranges[idx];
+       }
+
+       /* 0xC0000 - 0xFFFFF */
+       idx = 3 * 8;
+       idx += ((start - 0xC0000) >> 12);
+       return mtrr_state.fixed_ranges[idx];
+}
+
+/**
+ * mtrr_type_lookup_variable - look up memory type in MTRR variable entries
+ *
+ * Return Value:
+ * MTRR_TYPE_(type) - Matched memory type or default memory type (unmatched)
+ *
+ * Output Arguments:
+ * repeat - Set to 1 when [start:end] spans an MTRR range and the type
+ *         returned corresponds only to [start:*partial_end].  The caller
+ *         has to look up again for [*partial_end:end].
+ *
+ * uniform - Set to 1 when an MTRR covers the region uniformly, i.e. the
+ *          region is fully covered by a single MTRR entry or the default
+ *          type.
  */
-static u8 __mtrr_type_lookup(u64 start, u64 end, u64 *partial_end, int *repeat)
+static u8 mtrr_type_lookup_variable(u64 start, u64 end, u64 *partial_end,
+                                   int *repeat, u8 *uniform)
 {
        int i;
        u64 base, mask;
        u8 prev_match, curr_match;
 
        *repeat = 0;
-       if (!mtrr_state_set)
-               return 0xFF;
-
-       if (!mtrr_state.enabled)
-               return 0xFF;
+       *uniform = 1;
 
-       /* Make end inclusive end, instead of exclusive */
+       /* Make end inclusive instead of exclusive */
        end--;
 
-       /* Look in fixed ranges. Just return the type as per start */
-       if (mtrr_state.have_fixed && (start < 0x100000)) {
-               int idx;
-
-               if (start < 0x80000) {
-                       idx = 0;
-                       idx += (start >> 16);
-                       return mtrr_state.fixed_ranges[idx];
-               } else if (start < 0xC0000) {
-                       idx = 1 * 8;
-                       idx += ((start - 0x80000) >> 14);
-                       return mtrr_state.fixed_ranges[idx];
-               } else if (start < 0x1000000) {
-                       idx = 3 * 8;
-                       idx += ((start - 0xC0000) >> 12);
-                       return mtrr_state.fixed_ranges[idx];
-               }
-       }
-
-       /*
-        * Look in variable ranges
-        * Look of multiple ranges matching this address and pick type
-        * as per MTRR precedence
-        */
-       if (!(mtrr_state.enabled & 2))
-               return mtrr_state.def_type;
-
-       prev_match = 0xFF;
+       prev_match = MTRR_TYPE_INVALID;
        for (i = 0; i < num_var_ranges; ++i) {
-               unsigned short start_state, end_state;
+               unsigned short start_state, end_state, inclusive;
 
                if (!(mtrr_state.var_ranges[i].mask_lo & (1 << 11)))
                        continue;
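
The index arithmetic in mtrr_type_lookup_fixed() above maps any physical address below 1MB onto one of the 88 fixed-range type bytes: eight 64KB entries, then sixteen 16KB entries, then sixty-four 4KB entries. A minimal stand-alone sketch of the same calculation (user-space C, hypothetical helper name, for illustration only):

#include <stdint.h>
#include <stdio.h>

/*
 * Same index math as mtrr_type_lookup_fixed(): indexes 0..7 are the
 * 64KB entries for 0x00000-0x7FFFF, 8..23 the 16KB entries for
 * 0x80000-0xBFFFF, and 24..87 the 4KB entries for 0xC0000-0xFFFFF.
 */
static int fixed_range_index(uint64_t addr)
{
	if (addr >= 0x100000)
		return -1;				/* not a fixed-range address */
	if (addr < 0x80000)
		return addr >> 16;			/* 0..7   */
	if (addr < 0xC0000)
		return 8 + ((addr - 0x80000) >> 14);	/* 8..23  */
	return 24 + ((addr - 0xC0000) >> 12);		/* 24..87 */
}

int main(void)
{
	printf("0xA0000 -> index %d\n", fixed_range_index(0xA0000));	/* 16 */
	printf("0xC8000 -> index %d\n", fixed_range_index(0xC8000));	/* 32 */
	return 0;
}
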
@@ -166,20 +183,29 @@ static u8 __mtrr_type_lookup(u64 start, u64 end, u64 *partial_end, int *repeat)
 
                start_state = ((start & mask) == (base & mask));
                end_state = ((end & mask) == (base & mask));
+               inclusive = ((start < base) && (end > base));
 
-               if (start_state != end_state) {
+               if ((start_state != end_state) || inclusive) {
                        /*
                         * We have start:end spanning across an MTRR.
-                        * We split the region into
-                        * either
-                        * (start:mtrr_end) (mtrr_end:end)
-                        * or
-                        * (start:mtrr_start) (mtrr_start:end)
+                        * We split the region into either
+                        *
+                        * - start_state:1
+                        * (start:mtrr_end)(mtrr_end:end)
+                        * - end_state:1
+                        * (start:mtrr_start)(mtrr_start:end)
+                        * - inclusive:1
+                        * (start:mtrr_start)(mtrr_start:mtrr_end)(mtrr_end:end)
+                        *
                         * depending on kind of overlap.
-                        * Return the type for first region and a pointer to
-                        * the start of second region so that caller will
-                        * lookup again on the second region.
-                        * Note: This way we handle multiple overlaps as well.
+                        *
+                        * Return the type of the first region and a pointer
+                        * to the start of the next region so that the caller
+                        * is advised to look up again after having adjusted
+                        * start and end.
+                        *
+                        * Note: This way we handle overlaps with multiple
+                        * entries and the default type properly.
                         */
                        if (start_state)
                                *partial_end = base + get_mtrr_size(mask);
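
The start_state, end_state and inclusive flags computed above distinguish the ways [start:end] can straddle a variable MTRR, which is what drives the split and the repeat logic. A rough user-space sketch of that classification using explicit range bounds (the names and example ranges are made up for illustration; the kernel derives the same information from the base/mask pair):

#include <stdint.h>
#include <stdio.h>

enum overlap {
	NO_OVERLAP,	/* MTRR does not touch [start:end]                 */
	FULL_COVER,	/* MTRR covers [start:end] entirely                */
	HEAD_OVERLAP,	/* start inside MTRR, end beyond it (start_state)  */
	TAIL_OVERLAP,	/* end inside MTRR, start before it (end_state)    */
	INCLUSIVE,	/* MTRR lies completely inside [start:end]         */
};

/* Classify how the inclusive range [start:end] overlaps an MTRR that
 * spans [mtrr_start:mtrr_end], also inclusive. */
static enum overlap classify(uint64_t start, uint64_t end,
			     uint64_t mtrr_start, uint64_t mtrr_end)
{
	int start_in = (start >= mtrr_start && start <= mtrr_end);
	int end_in   = (end   >= mtrr_start && end   <= mtrr_end);

	if (start_in && end_in)
		return FULL_COVER;
	if (start_in)
		return HEAD_OVERLAP;	/* split at mtrr_end   */
	if (end_in)
		return TAIL_OVERLAP;	/* split at mtrr_start */
	if (start < mtrr_start && end > mtrr_end)
		return INCLUSIVE;	/* split at mtrr_start, then mtrr_end */
	return NO_OVERLAP;
}

int main(void)
{
	/* A hypothetical 256MB MTRR at 1GB: [0x40000000:0x4FFFFFFF] */
	printf("%d\n", classify(0x40000000, 0x7FFFFFFF,
				0x40000000, 0x4FFFFFFF));	/* HEAD_OVERLAP */
	printf("%d\n", classify(0x30000000, 0x7FFFFFFF,
				0x40000000, 0x4FFFFFFF));	/* INCLUSIVE */
	return 0;
}
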
@@ -193,59 +219,94 @@ static u8 __mtrr_type_lookup(u64 start, u64 end, u64 *partial_end, int *repeat)
 
                        end = *partial_end - 1; /* end is inclusive */
                        *repeat = 1;
+                       *uniform = 0;
                }
 
                if ((start & mask) != (base & mask))
                        continue;
 
                curr_match = mtrr_state.var_ranges[i].base_lo & 0xff;
-               if (prev_match == 0xFF) {
+               if (prev_match == MTRR_TYPE_INVALID) {
                        prev_match = curr_match;
                        continue;
                }
 
+               *uniform = 0;
                if (check_type_overlap(&prev_match, &curr_match))
                        return curr_match;
        }
 
-       if (mtrr_tom2) {
-               if (start >= (1ULL<<32) && (end < mtrr_tom2))
-                       return MTRR_TYPE_WRBACK;
-       }
-
-       if (prev_match != 0xFF)
+       if (prev_match != MTRR_TYPE_INVALID)
                return prev_match;
 
        return mtrr_state.def_type;
 }
 
-/*
- * Returns the effective MTRR type for the region
- * Error return:
- * 0xFF - when MTRR is not enabled
+/**
+ * mtrr_type_lookup - look up memory type in MTRR
+ *
+ * Return Values:
+ * MTRR_TYPE_(type)  - The effective MTRR type for the region
+ * MTRR_TYPE_INVALID - MTRR is disabled
+ *
+ * Output Argument:
+ * uniform - Set to 1 when an MTRR covers the region uniformly, i.e. the
+ *          region is fully covered by a single MTRR entry or the default
+ *          type.
  */
-u8 mtrr_type_lookup(u64 start, u64 end)
+u8 mtrr_type_lookup(u64 start, u64 end, u8 *uniform)
 {
-       u8 type, prev_type;
+       u8 type, prev_type, is_uniform = 1, dummy;
        int repeat;
        u64 partial_end;
 
-       type = __mtrr_type_lookup(start, end, &partial_end, &repeat);
+       if (!mtrr_state_set)
+               return MTRR_TYPE_INVALID;
+
+       if (!(mtrr_state.enabled & MTRR_STATE_MTRR_ENABLED))
+               return MTRR_TYPE_INVALID;
+
+       /*
+        * Look up the fixed ranges first, which take priority over
+        * the variable ranges.
+        */
+       if ((start < 0x100000) &&
+           (mtrr_state.have_fixed) &&
+           (mtrr_state.enabled & MTRR_STATE_MTRR_FIXED_ENABLED)) {
+               is_uniform = 0;
+               type = mtrr_type_lookup_fixed(start, end);
+               goto out;
+       }
+
+       /*
+        * Look up the variable ranges.  Look for multiple ranges matching
+        * this address and pick the type as per MTRR precedence.
+        */
+       type = mtrr_type_lookup_variable(start, end, &partial_end,
+                                        &repeat, &is_uniform);
 
        /*
         * Common path is with repeat = 0.
         * However, we can have cases where [start:end] spans across some
-        * MTRR range. Do repeated lookups for that case here.
+        * MTRR ranges and/or the default type.  Do repeated lookups for
+        * that case here.
         */
        while (repeat) {
                prev_type = type;
                start = partial_end;
-               type = __mtrr_type_lookup(start, end, &partial_end, &repeat);
+               is_uniform = 0;
+               type = mtrr_type_lookup_variable(start, end, &partial_end,
+                                                &repeat, &dummy);
 
                if (check_type_overlap(&prev_type, &type))
-                       return type;
+                       goto out;
        }
 
+       if (mtrr_tom2 && (start >= (1ULL<<32)) && (end < mtrr_tom2))
+               type = MTRR_TYPE_WRBACK;
+
+out:
+       *uniform = is_uniform;
        return type;
 }
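
With the reworked mtrr_type_lookup() above, a caller receives both the effective type and whether the region is covered uniformly, so code that wants to create a large mapping can refuse ranges whose MTRR type changes part-way through. A hedged sketch of such a caller (the helper name is hypothetical and not part of this diff):

#include <linux/types.h>
#include <asm/mtrr.h>	/* assumed: mtrr_type_lookup(), MTRR_TYPE_INVALID */

/*
 * Illustrative only: decide whether [addr, addr + size) carries a single
 * MTRR type, so it could be mapped by one large page without mixing
 * memory types.  mtrr_type_lookup() takes an exclusive end address.
 */
static bool range_has_uniform_mtrr_type(u64 addr, u64 size)
{
	u8 uniform;
	u8 type = mtrr_type_lookup(addr, addr + size, &uniform);

	/* MTRR_TYPE_INVALID means MTRRs are disabled: nothing to conflict with. */
	if (type == MTRR_TYPE_INVALID)
		return true;

	return uniform != 0;
}
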
 
@@ -347,7 +408,9 @@ static void __init print_mtrr_state(void)
                 mtrr_attrib_to_str(mtrr_state.def_type));
        if (mtrr_state.have_fixed) {
                pr_debug("MTRR fixed ranges %sabled:\n",
-                        mtrr_state.enabled & 1 ? "en" : "dis");
+                       ((mtrr_state.enabled & MTRR_STATE_MTRR_ENABLED) &&
+                        (mtrr_state.enabled & MTRR_STATE_MTRR_FIXED_ENABLED)) ?
+                        "en" : "dis");
                print_fixed(0x00000, 0x10000, mtrr_state.fixed_ranges + 0);
                for (i = 0; i < 2; ++i)
                        print_fixed(0x80000 + i * 0x20000, 0x04000,
@@ -360,7 +423,7 @@ static void __init print_mtrr_state(void)
                print_fixed_last();
        }
        pr_debug("MTRR variable ranges %sabled:\n",
-                mtrr_state.enabled & 2 ? "en" : "dis");
+                mtrr_state.enabled & MTRR_STATE_MTRR_ENABLED ? "en" : "dis");
        high_width = (__ffs64(size_or_mask) - (32 - PAGE_SHIFT) + 3) / 4;
 
        for (i = 0; i < num_var_ranges; ++i) {
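
The MTRR_STATE_MTRR_ENABLED and MTRR_STATE_MTRR_FIXED_ENABLED flags used in the pr_debug() changes above replace the bare `& 1` / `& 2` tests against mtrr_state.enabled. They correspond to the FE (fixed-range enable) and E (global enable) bits of MSR_MTRRdefType after get_mtrr_state() shifts them down into mtrr_state.enabled; the definitions below are introduced alongside this change (not in these hunks) and are shown here only for reference:

/* mtrr_state.enabled holds bits 11:10 of MSR_MTRRdefType shifted down by 10. */
#define MTRR_STATE_MTRR_FIXED_ENABLED	0x01	/* FE: fixed-range MTRRs enabled */
#define MTRR_STATE_MTRR_ENABLED		0x02	/* E:  MTRRs globally enabled    */
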
@@ -382,7 +445,7 @@ static void __init print_mtrr_state(void)
 }
 
 /* Grab all of the MTRR state for this CPU into *state */
-void __init get_mtrr_state(void)
+bool __init get_mtrr_state(void)
 {
        struct mtrr_var_range *vrs;
        unsigned long flags;
@@ -426,6 +489,8 @@ void __init get_mtrr_state(void)
 
        post_set();
        local_irq_restore(flags);
+
+       return !!(mtrr_state.enabled & MTRR_STATE_MTRR_ENABLED);
 }
 
 /* Some BIOS's are messed up and don't set all MTRRs the same! */
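
Since get_mtrr_state() now reports whether MTRRs are enabled instead of returning void, its boot-time caller can latch that answer once rather than re-deriving it from mtrr_state.enabled afterwards. A hypothetical sketch of such a caller (the function and variable names are illustrative, not taken from this diff):

#include <linux/printk.h>
#include "mtrr.h"	/* assumed: local prototype for get_mtrr_state() */

/* Illustrative boot-path use of the new return value. */
static bool mtrr_enabled_cached;

static void mtrr_boot_init_sketch(void)
{
	/* Reads every MTRR MSR into mtrr_state and reports whether the
	 * feature is enabled at all. */
	mtrr_enabled_cached = get_mtrr_state();

	if (!mtrr_enabled_cached)
		pr_info("MTRRs disabled\n");
}
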