/*
 * (C) Copyright 2014 Google, Inc
 *
 * SPDX-License-Identifier: GPL-2.0+
 *
 * Memory Type Range Registers - these are used to tell the CPU whether
 * memory is cacheable and if so the cache write mode to use.
 *
 * These can speed up booting. See the mtrr command.
 *
 * Reference: Intel Architecture Software Developer's Manual, Volume 3:
 * System Programming
 */

#include <common.h>
#include <asm/io.h>
#include <asm/msr.h>
#include <asm/mtrr.h>

DECLARE_GLOBAL_DATA_PTR;
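/*
 * Usage sketch (not part of this file): boot code typically queues up the
 * regions it wants cached and then programs them all at once, e.g.:
 *
 *	mtrr_add_request(MTRR_TYPE_WRBACK, 0, 2ULL << 30);
 *	mtrr_commit(true);
 *
 * MTRR_TYPE_WRBACK is assumed to be the write-back memory type from
 * asm/mtrr.h; the 2GiB region is only an illustrative value.
 */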
/* Prepare to adjust MTRRs */
void mtrr_open(struct mtrr_state *state)
{
	if (!gd->arch.has_mtrr)
		return;

	state->enable_cache = dcache_status();
	if (state->enable_cache)
		disable_caches();

	/* Save the old deftype and disable MTRRs while they are updated */
	state->deftype = native_read_msr(MTRR_DEF_TYPE_MSR);
	wrmsrl(MTRR_DEF_TYPE_MSR, state->deftype & ~MTRR_DEF_TYPE_EN);
}
/* Clean up after adjusting MTRRs, and enable them */
void mtrr_close(struct mtrr_state *state)
{
	if (!gd->arch.has_mtrr)
		return;

	wrmsrl(MTRR_DEF_TYPE_MSR, state->deftype | MTRR_DEF_TYPE_EN);
	if (state->enable_cache)
		enable_caches();
}
int mtrr_commit(bool do_caches)
{
	struct mtrr_request *req = gd->arch.mtrr_req;
	struct mtrr_state state;
	uint64_t mask;
	int i;

	if (!gd->arch.has_mtrr)
		return -ENOSYS;

	mtrr_open(&state);
	for (i = 0; i < gd->arch.mtrr_req_count; i++, req++) {
		mask = ~(req->size - 1);
		mask &= (1ULL << CONFIG_CPU_ADDR_BITS) - 1;
		wrmsrl(MTRR_PHYS_BASE_MSR(i), req->start | req->type);
		wrmsrl(MTRR_PHYS_MASK_MSR(i), mask | MTRR_PHYS_MASK_VALID);
	}

	/* Clear the ones that are unused */
	for (; i < MTRR_COUNT; i++)
		wrmsrl(MTRR_PHYS_MASK_MSR(i), 0);
	mtrr_close(&state);
	return 0;
}
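/*
 * Both mtrr_commit() above and mtrr_add_request() below build the PHYS_MASK
 * value the same way. A worked example (illustrative values only): with
 * CONFIG_CPU_ADDR_BITS = 36 and a 256MiB (0x10000000) request,
 *
 *	~(0x10000000 - 1)		= 0xfffffffff0000000
 *	& ((1ULL << 36) - 1)		= 0x0000000ff0000000
 *
 * which, once MTRR_PHYS_MASK_VALID is or'd in, is the value mtrr_commit()
 * programs into MTRR_PHYS_MASK_MSR(i).
 */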
int mtrr_add_request(int type, uint64_t start, uint64_t size)
{
	struct mtrr_request *req;
	uint64_t mask;

	if (!gd->arch.has_mtrr)
		return -ENOSYS;
	if (gd->arch.mtrr_req_count == MAX_MTRR_REQUESTS)
		return -ENOSPC;
	req = &gd->arch.mtrr_req[gd->arch.mtrr_req_count++];
	req->type = type;
	req->start = start;
	req->size = size;
	debug("%d: type=%d, %08llx %08llx\n", gd->arch.mtrr_req_count - 1,
	      req->type, req->start, req->size);
	mask = ~(req->size - 1);
	mask &= (1ULL << CONFIG_CPU_ADDR_BITS) - 1;
	mask |= MTRR_PHYS_MASK_VALID;
	debug(" %016llx %016llx\n", req->start | req->type, mask);

	return 0;
}