// SPDX-License-Identifier: GPL-2.0+
/*
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 */

/* for now: just dummy functions to satisfy the linker */

#include <asm/cache.h>

DECLARE_GLOBAL_DATA_PTR;
17 * Flush range from all levels of d-cache/unified-cache.
18 * Affects the range [start, start + size - 1].
20 __weak void flush_cache(unsigned long start, unsigned long size)
22 flush_dcache_range(start, start + size);
26 * Default implementation:
27 * do a range flush for the entire range
29 __weak void flush_dcache_all(void)
35 * Default implementation of enable_caches()
36 * Real implementation should be in platform code
38 __weak void enable_caches(void)
40 puts("WARNING: Caches not enabled\n");
43 __weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
45 /* An empty stub, real implementation should be in platform code */
47 __weak void flush_dcache_range(unsigned long start, unsigned long stop)
49 /* An empty stub, real implementation should be in platform code */
52 int check_cache_range(unsigned long start, unsigned long stop)
56 if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
59 if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
63 warn_non_spl("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
#ifdef CONFIG_SYS_NONCACHED_MEMORY
/*
 * Reserve one MMU section worth of address space below the malloc() area that
 * will be mapped uncached.
 */
static unsigned long noncached_start;	/* base of the uncached window */
static unsigned long noncached_end;	/* exclusive end of the window */
static unsigned long noncached_next;	/* bump-allocator cursor */

void noncached_set_region(void)
{
#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
	mmu_set_region_dcache_behaviour(noncached_start,
					noncached_end - noncached_start,
					DCACHE_OFF);
#endif
}

void noncached_init(void)
{
	phys_addr_t start, end;
	size_t size;

	/* If this calculation changes, update board_f.c:reserve_noncached() */
	end = ALIGN(mem_malloc_start, MMU_SECTION_SIZE) - MMU_SECTION_SIZE;
	size = ALIGN(CONFIG_SYS_NONCACHED_MEMORY, MMU_SECTION_SIZE);
	start = end - size;

	debug("mapping memory %pa-%pa non-cached\n", &start, &end);

	noncached_start = start;
	noncached_end = end;
	noncached_next = start;

	noncached_set_region();
}

/*
 * Bump-allocate @size bytes from the uncached window, aligned to @align.
 * Returns 0 when the request does not fit; the allocation is permanent
 * (there is no corresponding free).
 */
phys_addr_t noncached_alloc(size_t size, size_t align)
{
	phys_addr_t next = ALIGN(noncached_next, align);

	if (next >= noncached_end || (noncached_end - next) < size)
		return 0;

	debug("allocated %zu bytes of uncached memory @%pa\n", size, &next);
	noncached_next = next + size;

	return next;
}
#endif /* CONFIG_SYS_NONCACHED_MEMORY */
121 #if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
122 void invalidate_l2_cache(void)
124 unsigned int val = 0;
126 asm volatile("mcr p15, 1, %0, c15, c11, 0 @ invl l2 cache"
127 : : "r" (val) : "cc");
/* Architecture hook: delegate MMU/page-table reservation to the ARM helper */
int arch_reserve_mmu(void)
{
	return arm_reserve_mmu();
}
137 __weak int arm_reserve_mmu(void)
139 #if !(CONFIG_IS_ENABLED(SYS_ICACHE_OFF) && CONFIG_IS_ENABLED(SYS_DCACHE_OFF))
140 /* reserve TLB table */
141 gd->arch.tlb_size = PGTABLE_SIZE;
142 gd->relocaddr -= gd->arch.tlb_size;
144 /* round down to next 64 kB limit */
145 gd->relocaddr &= ~(0x10000 - 1);
147 gd->arch.tlb_addr = gd->relocaddr;
148 debug("TLB table from %08lx to %08lx\n", gd->arch.tlb_addr,
149 gd->arch.tlb_addr + gd->arch.tlb_size);
151 #ifdef CONFIG_SYS_MEM_RESERVE_SECURE
153 * Record allocated tlb_addr in case gd->tlb_addr to be overwritten
154 * with location within secure ram.
156 gd->arch.tlb_allocated = gd->arch.tlb_addr;