// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2002
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 */

#include <asm/cache.h>
12 volatile int *cf_icache_status = (int *)ICACHE_STATUS;
13 volatile int *cf_dcache_status = (int *)DCACHE_STATUS;
15 void flush_cache(ulong start_addr, ulong size)
17 /* Must be implemented for all M68k processors with copy-back data cache */
20 int icache_status(void)
22 return *cf_icache_status;
25 int dcache_status(void)
27 return *cf_dcache_status;
30 void icache_enable(void)
34 *cf_icache_status = 1;
36 #if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
37 __asm__ __volatile__("movec %0, %%acr2"::"r"(CONFIG_SYS_CACHE_ACR2));
38 __asm__ __volatile__("movec %0, %%acr3"::"r"(CONFIG_SYS_CACHE_ACR3));
39 #if defined(CONFIG_CF_V4E)
40 __asm__ __volatile__("movec %0, %%acr6"::"r"(CONFIG_SYS_CACHE_ACR6));
41 __asm__ __volatile__("movec %0, %%acr7"::"r"(CONFIG_SYS_CACHE_ACR7));
44 __asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
45 __asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
48 __asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_ICACR));
51 void icache_disable(void)
55 *cf_icache_status = 0;
58 #if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
59 __asm__ __volatile__("movec %0, %%acr2"::"r"(temp));
60 __asm__ __volatile__("movec %0, %%acr3"::"r"(temp));
61 #if defined(CONFIG_CF_V4E)
62 __asm__ __volatile__("movec %0, %%acr6"::"r"(temp));
63 __asm__ __volatile__("movec %0, %%acr7"::"r"(temp));
66 __asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
67 __asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
71 void icache_invalid(void)
75 temp = CONFIG_SYS_ICACHE_INV;
76 if (*cf_icache_status)
77 temp |= CONFIG_SYS_CACHE_ICACR;
79 __asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
/*
 * Data cache support exists only on ColdFire V4 parts such as the
 * MCF547x_8x and MCF5445x; on ColdFire V2 and V3 the dcache routines
 * below are effectively dummies.
 */
86 void dcache_enable(void)
89 *cf_dcache_status = 1;
91 #if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
92 __asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
93 __asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
94 #if defined(CONFIG_CF_V4E)
95 __asm__ __volatile__("movec %0, %%acr4"::"r"(CONFIG_SYS_CACHE_ACR4));
96 __asm__ __volatile__("movec %0, %%acr5"::"r"(CONFIG_SYS_CACHE_ACR5));
100 __asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_DCACR));
103 void dcache_disable(void)
107 *cf_dcache_status = 0;
110 __asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
112 #if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
113 __asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
114 __asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
115 #if defined(CONFIG_CF_V4E)
116 __asm__ __volatile__("movec %0, %%acr4"::"r"(temp));
117 __asm__ __volatile__("movec %0, %%acr5"::"r"(temp));
/*
 * Invalidate the data cache (V4/V4e only; a no-op elsewhere).  As with
 * icache_invalid(), any currently-enabled cache's control bits are
 * OR'ed into the CACR write so invalidation does not disable them.
 */
void dcache_invalid(void)
{
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
	u32 temp;

	temp = CONFIG_SYS_DCACHE_INV;
	if (*cf_dcache_status)
		temp |= CONFIG_SYS_CACHE_DCACR;
	if (*cf_icache_status)
		temp |= CONFIG_SYS_CACHE_ICACR;

	__asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
#endif
}
137 __weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
139 /* An empty stub, real implementation should be in platform code */
141 __weak void flush_dcache_range(unsigned long start, unsigned long stop)
143 /* An empty stub, real implementation should be in platform code */