m68knommu: clean up ColdFire cache control code

The cache control code for the ColdFire CPUs is a big ugly mess
of "#ifdef"ery liberally coated with bit constants. Clean it up.

The cache controllers in the various ColdFire parts are actually quite
similar. Just differing in some bit flags and options supported. Using
the header defines now in place it is pretty easy to factor out the
small differences and use common setup and flush/invalidate code.

I have preserved the cache setups as they were in the old code
(except where obviously wrong - like in the case of the 5249). Following
from this it should be easy now to extend the possible setups used on
the CACHE controllers that support split cacheing or copy-back or
write through options.

Signed-off-by: Greg Ungerer <gerg@uclinux.org>

+80 -192
+6 -34
arch/m68k/include/asm/cacheflush_no.h
··· 2 #define _M68KNOMMU_CACHEFLUSH_H 3 4 /* 5 - * (C) Copyright 2000-2004, Greg Ungerer <gerg@snapgear.com> 6 */ 7 #include <linux/mm.h> 8 #include <asm/mcfsim.h> ··· 10 #define flush_cache_all() __flush_cache_all() 11 #define flush_cache_mm(mm) do { } while (0) 12 #define flush_cache_dup_mm(mm) do { } while (0) 13 - #define flush_cache_range(vma, start, end) __flush_cache_all() 14 #define flush_cache_page(vma, vmaddr) do { } while (0) 15 #ifndef flush_dcache_range 16 #define flush_dcache_range(start,len) __flush_cache_all() ··· 33 #ifndef __flush_cache_all 34 static inline void __flush_cache_all(void) 35 { 36 - #if defined(CONFIG_M523x) || defined(CONFIG_M527x) 37 __asm__ __volatile__ ( 38 - "movel #0x81400110, %%d0\n\t" 39 "movec %%d0, %%CACR\n\t" 40 "nop\n\t" 41 - : : : "d0" ); 42 - #endif /* CONFIG_M523x || CONFIG_M527x */ 43 - #if defined(CONFIG_M528x) 44 - __asm__ __volatile__ ( 45 - "movel #0x81000200, %%d0\n\t" 46 - "movec %%d0, %%CACR\n\t" 47 - "nop\n\t" 48 - : : : "d0" ); 49 - #endif /* CONFIG_M528x */ 50 - #if defined(CONFIG_M5206) || defined(CONFIG_M5206e) || defined(CONFIG_M5272) 51 - __asm__ __volatile__ ( 52 - "movel #0x81000100, %%d0\n\t" 53 - "movec %%d0, %%CACR\n\t" 54 - "nop\n\t" 55 - : : : "d0" ); 56 - #endif /* CONFIG_M5206 || CONFIG_M5206e || CONFIG_M5272 */ 57 - #ifdef CONFIG_M5249 58 - __asm__ __volatile__ ( 59 - "movel #0xa1000200, %%d0\n\t" 60 - "movec %%d0, %%CACR\n\t" 61 - "nop\n\t" 62 - : : : "d0" ); 63 - #endif /* CONFIG_M5249 */ 64 - #ifdef CONFIG_M532x 65 - __asm__ __volatile__ ( 66 - "movel #0x81000210, %%d0\n\t" 67 - "movec %%d0, %%CACR\n\t" 68 - "nop\n\t" 69 - : : : "d0" ); 70 - #endif /* CONFIG_M532x */ 71 } 72 #endif /* __flush_cache_all */ 73
··· 2 #define _M68KNOMMU_CACHEFLUSH_H 3 4 /* 5 + * (C) Copyright 2000-2010, Greg Ungerer <gerg@snapgear.com> 6 */ 7 #include <linux/mm.h> 8 #include <asm/mcfsim.h> ··· 10 #define flush_cache_all() __flush_cache_all() 11 #define flush_cache_mm(mm) do { } while (0) 12 #define flush_cache_dup_mm(mm) do { } while (0) 13 + #define flush_cache_range(vma, start, end) do { } while (0) 14 #define flush_cache_page(vma, vmaddr) do { } while (0) 15 #ifndef flush_dcache_range 16 #define flush_dcache_range(start,len) __flush_cache_all() ··· 33 #ifndef __flush_cache_all 34 static inline void __flush_cache_all(void) 35 { 36 + #ifdef CACHE_INVALIDATE 37 __asm__ __volatile__ ( 38 + "movel %0, %%d0\n\t" 39 "movec %%d0, %%CACR\n\t" 40 "nop\n\t" 41 + : : "i" (CACHE_INVALIDATE) : "d0" ); 42 + #endif 43 } 44 #endif /* __flush_cache_all */ 45
+27
arch/m68k/include/asm/m52xxacr.h
··· 52 #define ACR_BWE 0x00000020 /* Write buffer enabled */ 53 #define ACR_WPROTECT 0x00000004 /* Write protect region */ 54 55 /****************************************************************************/ 56 #endif /* m52xxsim_h */
··· 52 #define ACR_BWE 0x00000020 /* Write buffer enabled */ 53 #define ACR_WPROTECT 0x00000004 /* Write protect region */ 54 55 + /* 56 + * Set the cache controller settings we will use. This code is set to 57 + * only use the instruction cache, even on the controllers that support 58 + * split cache. (This setup is trying to preserve the existing behavior 59 + * for now, in the furture I hope to actually use the split cache mode). 60 + */ 61 + #if defined(CONFIG_M5206) || defined(CONFIG_M5206e) || \ 62 + defined(CONFIG_M5249) || defined(CONFIG_M5272) 63 + #define CACHE_INIT (CACR_CINV) 64 + #define CACHE_MODE (CACR_CENB + CACR_DCM) 65 + #else 66 + #ifdef CONFIG_COLDFIRE_SW_A7 67 + #define CACHE_INIT (CACR_CINV + CACR_DISD) 68 + #define CACHE_MODE (CACR_CENB + CACR_DISD + CACR_DCM) 69 + #else 70 + #define CACHE_INIT (CACR_CINV + CACR_DISD + CACR_EUSP) 71 + #define CACHE_MODE (CACR_CENB + CACR_DISD + CACR_DCM + CACR_EUSP) 72 + #endif 73 + #endif 74 + 75 + #define CACHE_INVALIDATE (CACHE_MODE + CACR_CINV) 76 + 77 + #define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \ 78 + (0x000f0000) + \ 79 + (ACR_ENABLE + ACR_ANY + ACR_CENB + ACR_BWE)) 80 + #define ACR1_MODE 0 81 + 82 /****************************************************************************/ 83 #endif /* m52xxsim_h */
+18
arch/m68k/include/asm/m53xxacr.h
··· 48 #define ACR_CM_IMPRE 0x00000060 /* Cache inhibited, imprecise */ 49 #define ACR_WPROTECT 0x00000004 /* Write protect region */ 50 51 /****************************************************************************/ 52 #endif /* m53xxsim_h */
··· 48 #define ACR_CM_IMPRE 0x00000060 /* Cache inhibited, imprecise */ 49 #define ACR_WPROTECT 0x00000004 /* Write protect region */ 50 51 + /* 52 + * Set the cache controller settings we will use. This default in the 53 + * CACR is cache inhibited, we use the ACR register to set cacheing 54 + * enabled on the regions we want (eg RAM). 55 + */ 56 + #ifdef CONFIG_COLDFIRE_SW_A7 57 + #define CACHE_MODE (CACR_EC + CACR_ESB + CACR_DCM_PRE) 58 + #else 59 + #define CACHE_MODE (CACR_EC + CACR_ESB + CACR_DCM_PRE + CACR_EUSP) 60 + #endif 61 + 62 + #define CACHE_INIT CACR_CINVA 63 + 64 + #define ACR0_MODE ((CONFIG_RAMBASE & 0xff000000) + \ 65 + (0x000f0000) + \ 66 + (ACR_ENABLE + ACR_ANY + ACR_CM_CB)) 67 + #define ACR1_MODE 0 68 + 69 /****************************************************************************/ 70 #endif /* m53xxsim_h */
+8 -3
arch/m68k/include/asm/m54xxacr.h
··· 73 #else 74 #define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP) 75 #endif 76 - 77 #define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_WT) 78 - 79 #define INSN_CACHE_MODE (ACR_ENABLE+ACR_ANY) 80 81 #ifndef __ASSEMBLY__ 82 ··· 117 : "i" (CACHE_LINE_SIZE), 118 "i" (DCACHE_SIZE / CACHE_WAYS), 119 "i" (CACHE_WAYS), 120 - "i" (CACHE_MODE|CACR_DCINVA|CACR_BCINVA|CACR_ICINVA) 121 : "d0", "a0" ); 122 } 123
··· 73 #else 74 #define CACHE_MODE (CACR_DEC+CACR_DESB+CACR_DDCM_P+CACR_BEC+CACR_IEC+CACR_EUSP) 75 #endif 76 #define DATA_CACHE_MODE (ACR_ENABLE+ACR_ANY+ACR_CM_WT) 77 #define INSN_CACHE_MODE (ACR_ENABLE+ACR_ANY) 78 + 79 + #define CACHE_INIT (CACR_DCINVA+CACR_BCINVA+CACR_ICINVA) 80 + #define CACHE_INVALIDATE (CACHE_MODE+CACR_DCINVA+CACR_BCINVA+CACR_ICINVA) 81 + #define ACR0_MODE (0x000f0000+DATA_CACHE_MODE) 82 + #define ACR1_MODE 0 83 + #define ACR2_MODE (0x000f0000+INSN_CACHE_MODE) 84 + #define ACR3_MODE 0 85 86 #ifndef __ASSEMBLY__ 87 ··· 112 : "i" (CACHE_LINE_SIZE), 113 "i" (DCACHE_SIZE / CACHE_WAYS), 114 "i" (CACHE_WAYS), 115 + "i" (CACHE_INVALIDATE) 116 : "d0", "a0" ); 117 } 118
-150
arch/m68k/include/asm/mcfcache.h
··· 1 - /****************************************************************************/ 2 - 3 - /* 4 - * mcfcache.h -- ColdFire CPU cache support code 5 - * 6 - * (C) Copyright 2004, Greg Ungerer <gerg@snapgear.com> 7 - */ 8 - 9 - /****************************************************************************/ 10 - #ifndef __M68KNOMMU_MCFCACHE_H 11 - #define __M68KNOMMU_MCFCACHE_H 12 - /****************************************************************************/ 13 - 14 - 15 - /* 16 - * The different ColdFire families have different cache arrangments. 17 - * Everything from a small instruction only cache, to configurable 18 - * data and/or instruction cache, to unified instruction/data, to 19 - * harvard style separate instruction and data caches. 20 - */ 21 - 22 - #if defined(CONFIG_M5206) || defined(CONFIG_M5206e) || defined(CONFIG_M5272) 23 - /* 24 - * Simple version 2 core cache. These have instruction cache only, 25 - * we just need to invalidate it and enable it. 26 - */ 27 - .macro CACHE_ENABLE 28 - movel #0x01000000,%d0 /* invalidate cache cmd */ 29 - movec %d0,%CACR /* do invalidate cache */ 30 - movel #0x80000100,%d0 /* setup cache mask */ 31 - movec %d0,%CACR /* enable cache */ 32 - .endm 33 - #endif /* CONFIG_M5206 || CONFIG_M5206e || CONFIG_M5272 */ 34 - 35 - #if defined(CONFIG_M523x) || defined(CONFIG_M527x) 36 - /* 37 - * New version 2 cores have a configurable split cache arrangement. 38 - * For now I am just enabling instruction cache - but ultimately I 39 - * think a split instruction/data cache would be better. 
40 - */ 41 - .macro CACHE_ENABLE 42 - movel #0x01400000,%d0 43 - movec %d0,%CACR /* invalidate cache */ 44 - nop 45 - movel #0x0000c000,%d0 /* set SDRAM cached only */ 46 - movec %d0,%ACR0 47 - movel #0x00000000,%d0 /* no other regions cached */ 48 - movec %d0,%ACR1 49 - movel #0x80400110,%d0 /* configure cache */ 50 - movec %d0,%CACR /* enable cache */ 51 - nop 52 - .endm 53 - #endif /* CONFIG_M523x || CONFIG_M527x */ 54 - 55 - #if defined(CONFIG_M528x) 56 - .macro CACHE_ENABLE 57 - nop 58 - movel #0x01000000, %d0 59 - movec %d0, %CACR /* Invalidate cache */ 60 - nop 61 - movel #0x0000c020, %d0 /* Set SDRAM cached only */ 62 - movec %d0, %ACR0 63 - movel #0x00000000, %d0 /* No other regions cached */ 64 - movec %d0, %ACR1 65 - movel #0x80000200, %d0 /* Setup cache mask */ 66 - movec %d0, %CACR /* Enable cache */ 67 - nop 68 - .endm 69 - #endif /* CONFIG_M528x */ 70 - 71 - #if defined(CONFIG_M5249) || defined(CONFIG_M5307) 72 - /* 73 - * The version 3 core cache. Oddly enough the version 2 core 5249 74 - * has the same SDRAM and cache setup as the version 3 cores. 75 - * This is a single unified instruction/data cache. 
76 - */ 77 - .macro CACHE_ENABLE 78 - movel #0x01000000,%d0 /* invalidate whole cache */ 79 - movec %d0,%CACR 80 - nop 81 - #if defined(DEBUGGER_COMPATIBLE_CACHE) || defined(CONFIG_SECUREEDGEMP3) 82 - movel #0x0000c000,%d0 /* set SDRAM cached (write-thru) */ 83 - #else 84 - movel #0x0000c020,%d0 /* set SDRAM cached (copyback) */ 85 - #endif 86 - movec %d0,%ACR0 87 - movel #0x00000000,%d0 /* no other regions cached */ 88 - movec %d0,%ACR1 89 - movel #0xa0000200,%d0 /* enable cache */ 90 - movec %d0,%CACR 91 - nop 92 - .endm 93 - #endif /* CONFIG_M5249 || CONFIG_M5307 */ 94 - 95 - #if defined(CONFIG_M532x) 96 - .macro CACHE_ENABLE 97 - movel #0x01000000,%d0 /* invalidate cache cmd */ 98 - movec %d0,%CACR /* do invalidate cache */ 99 - nop 100 - movel #0x4001C000,%d0 /* set SDRAM cached (write-thru) */ 101 - movec %d0,%ACR0 102 - movel #0x00000000,%d0 /* no other regions cached */ 103 - movec %d0,%ACR1 104 - movel #0x80000210,%d0 /* setup cache mask */ 105 - movec %d0,%CACR /* enable cache */ 106 - nop 107 - .endm 108 - #endif /* CONFIG_M532x */ 109 - 110 - #if defined(CONFIG_M5407) || defined(CONFIG_M54xx) 111 - 112 - .macro CACHE_ENABLE 113 - /* invalidate whole cache */ 114 - movel #(CACR_DCINVA+CACR_BCINVA+CACR_ICINVA),%d0 115 - movec %d0,%CACR 116 - nop 117 - /* addresses range for data cache : 0x00000000-0x0fffffff */ 118 - movel #(0x000f0000+DATA_CACHE_MODE),%d0 /* set SDRAM cached */ 119 - movec %d0, %ACR0 120 - movel #0x00000000,%d0 /* no other regions cached */ 121 - movec %d0, %ACR1 122 - /* addresses range for instruction cache : 0x00000000-0x0fffffff */ 123 - movel #(0x000f0000+INSN_CACHE_MODE),%d0 /* set SDRAM cached */ 124 - movec %d0, %ACR2 125 - movel #0x00000000,%d0 /* no other regions cached */ 126 - movec %d0, %ACR3 127 - /* enable caches */ 128 - movel #(CACHE_MODE),%d0 129 - movec %d0,%CACR 130 - nop 131 - .endm 132 - #endif /* CONFIG_M5407 || CONFIG_M54xx */ 133 - 134 - #if defined(CONFIG_M520x) 135 - .macro CACHE_ENABLE 136 - move.l 
#0x01000000,%d0 /* invalidate whole cache */ 137 - movec %d0,%CACR 138 - nop 139 - move.l #0x0000c000,%d0 /* set SDRAM cached (write-thru) */ 140 - movec %d0,%ACR0 141 - move.l #0x00000000,%d0 /* no other regions cached */ 142 - movec %d0,%ACR1 143 - move.l #0x80400010,%d0 /* enable 8K instruction cache */ 144 - movec %d0,%CACR 145 - nop 146 - .endm 147 - #endif /* CONFIG_M520x */ 148 - 149 - /****************************************************************************/ 150 - #endif /* __M68KNOMMU_MCFCACHE_H */
···
+21 -5
arch/m68knommu/platform/coldfire/head.S
··· 3 /* 4 * head.S -- common startup code for ColdFire CPUs. 5 * 6 - * (C) Copyright 1999-2006, Greg Ungerer <gerg@snapgear.com>. 7 */ 8 9 /*****************************************************************************/ ··· 13 #include <linux/init.h> 14 #include <asm/asm-offsets.h> 15 #include <asm/coldfire.h> 16 - #include <asm/mcfcache.h> 17 #include <asm/mcfsim.h> 18 #include <asm/thread_info.h> 19 ··· 172 173 /* 174 * Now that we know what the memory is, lets enable cache 175 - * and get things moving. This is Coldfire CPU specific. 176 */ 177 - CACHE_ENABLE /* enable CPU cache */ 178 - 179 180 #ifdef CONFIG_ROMFS_FS 181 /*
··· 3 /* 4 * head.S -- common startup code for ColdFire CPUs. 5 * 6 + * (C) Copyright 1999-2010, Greg Ungerer <gerg@snapgear.com>. 7 */ 8 9 /*****************************************************************************/ ··· 13 #include <linux/init.h> 14 #include <asm/asm-offsets.h> 15 #include <asm/coldfire.h> 16 #include <asm/mcfsim.h> 17 #include <asm/thread_info.h> 18 ··· 173 174 /* 175 * Now that we know what the memory is, lets enable cache 176 + * and get things moving. This is Coldfire CPU specific. Not 177 + * all version cores have identical cache register setup. But 178 + * it is very similar. Define the exact settings in the headers 179 + * then the code here is the same for all. 180 */ 181 + movel #CACHE_INIT,%d0 /* invalidate whole cache */ 182 + movec %d0,%CACR 183 + nop 184 + movel #ACR0_MODE,%d0 /* set RAM region for caching */ 185 + movec %d0,%ACR0 186 + movel #ACR1_MODE,%d0 /* anything else to cache? */ 187 + movec %d0,%ACR1 188 + #ifdef ACR2_MODE 189 + movel #ACR2_MODE,%d0 190 + movec %d0,%ACR2 191 + movel #ACR3_MODE,%d0 192 + movec %d0,%ACR3 193 + #endif 194 + movel #CACHE_MODE,%d0 /* enable cache */ 195 + movec %d0,%CACR 196 + nop 197 198 #ifdef CONFIG_ROMFS_FS 199 /*