Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

ARM: 6466/1: implement flush_icache_all for the rest of the CPUs

Commit 81d11955bf0 ("ARM: 6405/1: Handle __flush_icache_all for
CONFIG_SMP_ON_UP") added a new function to struct cpu_cache_fns:
flush_icache_all(). It also implemented this for v6 and v7 but not
for v5 and backwards. Without the function pointer in place, we
will be calling the wrong cache functions.

For example, with ep93xx we get the following:

Unable to handle kernel paging request at virtual address ee070f38
pgd = c0004000
[ee070f38] *pgd=00000000
Internal error: Oops: 80000005 [#1] PREEMPT
last sysfs file:
Modules linked in:
CPU: 0 Not tainted (2.6.36+ #1)
PC is at 0xee070f38
LR is at __dma_alloc+0x11c/0x2d0
pc : [<ee070f38>] lr : [<c0032c8c>] psr: 60000013
sp : c581bde0 ip : 00000000 fp : c0472000
r10: c0472000 r9 : 000000d0 r8 : 00020000
r7 : 0001ffff r6 : 00000000 r5 : c0472400 r4 : c5980000
r3 : c03ab7e0 r2 : 00000000 r1 : c59a0000 r0 : c5980000
Flags: nZCv IRQs on FIQs on Mode SVC_32 ISA ARM Segment kernel
Control: c000717f Table: c0004000 DAC: 00000017
Process swapper (pid: 1, stack limit = 0xc581a270)
[<c0032c8c>] (__dma_alloc+0x11c/0x2d0)
[<c0032e5c>] (dma_alloc_writecombine+0x1c/0x24)
[<c0204148>] (ep93xx_pcm_preallocate_dma_buffer+0x44/0x60)
[<c02041c0>] (ep93xx_pcm_new+0x5c/0x88)
[<c01ff188>] (snd_soc_instantiate_cards+0x8a8/0xbc0)
[<c01ff59c>] (soc_probe+0xfc/0x134)
[<c01adafc>] (platform_drv_probe+0x18/0x1c)
[<c01acca4>] (driver_probe_device+0xb0/0x16c)
[<c01ac284>] (bus_for_each_drv+0x48/0x84)
[<c01ace90>] (device_attach+0x50/0x68)
[<c01ac0f8>] (bus_probe_device+0x24/0x44)
[<c01aad7c>] (device_add+0x2fc/0x44c)
[<c01adfa8>] (platform_device_add+0x104/0x15c)
[<c0015eb8>] (simone_init+0x60/0x94)
[<c0021410>] (do_one_initcall+0xd0/0x1a4)

__dma_alloc() calls (inlined) __dma_alloc_buffer() which ends up
calling dmac_flush_range(). Now since the entries in the
arm920_cache_fns are shifted by one, we jump into address 0xee070f38
which is actually next instruction after the arm920_cache_fns
structure.

So implement flush_icache_all() for the rest of the supported CPUs
using a generic 'invalidate I cache' instruction.

Signed-off-by: Mika Westerberg <mika.westerberg@iki.fi>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>

Authored by Mika Westerberg and committed by Russell King.
Commit IDs: c8c90860 4e54d93d

+225
+12
arch/arm/mm/cache-fa.S
··· 38 38 #define CACHE_DLIMIT (CACHE_DSIZE * 2) 39 39 40 40 /* 41 + * flush_icache_all() 42 + * 43 + * Unconditionally clean and invalidate the entire icache. 44 + */ 45 + ENTRY(fa_flush_icache_all) 46 + mov r0, #0 47 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 48 + mov pc, lr 49 + ENDPROC(fa_flush_icache_all) 50 + 51 + /* 41 52 * flush_user_cache_all() 42 53 * 43 54 * Clean and invalidate all cache entries in a particular address ··· 244 233 245 234 .type fa_cache_fns, #object 246 235 ENTRY(fa_cache_fns) 236 + .long fa_flush_icache_all 247 237 .long fa_flush_kern_cache_all 248 238 .long fa_flush_user_cache_all 249 239 .long fa_flush_user_cache_range
+10
arch/arm/mm/cache-v3.S
··· 13 13 #include "proc-macros.S" 14 14 15 15 /* 16 + * flush_icache_all() 17 + * 18 + * Unconditionally clean and invalidate the entire icache. 19 + */ 20 + ENTRY(v3_flush_icache_all) 21 + mov pc, lr 22 + ENDPROC(v3_flush_icache_all) 23 + 24 + /* 16 25 * flush_user_cache_all() 17 26 * 18 27 * Invalidate all cache entries in a particular address ··· 131 122 132 123 .type v3_cache_fns, #object 133 124 ENTRY(v3_cache_fns) 125 + .long v3_flush_icache_all 134 126 .long v3_flush_kern_cache_all 135 127 .long v3_flush_user_cache_all 136 128 .long v3_flush_user_cache_range
+10
arch/arm/mm/cache-v4.S
··· 13 13 #include "proc-macros.S" 14 14 15 15 /* 16 + * flush_icache_all() 17 + * 18 + * Unconditionally clean and invalidate the entire icache. 19 + */ 20 + ENTRY(v4_flush_icache_all) 21 + mov pc, lr 22 + ENDPROC(v4_flush_icache_all) 23 + 24 + /* 16 25 * flush_user_cache_all() 17 26 * 18 27 * Invalidate all cache entries in a particular address ··· 143 134 144 135 .type v4_cache_fns, #object 145 136 ENTRY(v4_cache_fns) 137 + .long v4_flush_icache_all 146 138 .long v4_flush_kern_cache_all 147 139 .long v4_flush_user_cache_all 148 140 .long v4_flush_user_cache_range
+12
arch/arm/mm/cache-v4wb.S
··· 51 51 .text 52 52 53 53 /* 54 + * flush_icache_all() 55 + * 56 + * Unconditionally clean and invalidate the entire icache. 57 + */ 58 + ENTRY(v4wb_flush_icache_all) 59 + mov r0, #0 60 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 61 + mov pc, lr 62 + ENDPROC(v4wb_flush_icache_all) 63 + 64 + /* 54 65 * flush_user_cache_all() 55 66 * 56 67 * Clean and invalidate all cache entries in a particular address ··· 255 244 256 245 .type v4wb_cache_fns, #object 257 246 ENTRY(v4wb_cache_fns) 247 + .long v4wb_flush_icache_all 258 248 .long v4wb_flush_kern_cache_all 259 249 .long v4wb_flush_user_cache_all 260 250 .long v4wb_flush_user_cache_range
+12
arch/arm/mm/cache-v4wt.S
··· 41 41 #define CACHE_DLIMIT 16384 42 42 43 43 /* 44 + * flush_icache_all() 45 + * 46 + * Unconditionally clean and invalidate the entire icache. 47 + */ 48 + ENTRY(v4wt_flush_icache_all) 49 + mov r0, #0 50 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 51 + mov pc, lr 52 + ENDPROC(v4wt_flush_icache_all) 53 + 54 + /* 44 55 * flush_user_cache_all() 45 56 * 46 57 * Invalidate all cache entries in a particular address ··· 199 188 200 189 .type v4wt_cache_fns, #object 201 190 ENTRY(v4wt_cache_fns) 191 + .long v4wt_flush_icache_all 202 192 .long v4wt_flush_kern_cache_all 203 193 .long v4wt_flush_user_cache_all 204 194 .long v4wt_flush_user_cache_range
+15
arch/arm/mm/proc-arm1020.S
··· 119 119 /* ================================= CACHE ================================ */ 120 120 121 121 .align 5 122 + 123 + /* 124 + * flush_icache_all() 125 + * 126 + * Unconditionally clean and invalidate the entire icache. 127 + */ 128 + ENTRY(arm1020_flush_icache_all) 129 + #ifndef CONFIG_CPU_ICACHE_DISABLE 130 + mov r0, #0 131 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 132 + #endif 133 + mov pc, lr 134 + ENDPROC(arm1020_flush_icache_all) 135 + 122 136 /* 123 137 * flush_user_cache_all() 124 138 * ··· 365 351 ENDPROC(arm1020_dma_unmap_area) 366 352 367 353 ENTRY(arm1020_cache_fns) 354 + .long arm1020_flush_icache_all 368 355 .long arm1020_flush_kern_cache_all 369 356 .long arm1020_flush_user_cache_all 370 357 .long arm1020_flush_user_cache_range
+15
arch/arm/mm/proc-arm1020e.S
··· 119 119 /* ================================= CACHE ================================ */ 120 120 121 121 .align 5 122 + 123 + /* 124 + * flush_icache_all() 125 + * 126 + * Unconditionally clean and invalidate the entire icache. 127 + */ 128 + ENTRY(arm1020e_flush_icache_all) 129 + #ifndef CONFIG_CPU_ICACHE_DISABLE 130 + mov r0, #0 131 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 132 + #endif 133 + mov pc, lr 134 + ENDPROC(arm1020e_flush_icache_all) 135 + 122 136 /* 123 137 * flush_user_cache_all() 124 138 * ··· 351 337 ENDPROC(arm1020e_dma_unmap_area) 352 338 353 339 ENTRY(arm1020e_cache_fns) 340 + .long arm1020e_flush_icache_all 354 341 .long arm1020e_flush_kern_cache_all 355 342 .long arm1020e_flush_user_cache_all 356 343 .long arm1020e_flush_user_cache_range
+15
arch/arm/mm/proc-arm1022.S
··· 108 108 /* ================================= CACHE ================================ */ 109 109 110 110 .align 5 111 + 112 + /* 113 + * flush_icache_all() 114 + * 115 + * Unconditionally clean and invalidate the entire icache. 116 + */ 117 + ENTRY(arm1022_flush_icache_all) 118 + #ifndef CONFIG_CPU_ICACHE_DISABLE 119 + mov r0, #0 120 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 121 + #endif 122 + mov pc, lr 123 + ENDPROC(arm1022_flush_icache_all) 124 + 111 125 /* 112 126 * flush_user_cache_all() 113 127 * ··· 340 326 ENDPROC(arm1022_dma_unmap_area) 341 327 342 328 ENTRY(arm1022_cache_fns) 329 + .long arm1022_flush_icache_all 343 330 .long arm1022_flush_kern_cache_all 344 331 .long arm1022_flush_user_cache_all 345 332 .long arm1022_flush_user_cache_range
+15
arch/arm/mm/proc-arm1026.S
··· 108 108 /* ================================= CACHE ================================ */ 109 109 110 110 .align 5 111 + 112 + /* 113 + * flush_icache_all() 114 + * 115 + * Unconditionally clean and invalidate the entire icache. 116 + */ 117 + ENTRY(arm1026_flush_icache_all) 118 + #ifndef CONFIG_CPU_ICACHE_DISABLE 119 + mov r0, #0 120 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 121 + #endif 122 + mov pc, lr 123 + ENDPROC(arm1026_flush_icache_all) 124 + 111 125 /* 112 126 * flush_user_cache_all() 113 127 * ··· 334 320 ENDPROC(arm1026_dma_unmap_area) 335 321 336 322 ENTRY(arm1026_cache_fns) 323 + .long arm1026_flush_icache_all 337 324 .long arm1026_flush_kern_cache_all 338 325 .long arm1026_flush_user_cache_all 339 326 .long arm1026_flush_user_cache_range
+12
arch/arm/mm/proc-arm920.S
··· 110 110 #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH 111 111 112 112 /* 113 + * flush_icache_all() 114 + * 115 + * Unconditionally clean and invalidate the entire icache. 116 + */ 117 + ENTRY(arm920_flush_icache_all) 118 + mov r0, #0 119 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 120 + mov pc, lr 121 + ENDPROC(arm920_flush_icache_all) 122 + 123 + /* 113 124 * flush_user_cache_all() 114 125 * 115 126 * Invalidate all cache entries in a particular address ··· 316 305 ENDPROC(arm920_dma_unmap_area) 317 306 318 307 ENTRY(arm920_cache_fns) 308 + .long arm920_flush_icache_all 319 309 .long arm920_flush_kern_cache_all 320 310 .long arm920_flush_user_cache_all 321 311 .long arm920_flush_user_cache_range
+12
arch/arm/mm/proc-arm922.S
··· 112 112 #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH 113 113 114 114 /* 115 + * flush_icache_all() 116 + * 117 + * Unconditionally clean and invalidate the entire icache. 118 + */ 119 + ENTRY(arm922_flush_icache_all) 120 + mov r0, #0 121 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 122 + mov pc, lr 123 + ENDPROC(arm922_flush_icache_all) 124 + 125 + /* 115 126 * flush_user_cache_all() 116 127 * 117 128 * Clean and invalidate all cache entries in a particular ··· 318 307 ENDPROC(arm922_dma_unmap_area) 319 308 320 309 ENTRY(arm922_cache_fns) 310 + .long arm922_flush_icache_all 321 311 .long arm922_flush_kern_cache_all 322 312 .long arm922_flush_user_cache_all 323 313 .long arm922_flush_user_cache_range
+12
arch/arm/mm/proc-arm925.S
··· 145 145 mov pc, lr 146 146 147 147 /* 148 + * flush_icache_all() 149 + * 150 + * Unconditionally clean and invalidate the entire icache. 151 + */ 152 + ENTRY(arm925_flush_icache_all) 153 + mov r0, #0 154 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 155 + mov pc, lr 156 + ENDPROC(arm925_flush_icache_all) 157 + 158 + /* 148 159 * flush_user_cache_all() 149 160 * 150 161 * Clean and invalidate all cache entries in a particular ··· 373 362 ENDPROC(arm925_dma_unmap_area) 374 363 375 364 ENTRY(arm925_cache_fns) 365 + .long arm925_flush_icache_all 376 366 .long arm925_flush_kern_cache_all 377 367 .long arm925_flush_user_cache_all 378 368 .long arm925_flush_user_cache_range
+12
arch/arm/mm/proc-arm926.S
··· 111 111 mov pc, lr 112 112 113 113 /* 114 + * flush_icache_all() 115 + * 116 + * Unconditionally clean and invalidate the entire icache. 117 + */ 118 + ENTRY(arm926_flush_icache_all) 119 + mov r0, #0 120 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 121 + mov pc, lr 122 + ENDPROC(arm926_flush_icache_all) 123 + 124 + /* 114 125 * flush_user_cache_all() 115 126 * 116 127 * Clean and invalidate all cache entries in a particular ··· 336 325 ENDPROC(arm926_dma_unmap_area) 337 326 338 327 ENTRY(arm926_cache_fns) 328 + .long arm926_flush_icache_all 339 329 .long arm926_flush_kern_cache_all 340 330 .long arm926_flush_user_cache_all 341 331 .long arm926_flush_user_cache_range
+12
arch/arm/mm/proc-arm940.S
··· 68 68 mov pc, lr 69 69 70 70 /* 71 + * flush_icache_all() 72 + * 73 + * Unconditionally clean and invalidate the entire icache. 74 + */ 75 + ENTRY(arm940_flush_icache_all) 76 + mov r0, #0 77 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 78 + mov pc, lr 79 + ENDPROC(arm940_flush_icache_all) 80 + 81 + /* 71 82 * flush_user_cache_all() 72 83 */ 73 84 ENTRY(arm940_flush_user_cache_all) ··· 265 254 ENDPROC(arm940_dma_unmap_area) 266 255 267 256 ENTRY(arm940_cache_fns) 257 + .long arm940_flush_icache_all 268 258 .long arm940_flush_kern_cache_all 269 259 .long arm940_flush_user_cache_all 270 260 .long arm940_flush_user_cache_range
+12
arch/arm/mm/proc-arm946.S
··· 75 75 mov pc, lr 76 76 77 77 /* 78 + * flush_icache_all() 79 + * 80 + * Unconditionally clean and invalidate the entire icache. 81 + */ 82 + ENTRY(arm946_flush_icache_all) 83 + mov r0, #0 84 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 85 + mov pc, lr 86 + ENDPROC(arm946_flush_icache_all) 87 + 88 + /* 78 89 * flush_user_cache_all() 79 90 */ 80 91 ENTRY(arm946_flush_user_cache_all) ··· 307 296 ENDPROC(arm946_dma_unmap_area) 308 297 309 298 ENTRY(arm946_cache_fns) 299 + .long arm946_flush_icache_all 310 300 .long arm946_flush_kern_cache_all 311 301 .long arm946_flush_user_cache_all 312 302 .long arm946_flush_user_cache_range
+13
arch/arm/mm/proc-feroceon.S
··· 124 124 mov pc, lr 125 125 126 126 /* 127 + * flush_icache_all() 128 + * 129 + * Unconditionally clean and invalidate the entire icache. 130 + */ 131 + ENTRY(feroceon_flush_icache_all) 132 + mov r0, #0 133 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 134 + mov pc, lr 135 + ENDPROC(feroceon_flush_icache_all) 136 + 137 + /* 127 138 * flush_user_cache_all() 128 139 * 129 140 * Clean and invalidate all cache entries in a particular ··· 412 401 ENDPROC(feroceon_dma_unmap_area) 413 402 414 403 ENTRY(feroceon_cache_fns) 404 + .long feroceon_flush_icache_all 415 405 .long feroceon_flush_kern_cache_all 416 406 .long feroceon_flush_user_cache_all 417 407 .long feroceon_flush_user_cache_range ··· 424 412 .long feroceon_dma_flush_range 425 413 426 414 ENTRY(feroceon_range_cache_fns) 415 + .long feroceon_flush_icache_all 427 416 .long feroceon_flush_kern_cache_all 428 417 .long feroceon_flush_user_cache_all 429 418 .long feroceon_flush_user_cache_range
+12
arch/arm/mm/proc-xsc3.S
··· 141 141 /* ================================= CACHE ================================ */ 142 142 143 143 /* 144 + * flush_icache_all() 145 + * 146 + * Unconditionally clean and invalidate the entire icache. 147 + */ 148 + ENTRY(xsc3_flush_icache_all) 149 + mov r0, #0 150 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 151 + mov pc, lr 152 + ENDPROC(xsc3_flush_icache_all) 153 + 154 + /* 144 155 * flush_user_cache_all() 145 156 * 146 157 * Invalidate all cache entries in a particular address ··· 336 325 ENDPROC(xsc3_dma_unmap_area) 337 326 338 327 ENTRY(xsc3_cache_fns) 328 + .long xsc3_flush_icache_all 339 329 .long xsc3_flush_kern_cache_all 340 330 .long xsc3_flush_user_cache_all 341 331 .long xsc3_flush_user_cache_range
+12
arch/arm/mm/proc-xscale.S
··· 181 181 /* ================================= CACHE ================================ */ 182 182 183 183 /* 184 + * flush_icache_all() 185 + * 186 + * Unconditionally clean and invalidate the entire icache. 187 + */ 188 + ENTRY(xscale_flush_icache_all) 189 + mov r0, #0 190 + mcr p15, 0, r0, c7, c5, 0 @ invalidate I cache 191 + mov pc, lr 192 + ENDPROC(xscale_flush_icache_all) 193 + 194 + /* 184 195 * flush_user_cache_all() 185 196 * 186 197 * Invalidate all cache entries in a particular address ··· 408 397 ENDPROC(xscale_dma_unmap_area) 409 398 410 399 ENTRY(xscale_cache_fns) 400 + .long xscale_flush_icache_all 411 401 .long xscale_flush_kern_cache_all 412 402 .long xscale_flush_user_cache_all 413 403 .long xscale_flush_user_cache_range