[SPARC]: Merge asm-sparc{,64}/cache.h

Signed-off-by: David S. Miller <davem@davemloft.net>

Authored and committed by David S. Miller (d113fcd9 f610bbc6)

+20 -24
+4 -0
arch/sparc/kernel/vmlinux.lds.S

@@ -89,6 +89,10 @@
   .data.cacheline_aligned : {
     *(.data.cacheline_aligned)
   }
+  . = ALIGN(32);
+  .data.read_mostly : {
+    *(.data.read_mostly)
+  }
 
   __bss_start = .;
   .sbss : {
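The new .data.read_mostly output section, aligned to the 32-byte sparc32 L1 line, collects everything the compiler emits into the section of that name. A minimal sketch of the consumer side (the variable is hypothetical, not part of this commit):

/* With the __read_mostly macro from the merged cache.h below, this
 * variable is emitted into .data.read_mostly and gathered by the
 * *(.data.read_mostly) rule added above, keeping rarely written data
 * off cache lines that see frequent stores.
 */
static int example_feature_enabled __read_mostly = 1;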
+15 -6
include/asm-sparc/cache.h
@@ -1,20 +1,28 @@
-/* $Id: cache.h,v 1.9 1999/08/14 03:51:58 anton Exp $
- * cache.h: Cache specific code for the Sparc. These include flushing
+/* cache.h: Cache specific code for the Sparc. These include flushing
  * and direct tag/data line access.
  *
- * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu)
+ * Copyright (C) 1995, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #ifndef _SPARC_CACHE_H
 #define _SPARC_CACHE_H
 
-#include <asm/asi.h>
-
 #define L1_CACHE_SHIFT 5
 #define L1_CACHE_BYTES 32
 #define L1_CACHE_ALIGN(x) ((((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1)))
 
-#define SMP_CACHE_BYTES 32
+#ifdef CONFIG_SPARC32
+#define SMP_CACHE_BYTES_SHIFT 5
+#else
+#define SMP_CACHE_BYTES_SHIFT 6
+#endif
+
+#define SMP_CACHE_BYTES (1 << SMP_CACHE_BYTES_SHIFT)
+
+#define __read_mostly __attribute__((__section__(".data.read_mostly")))
+
+#ifdef CONFIG_SPARC32
+#include <asm/asi.h>
 
 /* Direct access to the instruction cache is provided through and
  * alternate address space. The IDC bit must be off in the ICCR on
@@ -133,5 +125,6 @@
 		     "r" (addr), "i" (ASI_M_FLUSH_USER) :
 		     "memory");
 }
+#endif /* CONFIG_SPARC32 */
 
 #endif /* !(_SPARC_CACHE_H) */
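Spelled out, the reworked constants keep the old sparc32 value while matching sparc64: SMP_CACHE_BYTES is 1 << 5 = 32 under CONFIG_SPARC32 and 1 << 6 = 64 otherwise, and L1_CACHE_ALIGN rounds sizes up to the next 32-byte line. A preprocessor-only illustration of the rounding (mine, not in the commit):

/* One byte past a line boundary costs a whole extra line:
 * L1_CACHE_ALIGN(33) = (33 + 31) & ~31 = 64.
 */
#if L1_CACHE_ALIGN(33) != 64
#error "L1_CACHE_ALIGN should round 33 up to 64"
#endif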
+1 -18
include/asm-sparc64/cache.h
@@ -1,18 +1 @@
-/*
- * include/asm-sparc64/cache.h
- */
-#ifndef __ARCH_SPARC64_CACHE_H
-#define __ARCH_SPARC64_CACHE_H
-
-/* bytes per L1 cache line */
-#define L1_CACHE_SHIFT 5
-#define L1_CACHE_BYTES 32 /* Two 16-byte sub-blocks per line. */
-
-#define L1_CACHE_ALIGN(x) (((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
-
-#define SMP_CACHE_BYTES_SHIFT 6
-#define SMP_CACHE_BYTES (1 << SMP_CACHE_BYTES_SHIFT) /* L2 cache line size. */
-
-#define __read_mostly __attribute__((__section__(".data.read_mostly")))
-
-#endif
+#include <asm-sparc/cache.h>
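Reducing the sparc64 header to a one-line stub is the merge itself: both architectures now share one set of definitions, and the CONFIG_SPARC32 guard keeps the sparc32-only flush helpers (which use sparc32 ASIs such as ASI_M_FLUSH_USER) out of 64-bit builds. As a sketch of what a sparc64 compilation unit now sees through the stub (the check is my illustration, not part of the commit):

#include <asm/cache.h>	/* on sparc64, chains to asm-sparc/cache.h */

/* CONFIG_SPARC32 is unset on sparc64, so the merged header yields the
 * same 64-byte L2 line size the removed header hardcoded.
 */
#if SMP_CACHE_BYTES != 64
#error "merged cache.h changed the sparc64 L2 line size"
#endif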