at v2.6.16-rc2
#ifndef __UM_CACHE_H
#define __UM_CACHE_H

#include <linux/config.h>

#if defined(CONFIG_UML_X86) && !defined(CONFIG_64BIT)
# define L1_CACHE_SHIFT		(CONFIG_X86_L1_CACHE_SHIFT)
#elif defined(CONFIG_UML_X86) /* 64-bit */
# define L1_CACHE_SHIFT		6 /* Should be 7 on Intel */
#else
/* XXX: this was taken from x86, now it's completely random. Luckily only
 * affects SMP padding. */
# define L1_CACHE_SHIFT		5
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#endif
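The sketch below is not part of the header above; it is a minimal userspace illustration of what a cache-line-size constant like L1_CACHE_BYTES is typically used for: padding and aligning data so that items touched by different CPUs do not share a cache line (the "SMP padding" the XXX comment refers to). The value 6 for L1_CACHE_SHIFT mirrors the header's 64-bit x86 branch and is an assumption here; the struct name padded_counter is hypothetical. In the kernel itself this constant is consumed indirectly, through helpers in linux/cache.h such as the ____cacheline_aligned-style annotations, rather than by open-coding the attribute as done here.

/* Illustrative sketch, compiled as ordinary userspace C with GCC/Clang. */
#include <stdio.h>

#define L1_CACHE_SHIFT	6			/* assumed: 64-byte cache lines */
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)

/* Aligning the struct to the cache-line size also pads its size up to a
 * multiple of that alignment, so adjacent array elements land on
 * different cache lines and cannot falsely share one. */
struct padded_counter {
	unsigned long value;
} __attribute__((aligned(L1_CACHE_BYTES)));

int main(void)
{
	struct padded_counter counters[2] = { { 0 }, { 0 } };

	printf("L1_CACHE_BYTES = %d\n", L1_CACHE_BYTES);
	printf("sizeof(struct padded_counter) = %zu\n",
	       sizeof(struct padded_counter));
	printf("counters[0] at %p, counters[1] at %p\n",
	       (void *)&counters[0], (void *)&counters[1]);
	return 0;
}

If L1_CACHE_SHIFT is too small for the real hardware (the "completely random" fallback of 5 above), two such counters may end up on one physical cache line and ping-pong between CPUs; correctness is unaffected, only performance, which is why the comment says it "only affects SMP padding".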