#ifndef _LINUX_BYTEORDER_SWAB_H
#define _LINUX_BYTEORDER_SWAB_H

/*
 * linux/byteorder/swab.h
 * Byte-swapping, independently of CPU endianness
 *	swabXX[ps]?(foo)
 *
 * Francois-Rene Rideau <fare@tunes.org> 19971205
 *    separated swab functions from cpu_to_XX,
 *    to clean up support for bizarre-endian architectures.
 *
 * Trent Piepho <xyzzy@speakeasy.org> 2007114
 *    make constant-folding work, provide C versions that
 *    gcc can optimize better, explain the different versions
 *
 * See asm-i386/byteorder.h and the like for examples of how to provide
 * architecture-dependent optimized versions
 *
 */

#include <linux/compiler.h>

/* Functions/macros defined, there are a lot:
 *
 * ___swabXX
 *    Generic C versions of the swab functions.
 *
 * ___constant_swabXX
 *    C versions that gcc can fold into a compile-time constant when
 *    the argument is a compile-time constant.
 *
 * __arch__swabXX[sp]?
 *    Architecture-optimized versions of all the swab functions
 *    (including the s and p versions).  These can be defined in
 *    asm-arch/byteorder.h.  Any which are not are defined here.
 *    __arch__swabXXs() is defined in terms of __arch__swabXXp(), which
 *    is defined in terms of __arch__swabXX(), which is in turn defined
 *    in terms of ___swabXX(x).
 *    These must be macros.  They may be unsafe for arguments with
 *    side-effects.
 *
 * __fswabXX
 *    Inline function versions of the __arch__ macros.  These _are_ safe
 *    if the arguments have side-effects.  Note there are no s and p
 *    versions of these.
 *
 * __swabXX[sp]
 *    These are the ones you should actually use.  The __swabXX versions
 *    will be a constant given a constant argument, and use the arch-
 *    specific code (if any) for non-constant arguments.  The s and p
 *    versions always use the arch-specific code (constant folding
 *    doesn't apply to them).  They are safe to use with arguments that
 *    have side-effects.
 *
 * swabXX[sp]
 *    Nicknames for __swabXX[sp] to use in the kernel.
 */
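
/* An illustrative usage sketch (not exhaustive): in kernel code, where
 * the swabXX nicknames at the bottom of this file are in effect,
 *
 *	__u32 x = 0x12345678;
 *	swab32(0x12345678);	// folds to the constant 0x78563412
 *	swab32(x);		// arch-optimized swap, returns 0x78563412
 *	swab32p(&x);		// returns the swapped value; x is unchanged
 *	swab32s(&x);		// swaps x in place; x is now 0x78563412
 */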

/* casts are necessary for constants, because we never know for sure
 * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
 */

static __inline__ __attribute_const__ __u16 ___swab16(__u16 x)
{
	return x<<8 | x>>8;
}
static __inline__ __attribute_const__ __u32 ___swab32(__u32 x)
{
	return x<<24 | x>>24 |
		(x & (__u32)0x0000ff00UL)<<8 |
		(x & (__u32)0x00ff0000UL)>>8;
}
static __inline__ __attribute_const__ __u64 ___swab64(__u64 x)
{
	return x<<56 | x>>56 |
		(x & (__u64)0x000000000000ff00ULL)<<40 |
		(x & (__u64)0x0000000000ff0000ULL)<<24 |
		(x & (__u64)0x00000000ff000000ULL)<< 8 |
		(x & (__u64)0x000000ff00000000ULL)>> 8 |
		(x & (__u64)0x0000ff0000000000ULL)>>24 |
		(x & (__u64)0x00ff000000000000ULL)>>40;
}

#define ___constant_swab16(x) \
	((__u16)( \
		(((__u16)(x) & (__u16)0x00ffU) << 8) | \
		(((__u16)(x) & (__u16)0xff00U) >> 8) ))
#define ___constant_swab32(x) \
	((__u32)( \
		(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
		(((__u32)(x) & (__u32)0x0000ff00UL) <<  8) | \
		(((__u32)(x) & (__u32)0x00ff0000UL) >>  8) | \
		(((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
#define ___constant_swab64(x) \
	((__u64)( \
		(__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
		(__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
		(__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) <<  8) | \
		(__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >>  8) | \
		(__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
		(__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))

/*
 * provide defaults when no architecture-specific optimization is detected
 */
#ifndef __arch__swab16
# define __arch__swab16(x) ___swab16(x)
#endif
#ifndef __arch__swab32
# define __arch__swab32(x) ___swab32(x)
#endif
#ifndef __arch__swab64
# define __arch__swab64(x) ___swab64(x)
#endif

#ifndef __arch__swab16p
# define __arch__swab16p(x) __arch__swab16(*(x))
#endif
#ifndef __arch__swab32p
# define __arch__swab32p(x) __arch__swab32(*(x))
#endif
#ifndef __arch__swab64p
# define __arch__swab64p(x) __arch__swab64(*(x))
#endif

#ifndef __arch__swab16s
# define __arch__swab16s(x) ((void)(*(x) = __arch__swab16p(x)))
#endif
#ifndef __arch__swab32s
# define __arch__swab32s(x) ((void)(*(x) = __arch__swab32p(x)))
#endif
#ifndef __arch__swab64s
# define __arch__swab64s(x) ((void)(*(x) = __arch__swab64p(x)))
#endif
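
/* As a sketch of what an architecture override looks like (modeled on
 * asm-i386/byteorder.h, which the header comment points at; the exact
 * in-tree version differs), an arch header included before this point
 * could provide, using the i486+ bswap instruction:
 *
 *	static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
 *	{
 *		__asm__("bswap %0" : "=r" (x) : "0" (x));
 *		return x;
 *	}
 *	#define __arch__swab32(x) ___arch__swab32(x)
 *
 * The #ifndef defaults above then leave that definition in place, and
 * __arch__swab32p()/__arch__swab32s() pick it up automatically.
 */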

/*
 * Allow constant folding
 */
#if defined(__GNUC__) && defined(__OPTIMIZE__)
# define __swab16(x) \
(__builtin_constant_p((__u16)(x)) ? \
 ___constant_swab16((x)) : \
 __fswab16((x)))
# define __swab32(x) \
(__builtin_constant_p((__u32)(x)) ? \
 ___constant_swab32((x)) : \
 __fswab32((x)))
# define __swab64(x) \
(__builtin_constant_p((__u64)(x)) ? \
 ___constant_swab64((x)) : \
 __fswab64((x)))
#else
# define __swab16(x) __fswab16(x)
# define __swab32(x) __fswab32(x)
# define __swab64(x) __fswab64(x)
#endif /* OPTIMIZE */


static __inline__ __attribute_const__ __u16 __fswab16(__u16 x)
{
	return __arch__swab16(x);
}
static __inline__ __u16 __swab16p(const __u16 *x)
{
	return __arch__swab16p(x);
}
static __inline__ void __swab16s(__u16 *addr)
{
	__arch__swab16s(addr);
}

static __inline__ __attribute_const__ __u32 __fswab32(__u32 x)
{
	return __arch__swab32(x);
}
static __inline__ __u32 __swab32p(const __u32 *x)
{
	return __arch__swab32p(x);
}
static __inline__ void __swab32s(__u32 *addr)
{
	__arch__swab32s(addr);
}

#ifdef __BYTEORDER_HAS_U64__
static __inline__ __attribute_const__ __u64 __fswab64(__u64 x)
{
# ifdef __SWAB_64_THRU_32__
	/* no 64-bit arch swab: swap each 32-bit half, then swap the halves */
	__u32 h = x >> 32;
	__u32 l = x & ((1ULL<<32)-1);
	return (((__u64)__swab32(l)) << 32) | ((__u64)(__swab32(h)));
# else
	return __arch__swab64(x);
# endif
}
static __inline__ __u64 __swab64p(const __u64 *x)
{
	return __arch__swab64p(x);
}
static __inline__ void __swab64s(__u64 *addr)
{
	__arch__swab64s(addr);
}
#endif /* __BYTEORDER_HAS_U64__ */

#if defined(__KERNEL__)
#define swab16 __swab16
#define swab32 __swab32
#define swab64 __swab64
#define swab16p __swab16p
#define swab32p __swab32p
#define swab64p __swab64p
#define swab16s __swab16s
#define swab32s __swab32s
#define swab64s __swab64s
#endif

#endif /* _LINUX_BYTEORDER_SWAB_H */
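
/* Worked example (illustrative) of the __SWAB_64_THRU_32__ path in
 * __fswab64() above: for x = 0x0123456789abcdefULL,
 *
 *	h = x >> 32          = 0x01234567
 *	l = x & 0xffffffff   = 0x89abcdef
 *	__swab32(l)          = 0xefcdab89
 *	__swab32(h)          = 0x67452301
 *	result               = 0xefcdab8967452301
 *
 * which matches ___swab64(x), the full byte reversal of x.
 */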