#ifndef _LINUX_BYTEORDER_SWAB_H
#define _LINUX_BYTEORDER_SWAB_H

/*
 * linux/byteorder/swab.h
 * Byte-swapping, independently from CPU endianness
 *      swabXX[ps]?(foo)
 *
 * Francois-Rene Rideau <fare@tunes.org> 19971205
 *    separated swab functions from cpu_to_XX,
 *    to clean up support for bizarre-endian architectures.
 *
 * See asm-i386/byteorder.h and the like for examples of how to provide
 * architecture-dependent optimized versions
 *
 */
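/*
 * Usage sketch of the swabXX[ps]? naming scheme (illustrative, not part
 * of the original header): swabXX(x) returns the byte-swapped value,
 * swabXXp(p) returns the swapped value of *p, and swabXXs(p) swaps *p
 * in place.
 *
 *      __u16 v = 0x1234;
 *      __u16 a = swab16(v);        // a == 0x3412, v unchanged
 *      __u16 b = swab16p(&v);      // b == 0x3412, v unchanged
 *      swab16s(&v);                // v == 0x3412
 */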

/* casts are necessary for constants, because we never know for sure
 * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
 */
#define ___swab16(x) \
({ \
        __u16 __x = (x); \
        ((__u16)( \
                (((__u16)(__x) & (__u16)0x00ffU) << 8) | \
                (((__u16)(__x) & (__u16)0xff00U) >> 8) )); \
})
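/*
 * Worked example (illustrative): for __x = 0x1234, the low byte 0x34 is
 * shifted up and the high byte 0x12 shifted down, so ___swab16(0x1234)
 * evaluates to 0x3412.
 */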

#define ___swab32(x) \
({ \
        __u32 __x = (x); \
        ((__u32)( \
                (((__u32)(__x) & (__u32)0x000000ffUL) << 24) | \
                (((__u32)(__x) & (__u32)0x0000ff00UL) <<  8) | \
                (((__u32)(__x) & (__u32)0x00ff0000UL) >>  8) | \
                (((__u32)(__x) & (__u32)0xff000000UL) >> 24) )); \
})

#define ___swab64(x) \
({ \
        __u64 __x = (x); \
        ((__u64)( \
                (__u64)(((__u64)(__x) & (__u64)0x00000000000000ffULL) << 56) | \
                (__u64)(((__u64)(__x) & (__u64)0x000000000000ff00ULL) << 40) | \
                (__u64)(((__u64)(__x) & (__u64)0x0000000000ff0000ULL) << 24) | \
                (__u64)(((__u64)(__x) & (__u64)0x00000000ff000000ULL) <<  8) | \
                (__u64)(((__u64)(__x) & (__u64)0x000000ff00000000ULL) >>  8) | \
                (__u64)(((__u64)(__x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
                (__u64)(((__u64)(__x) & (__u64)0x00ff000000000000ULL) >> 40) | \
                (__u64)(((__u64)(__x) & (__u64)0xff00000000000000ULL) >> 56) )); \
})
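/*
 * Similarly (illustrative): ___swab32(0x12345678) == 0x78563412 and
 * ___swab64(0x1122334455667788ULL) == 0x8877665544332211ULL; each byte
 * is masked out and shifted to its mirrored position.
 */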

#define ___constant_swab16(x) \
        ((__u16)( \
                (((__u16)(x) & (__u16)0x00ffU) << 8) | \
                (((__u16)(x) & (__u16)0xff00U) >> 8) ))
#define ___constant_swab32(x) \
        ((__u32)( \
                (((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
                (((__u32)(x) & (__u32)0x0000ff00UL) <<  8) | \
                (((__u32)(x) & (__u32)0x00ff0000UL) >>  8) | \
                (((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
#define ___constant_swab64(x) \
        ((__u64)( \
                (__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
                (__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
                (__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
                (__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) <<  8) | \
                (__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >>  8) | \
                (__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
                (__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
                (__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))
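/*
 * The ___constant_* variants avoid the GNU statement expressions used
 * above, so they remain valid where a true constant expression is
 * required (a sketch, not from the original header):
 *
 *      static const __u16 magic_be = ___constant_swab16(0xCAFE);
 */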
  73.  
  74. /*
  75.  * provide defaults when no architecture-specific optimization is detected
  76.  */
  77. #ifndef __arch__swab16
  78. #  define __arch__swab16(x) ({ __u16 __tmp = (x) ; ___swab16(__tmp); })
  79. #endif
  80. #ifndef __arch__swab32
  81. #  define __arch__swab32(x) ({ __u32 __tmp = (x) ; ___swab32(__tmp); })
  82. #endif
  83. #ifndef __arch__swab64
  84. #  define __arch__swab64(x) ({ __u64 __tmp = (x) ; ___swab64(__tmp); })
  85. #endif
  86.  
  87. #ifndef __arch__swab16p
  88. #  define __arch__swab16p(x) __arch__swab16(*(x))
  89. #endif
  90. #ifndef __arch__swab32p
  91. #  define __arch__swab32p(x) __arch__swab32(*(x))
  92. #endif
  93. #ifndef __arch__swab64p
  94. #  define __arch__swab64p(x) __arch__swab64(*(x))
  95. #endif
  96.  
  97. #ifndef __arch__swab16s
  98. #  define __arch__swab16s(x) do { *(x) = __arch__swab16p((x)); } while (0)
  99. #endif
  100. #ifndef __arch__swab32s
  101. #  define __arch__swab32s(x) do { *(x) = __arch__swab32p((x)); } while (0)
  102. #endif
  103. #ifndef __arch__swab64s
  104. #  define __arch__swab64s(x) do { *(x) = __arch__swab64p((x)); } while (0)
  105. #endif
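/*
 * An architecture header included before this one can pre-define any
 * __arch__swab* macro to beat these defaults.  Hypothetical i386-style
 * override using the bswap instruction (a sketch, not the actual
 * asm-i386/byteorder.h code):
 *
 *      #define __arch__swab32(x) \
 *      ({ __u32 __v = (x); \
 *         __asm__("bswap %0" : "=r" (__v) : "0" (__v)); \
 *         __v; })
 */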


/*
 * Allow constant folding
 */
#if defined(__GNUC__) && (__GNUC__ >= 2) && defined(__OPTIMIZE__)
#  define __swab16(x) \
(__builtin_constant_p((__u16)(x)) ? \
 ___swab16((x)) : \
 __fswab16((x)))
#  define __swab32(x) \
(__builtin_constant_p((__u32)(x)) ? \
 ___swab32((x)) : \
 __fswab32((x)))
#  define __swab64(x) \
(__builtin_constant_p((__u64)(x)) ? \
 ___swab64((x)) : \
 __fswab64((x)))
#else
#  define __swab16(x) __fswab16(x)
#  define __swab32(x) __fswab32(x)
#  define __swab64(x) __fswab64(x)
#endif /* OPTIMIZE */
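/*
 * Effect of the __builtin_constant_p dispatch (illustrative):
 *
 *      __u32 a = __swab32(0x11223344); // constant: folded to 0x44332211
 *      __u32 b = __swab32(some_var);   // runtime: goes through __fswab32
 */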


static __inline__ __const__ __u16 __fswab16(__u16 x)
{
        return __arch__swab16(x);
}
static __inline__ __u16 __swab16p(__u16 *x)
{
        return __arch__swab16p(x);
}
static __inline__ void __swab16s(__u16 *addr)
{
        __arch__swab16s(addr);
}

static __inline__ __const__ __u32 __fswab32(__u32 x)
{
        return __arch__swab32(x);
}
static __inline__ __u32 __swab32p(__u32 *x)
{
        return __arch__swab32p(x);
}
static __inline__ void __swab32s(__u32 *addr)
{
        __arch__swab32s(addr);
}

#ifdef __BYTEORDER_HAS_U64__
static __inline__ __const__ __u64 __fswab64(__u64 x)
{
#  ifdef __SWAB_64_THRU_32__
        __u32 h = x >> 32;
        __u32 l = x & ((1ULL<<32)-1);
        return (((__u64)__swab32(l)) << 32) | ((__u64)(__swab32(h)));
#  else
        return __arch__swab64(x);
#  endif
}
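/*
 * Illustration of the __SWAB_64_THRU_32__ path (not from the original
 * header): for x = 0x1122334455667788ULL we get h = 0x11223344 and
 * l = 0x55667788, so the result is
 * (swab32(l) << 32) | swab32(h) == 0x8877665544332211ULL.
 */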
static __inline__ __u64 __swab64p(__u64 *x)
{
        return __arch__swab64p(x);
}
static __inline__ void __swab64s(__u64 *addr)
{
        __arch__swab64s(addr);
}
#endif /* __BYTEORDER_HAS_U64__ */

#if defined(__KERNEL__)
#define swab16 __swab16
#define swab32 __swab32
#define swab64 __swab64
#define swab16p __swab16p
#define swab32p __swab32p
#define swab64p __swab64p
#define swab16s __swab16s
#define swab32s __swab32s
#define swab64s __swab64s
#endif
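/*
 * With __KERNEL__ defined, callers use the unprefixed names, e.g.
 * (illustrative; `hdr` and its `len` field are hypothetical):
 *
 *      __u16 id = swab16(raw_id);
 *      swab32s(&hdr->len);     // swap a 32-bit field in place
 */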

#endif /* _LINUX_BYTEORDER_SWAB_H */