diff options
| author | David Woodhouse <David.Woodhouse@intel.com> | 2012-12-03 11:25:40 -0500 |
|---|---|---|
| committer | David Woodhouse <David.Woodhouse@intel.com> | 2012-12-05 20:22:31 -0500 |
| commit | cf66bb93e0f75e0a4ba1ec070692618fa028e994 (patch) | |
| tree | 0ae48658adb29f50bdd85a94cbb84670a234f441 /include/uapi/linux | |
| parent | 27d7c2a006a81c04fab00b8cd81b99af3b32738d (diff) | |
byteorder: allow arch to opt to use GCC intrinsics for byteswapping
Since GCC 4.4, there have been __builtin_bswap32() and __builtin_bswap64()
intrinsics. A __builtin_bswap16() came a little later (4.6 for PowerPC,
4.8 for other platforms).
By using these instead of the inline assembler that most architectures
have in their __arch_swabXX() macros, we let the compiler see what's
actually happening. The resulting code should be at least as good, and
much *better* in the cases where it can be combined with a nearby load
or store, using a load-and-byteswap or store-and-byteswap instruction
(e.g. lwbrx/stwbrx on PowerPC, movbe on Atom).
When GCC is sufficiently recent *and* the architecture opts in to using
the intrinsics by setting CONFIG_ARCH_USE_BUILTIN_BSWAP, they will be
used in preference to the __arch_swabXX() macros. An architecture which
does not set ARCH_USE_BUILTIN_BSWAP will continue to use its own
hand-crafted macros.
Signed-off-by: David Woodhouse <David.Woodhouse@intel.com>
Acked-by: H. Peter Anvin <hpa@linux.intel.com>
Diffstat (limited to 'include/uapi/linux')
| -rw-r--r-- | include/uapi/linux/swab.h | 12 |
1 files changed, 9 insertions, 3 deletions
diff --git a/include/uapi/linux/swab.h b/include/uapi/linux/swab.h index e811474724c2..0e011eb91b5d 100644 --- a/include/uapi/linux/swab.h +++ b/include/uapi/linux/swab.h | |||
| @@ -45,7 +45,9 @@ | |||
| 45 | 45 | ||
| 46 | static inline __attribute_const__ __u16 __fswab16(__u16 val) | 46 | static inline __attribute_const__ __u16 __fswab16(__u16 val) |
| 47 | { | 47 | { |
| 48 | #ifdef __arch_swab16 | 48 | #ifdef __HAVE_BUILTIN_BSWAP16__ |
| 49 | return __builtin_bswap16(val); | ||
| 50 | #elif defined (__arch_swab16) | ||
| 49 | return __arch_swab16(val); | 51 | return __arch_swab16(val); |
| 50 | #else | 52 | #else |
| 51 | return ___constant_swab16(val); | 53 | return ___constant_swab16(val); |
| @@ -54,7 +56,9 @@ static inline __attribute_const__ __u16 __fswab16(__u16 val) | |||
| 54 | 56 | ||
| 55 | static inline __attribute_const__ __u32 __fswab32(__u32 val) | 57 | static inline __attribute_const__ __u32 __fswab32(__u32 val) |
| 56 | { | 58 | { |
| 57 | #ifdef __arch_swab32 | 59 | #ifdef __HAVE_BUILTIN_BSWAP32__ |
| 60 | return __builtin_bswap32(val); | ||
| 61 | #elif defined(__arch_swab32) | ||
| 58 | return __arch_swab32(val); | 62 | return __arch_swab32(val); |
| 59 | #else | 63 | #else |
| 60 | return ___constant_swab32(val); | 64 | return ___constant_swab32(val); |
| @@ -63,7 +67,9 @@ static inline __attribute_const__ __u32 __fswab32(__u32 val) | |||
| 63 | 67 | ||
| 64 | static inline __attribute_const__ __u64 __fswab64(__u64 val) | 68 | static inline __attribute_const__ __u64 __fswab64(__u64 val) |
| 65 | { | 69 | { |
| 66 | #ifdef __arch_swab64 | 70 | #ifdef __HAVE_BUILTIN_BSWAP64__ |
| 71 | return __builtin_bswap64(val); | ||
| 72 | #elif defined (__arch_swab64) | ||
| 67 | return __arch_swab64(val); | 73 | return __arch_swab64(val); |
| 68 | #elif defined(__SWAB_64_THRU_32__) | 74 | #elif defined(__SWAB_64_THRU_32__) |
| 69 | __u32 h = val >> 32; | 75 | __u32 h = val >> 32; |
