author     Harvey Harrison <harvey.harrison@gmail.com>  2009-01-06 17:56:21 -0500
committer  Linus Torvalds <torvalds@linux-foundation.org>  2009-01-06 21:10:26 -0500
commit     991c0e6d1ae3df59f0ddfe05edecec8319e35a1b
tree       3bc3d717b6d3e6a05d07b62738e0605851a7259a
parent     c89a9f5a42811aa5b2f258e32750c0109f570fc1
byteorder: only use linux/swab.h
The first step in making swab.h a regular header that will include an
asm/swab.h with arch overrides.

Avoid the gratuitous differences introduced in the new linux/swab.h by
naming the ___constant_swabXX bits and __fswabXX bits exactly as found
in the old implementation in byteorder/swab[b].h

Use this new swab.h in byteorder/[big|little]_endian.h and remove the
two old swab headers.

Although the inclusion of asm/byteorder.h looks strange in linux/swab.h,
this will allow each arch to move the actual arch overrides for the swab
bits into an asm file, after which the includes can be cleaned up without
requiring a flag day for all arches at once.

Keep providing __fswabXX in case some userspace was using them directly,
but any new code should use the revised __swabXX instead, which always
does constant folding regardless of the optimization level; this means
the __constant versions can be phased out in-kernel.

Arches that use the old-style arch macros will lose their optimized
versions until they move to the new style, but at least they will still
compile. Many arches have already moved, and the patches to move the
remaining arches are trivial.

Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
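As an aside for readers new to this idiom, the constant-folding dispatch
that __swabXX relies on can be sketched in plain user-space C. The names
below (myswab32, __fmyswab32, ___constant_myswab32) are illustrative
stand-ins, not the kernel's own symbols; the same pattern appears in
glibc's htons()/ntohs().

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* Pure-macro form: foldable when x is a compile-time constant.
 * Evaluates x several times, so it is unsafe for arguments with
 * side effects. */
#define ___constant_myswab32(x) ((uint32_t)(		\
	(((uint32_t)(x) & 0x000000ffUL) << 24) |	\
	(((uint32_t)(x) & 0x0000ff00UL) <<  8) |	\
	(((uint32_t)(x) & 0x00ff0000UL) >>  8) |	\
	(((uint32_t)(x) & 0xff000000UL) >> 24)))

/* Inline-function form: evaluates x exactly once, so it is safe for
 * side effects; an arch override (e.g. a bswap instruction) would be
 * dropped in here. */
static inline uint32_t __fmyswab32(uint32_t x)
{
	return ___constant_myswab32(x);
}

/* Dispatch: literal constants take the macro path and fold at any
 * -O level; everything else goes through the function. */
#define myswab32(x)				\
	(__builtin_constant_p((uint32_t)(x)) ?	\
	 ___constant_myswab32(x) :		\
	 __fmyswab32(x))

/* The constant path even works in a static initializer; GCC documents
 * this use of __builtin_constant_p. */
static const uint32_t magic = myswab32(0x12345678);

int main(void)
{
	uint32_t v = 0xdeadbeefUL;
	printf("%08" PRIx32 " %08" PRIx32 "\n", magic, myswab32(v));
	/* prints: 78563412 efbeadde */
	return 0;
}

Because __builtin_constant_p folds to 1 for literal arguments at parse
time regardless of the optimization level, the macro branch is selected
before code generation, which is what lets a swabbed constant appear in
a static initializer.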
-rw-r--r--  include/linux/byteorder/Kbuild          |   2
-rw-r--r--  include/linux/byteorder/big_endian.h    |   3
-rw-r--r--  include/linux/byteorder/little_endian.h |   3
-rw-r--r--  include/linux/byteorder/swab.h          | 222
-rw-r--r--  include/linux/byteorder/swabb.h         | 135
-rw-r--r--  include/linux/swab.h                    |  50
6 files changed, 27 insertions(+), 388 deletions(-)
diff --git a/include/linux/byteorder/Kbuild b/include/linux/byteorder/Kbuild
index fbaa7f9cee32..38437225b092 100644
--- a/include/linux/byteorder/Kbuild
+++ b/include/linux/byteorder/Kbuild
@@ -1,4 +1,2 @@
 unifdef-y += big_endian.h
 unifdef-y += little_endian.h
-unifdef-y += swab.h
-unifdef-y += swabb.h
diff --git a/include/linux/byteorder/big_endian.h b/include/linux/byteorder/big_endian.h
index 1cba3f3efe5f..3c80fd7e8b56 100644
--- a/include/linux/byteorder/big_endian.h
+++ b/include/linux/byteorder/big_endian.h
@@ -9,8 +9,7 @@
 #endif
 
 #include <linux/types.h>
-#include <linux/byteorder/swab.h>
-#include <linux/byteorder/swabb.h>
+#include <linux/swab.h>
 
 #define __constant_htonl(x) ((__force __be32)(__u32)(x))
 #define __constant_ntohl(x) ((__force __u32)(__be32)(x))
diff --git a/include/linux/byteorder/little_endian.h b/include/linux/byteorder/little_endian.h
index cedc1b5a289c..83195fb82962 100644
--- a/include/linux/byteorder/little_endian.h
+++ b/include/linux/byteorder/little_endian.h
@@ -9,8 +9,7 @@
 #endif
 
 #include <linux/types.h>
-#include <linux/byteorder/swab.h>
-#include <linux/byteorder/swabb.h>
+#include <linux/swab.h>
 
 #define __constant_htonl(x) ((__force __be32)___constant_swab32((x)))
 #define __constant_ntohl(x) ___constant_swab32((__force __be32)(x))
diff --git a/include/linux/byteorder/swab.h b/include/linux/byteorder/swab.h
deleted file mode 100644
index 142134ff1645..000000000000
--- a/include/linux/byteorder/swab.h
+++ /dev/null
@@ -1,222 +0,0 @@
-#ifndef _LINUX_BYTEORDER_SWAB_H
-#define _LINUX_BYTEORDER_SWAB_H
-
-/*
- * linux/byteorder/swab.h
- * Byte-swapping, independently from CPU endianness
- *	swabXX[ps]?(foo)
- *
- * Francois-Rene Rideau <fare@tunes.org> 19971205
- *    separated swab functions from cpu_to_XX,
- *    to clean up support for bizarre-endian architectures.
- *
- * Trent Piepho <xyzzy@speakeasy.org> 2007114
- *    make constant-folding work, provide C versions that
- *    gcc can optimize better, explain different versions
- *
- * See asm-i386/byteorder.h and suches for examples of how to provide
- * architecture-dependent optimized versions
- *
- */
-
-#include <linux/compiler.h>
-
-/* Functions/macros defined, there are a lot:
- *
- * ___swabXX
- *    Generic C versions of the swab functions.
- *
- * ___constant_swabXX
- *    C versions that gcc can fold into a compile-time constant when
- *    the argument is a compile-time constant.
- *
- * __arch__swabXX[sp]?
- *    Architecture optimized versions of all the swab functions
- *    (including the s and p versions).  These can be defined in
- *    asm-arch/byteorder.h.  Any which are not, are defined here.
- *    __arch__swabXXs() is defined in terms of __arch__swabXXp(), which
- *    is defined in terms of __arch__swabXX(), which is in turn defined
- *    in terms of ___swabXX(x).
- *    These must be macros.  They may be unsafe for arguments with
- *    side-effects.
- *
- * __fswabXX
- *    Inline function versions of the __arch__ macros.  These _are_ safe
- *    if the arguments have side-effects.  Note there are no s and p
- *    versions of these.
- *
- * __swabXX[sb]
- *    There are the ones you should actually use.  The __swabXX versions
- *    will be a constant given a constant argument and use the arch
- *    specific code (if any) for non-constant arguments.  The s and p
- *    versions always use the arch specific code (constant folding
- *    doesn't apply).  They are safe to use with arguments with
- *    side-effects.
- *
- * swabXX[sb]
- *    Nicknames for __swabXX[sb] to use in the kernel.
- */
-
-/* casts are necessary for constants, because we never know how for sure
- * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
- */
-
-static __inline__ __attribute_const__ __u16 ___swab16(__u16 x)
-{
-	return x<<8 | x>>8;
-}
-static __inline__ __attribute_const__ __u32 ___swab32(__u32 x)
-{
-	return x<<24 | x>>24 |
-		(x & (__u32)0x0000ff00UL)<<8 |
-		(x & (__u32)0x00ff0000UL)>>8;
-}
-static __inline__ __attribute_const__ __u64 ___swab64(__u64 x)
-{
-	return x<<56 | x>>56 |
-		(x & (__u64)0x000000000000ff00ULL)<<40 |
-		(x & (__u64)0x0000000000ff0000ULL)<<24 |
-		(x & (__u64)0x00000000ff000000ULL)<< 8 |
-		(x & (__u64)0x000000ff00000000ULL)>> 8 |
-		(x & (__u64)0x0000ff0000000000ULL)>>24 |
-		(x & (__u64)0x00ff000000000000ULL)>>40;
-}
-
-#define ___constant_swab16(x) \
-	((__u16)( \
-		(((__u16)(x) & (__u16)0x00ffU) << 8) | \
-		(((__u16)(x) & (__u16)0xff00U) >> 8) ))
-#define ___constant_swab32(x) \
-	((__u32)( \
-		(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
-		(((__u32)(x) & (__u32)0x0000ff00UL) << 8) | \
-		(((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | \
-		(((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
-#define ___constant_swab64(x) \
-	((__u64)( \
-		(__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
-		(__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
-		(__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
-		(__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) << 8) | \
-		(__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >> 8) | \
-		(__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
-		(__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
-		(__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))
-
-/*
- * provide defaults when no architecture-specific optimization is detected
- */
-#ifndef __arch__swab16
-# define __arch__swab16(x) ___swab16(x)
-#endif
-#ifndef __arch__swab32
-# define __arch__swab32(x) ___swab32(x)
-#endif
-#ifndef __arch__swab64
-# define __arch__swab64(x) ___swab64(x)
-#endif
-
-#ifndef __arch__swab16p
-# define __arch__swab16p(x) __arch__swab16(*(x))
-#endif
-#ifndef __arch__swab32p
-# define __arch__swab32p(x) __arch__swab32(*(x))
-#endif
-#ifndef __arch__swab64p
-# define __arch__swab64p(x) __arch__swab64(*(x))
-#endif
-
-#ifndef __arch__swab16s
-# define __arch__swab16s(x) ((void)(*(x) = __arch__swab16p(x)))
-#endif
-#ifndef __arch__swab32s
-# define __arch__swab32s(x) ((void)(*(x) = __arch__swab32p(x)))
-#endif
-#ifndef __arch__swab64s
-# define __arch__swab64s(x) ((void)(*(x) = __arch__swab64p(x)))
-#endif
-
-
-/*
- * Allow constant folding
- */
-#if defined(__GNUC__) && defined(__OPTIMIZE__)
-# define __swab16(x) \
-(__builtin_constant_p((__u16)(x)) ? \
- ___constant_swab16((x)) : \
- __fswab16((x)))
-# define __swab32(x) \
-(__builtin_constant_p((__u32)(x)) ? \
- ___constant_swab32((x)) : \
- __fswab32((x)))
-# define __swab64(x) \
-(__builtin_constant_p((__u64)(x)) ? \
- ___constant_swab64((x)) : \
- __fswab64((x)))
-#else
-# define __swab16(x) __fswab16(x)
-# define __swab32(x) __fswab32(x)
-# define __swab64(x) __fswab64(x)
-#endif /* OPTIMIZE */
-
-
-static __inline__ __attribute_const__ __u16 __fswab16(__u16 x)
-{
-	return __arch__swab16(x);
-}
-static __inline__ __u16 __swab16p(const __u16 *x)
-{
-	return __arch__swab16p(x);
-}
-static __inline__ void __swab16s(__u16 *addr)
-{
-	__arch__swab16s(addr);
-}
-
-static __inline__ __attribute_const__ __u32 __fswab32(__u32 x)
-{
-	return __arch__swab32(x);
-}
-static __inline__ __u32 __swab32p(const __u32 *x)
-{
-	return __arch__swab32p(x);
-}
-static __inline__ void __swab32s(__u32 *addr)
-{
-	__arch__swab32s(addr);
-}
-
-#ifdef __BYTEORDER_HAS_U64__
-static __inline__ __attribute_const__ __u64 __fswab64(__u64 x)
-{
-# ifdef __SWAB_64_THRU_32__
-	__u32 h = x >> 32;
-	__u32 l = x & ((1ULL<<32)-1);
-	return (((__u64)__swab32(l)) << 32) | ((__u64)(__swab32(h)));
-# else
-	return __arch__swab64(x);
-# endif
-}
-static __inline__ __u64 __swab64p(const __u64 *x)
-{
-	return __arch__swab64p(x);
-}
-static __inline__ void __swab64s(__u64 *addr)
-{
-	__arch__swab64s(addr);
-}
-#endif /* __BYTEORDER_HAS_U64__ */
-
-#if defined(__KERNEL__)
-#define swab16 __swab16
-#define swab32 __swab32
-#define swab64 __swab64
-#define swab16p __swab16p
-#define swab32p __swab32p
-#define swab64p __swab64p
-#define swab16s __swab16s
-#define swab32s __swab32s
-#define swab64s __swab64s
-#endif
-
-#endif /* _LINUX_BYTEORDER_SWAB_H */
diff --git a/include/linux/byteorder/swabb.h b/include/linux/byteorder/swabb.h
deleted file mode 100644
index 8c780c7d779e..000000000000
--- a/include/linux/byteorder/swabb.h
+++ /dev/null
@@ -1,135 +0,0 @@
-#ifndef _LINUX_BYTEORDER_SWABB_H
-#define _LINUX_BYTEORDER_SWABB_H
-
-/*
- * linux/byteorder/swabb.h
- * SWAp Bytes Bizarrely
- *	swaHHXX[ps]?(foo)
- *
- * Support for obNUXIous pdp-endian and other bizarre architectures.
- * Will Linux ever run on such ancient beasts? if not, this file
- * will be but a programming pearl. Still, it's a reminder that we
- * shouldn't be making too many assumptions when trying to be portable.
- *
- */
-
-/*
- * Meaning of the names I chose (vaxlinux people feel free to correct them):
- * swahw32	swap 16-bit half-words in a 32-bit word
- * swahb32	swap 8-bit halves of each 16-bit half-word in a 32-bit word
- *
- * No 64-bit support yet. I don't know NUXI conventions for long longs.
- * I guarantee it will be a mess when it's there, though :->
- * It will be even worse if there are conflicting 64-bit conventions.
- * Hopefully, no one ever used 64-bit objects on NUXI machines.
- *
- */
-
-#include <linux/types.h>
-
-#define ___swahw32(x) \
-({ \
-	__u32 __x = (x); \
-	((__u32)( \
-		(((__u32)(__x) & (__u32)0x0000ffffUL) << 16) | \
-		(((__u32)(__x) & (__u32)0xffff0000UL) >> 16) )); \
-})
-#define ___swahb32(x) \
-({ \
-	__u32 __x = (x); \
-	((__u32)( \
-		(((__u32)(__x) & (__u32)0x00ff00ffUL) << 8) | \
-		(((__u32)(__x) & (__u32)0xff00ff00UL) >> 8) )); \
-})
-
-#define ___constant_swahw32(x) \
-	((__u32)( \
-		(((__u32)(x) & (__u32)0x0000ffffUL) << 16) | \
-		(((__u32)(x) & (__u32)0xffff0000UL) >> 16) ))
-#define ___constant_swahb32(x) \
-	((__u32)( \
-		(((__u32)(x) & (__u32)0x00ff00ffUL) << 8) | \
-		(((__u32)(x) & (__u32)0xff00ff00UL) >> 8) ))
-
-/*
- * provide defaults when no architecture-specific optimization is detected
- */
-#ifndef __arch__swahw32
-# define __arch__swahw32(x) ___swahw32(x)
-#endif
-#ifndef __arch__swahb32
-# define __arch__swahb32(x) ___swahb32(x)
-#endif
-
-#ifndef __arch__swahw32p
-# define __arch__swahw32p(x) __swahw32(*(x))
-#endif
-#ifndef __arch__swahb32p
-# define __arch__swahb32p(x) __swahb32(*(x))
-#endif
-
-#ifndef __arch__swahw32s
-# define __arch__swahw32s(x) do { *(x) = __swahw32p((x)); } while (0)
-#endif
-#ifndef __arch__swahb32s
-# define __arch__swahb32s(x) do { *(x) = __swahb32p((x)); } while (0)
-#endif
-
-
-/*
- * Allow constant folding
- */
-#define __swahw32(x) \
-(__builtin_constant_p((__u32)(x)) ? \
- ___swahw32((x)) : \
- __fswahw32((x)))
-#define __swahb32(x) \
-(__builtin_constant_p((__u32)(x)) ? \
- ___swahb32((x)) : \
- __fswahb32((x)))
-
-
-static inline __u32 __fswahw32(__u32 x)
-{
-	return __arch__swahw32(x);
-}
-
-static inline __u32 __swahw32p(__u32 *x)
-{
-	return __arch__swahw32p(x);
-}
-
-static inline void __swahw32s(__u32 *addr)
-{
-	__arch__swahw32s(addr);
-}
-
-static inline __u32 __fswahb32(__u32 x)
-{
-	return __arch__swahb32(x);
-}
-
-static inline __u32 __swahb32p(__u32 *x)
-{
-	return __arch__swahb32p(x);
-}
-
-static inline void __swahb32s(__u32 *addr)
-{
-	__arch__swahb32s(addr);
-}
-
-#ifdef __BYTEORDER_HAS_U64__
-/*
- * Not supported yet
- */
-#endif /* __BYTEORDER_HAS_U64__ */
-
-#define swahw32 __swahw32
-#define swahb32 __swahb32
-#define swahw32p __swahw32p
-#define swahb32p __swahb32p
-#define swahw32s __swahw32s
-#define swahb32s __swahb32s
-
-#endif /* _LINUX_BYTEORDER_SWABB_H */
diff --git a/include/linux/swab.h b/include/linux/swab.h
index bbed279f3b32..9a2d33e0a98a 100644
--- a/include/linux/swab.h
+++ b/include/linux/swab.h
@@ -9,17 +9,17 @@
  * casts are necessary for constants, because we never know how for sure
  * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
  */
-#define __const_swab16(x) ((__u16)( \
+#define ___constant_swab16(x) ((__u16)( \
 	(((__u16)(x) & (__u16)0x00ffU) << 8) | \
 	(((__u16)(x) & (__u16)0xff00U) >> 8)))
 
-#define __const_swab32(x) ((__u32)( \
+#define ___constant_swab32(x) ((__u32)( \
 	(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
 	(((__u32)(x) & (__u32)0x0000ff00UL) << 8) | \
 	(((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | \
 	(((__u32)(x) & (__u32)0xff000000UL) >> 24)))
 
-#define __const_swab64(x) ((__u64)( \
+#define ___constant_swab64(x) ((__u64)( \
 	(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
 	(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
 	(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
@@ -29,11 +29,11 @@
 	(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
 	(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56)))
 
-#define __const_swahw32(x) ((__u32)( \
+#define ___constant_swahw32(x) ((__u32)( \
 	(((__u32)(x) & (__u32)0x0000ffffUL) << 16) | \
 	(((__u32)(x) & (__u32)0xffff0000UL) >> 16)))
 
-#define __const_swahb32(x) ((__u32)( \
+#define ___constant_swahb32(x) ((__u32)( \
 	(((__u32)(x) & (__u32)0x00ff00ffUL) << 8) | \
 	(((__u32)(x) & (__u32)0xff00ff00UL) >> 8)))
 
@@ -43,25 +43,25 @@
  * ___swab16, ___swab32, ___swab64, ___swahw32, ___swahb32
  */
 
-static inline __attribute_const__ __u16 ___swab16(__u16 val)
+static inline __attribute_const__ __u16 __fswab16(__u16 val)
 {
 #ifdef __arch_swab16
 	return __arch_swab16(val);
 #else
-	return __const_swab16(val);
+	return ___constant_swab16(val);
 #endif
 }
 
-static inline __attribute_const__ __u32 ___swab32(__u32 val)
+static inline __attribute_const__ __u32 __fswab32(__u32 val)
 {
 #ifdef __arch_swab32
 	return __arch_swab32(val);
 #else
-	return __const_swab32(val);
+	return ___constant_swab32(val);
 #endif
 }
 
-static inline __attribute_const__ __u64 ___swab64(__u64 val)
+static inline __attribute_const__ __u64 __fswab64(__u64 val)
 {
 #ifdef __arch_swab64
 	return __arch_swab64(val);
@@ -70,25 +70,25 @@ static inline __attribute_const__ __u64 ___swab64(__u64 val)
 	__u32 l = val & ((1ULL << 32) - 1);
 	return (((__u64)___swab32(l)) << 32) | ((__u64)(___swab32(h)));
 #else
-	return __const_swab64(val);
+	return ___constant_swab64(val);
 #endif
 }
 
-static inline __attribute_const__ __u32 ___swahw32(__u32 val)
+static inline __attribute_const__ __u32 __fswahw32(__u32 val)
 {
 #ifdef __arch_swahw32
 	return __arch_swahw32(val);
 #else
-	return __const_swahw32(val);
+	return ___constant_swahw32(val);
 #endif
 }
 
-static inline __attribute_const__ __u32 ___swahb32(__u32 val)
+static inline __attribute_const__ __u32 __fswahb32(__u32 val)
 {
 #ifdef __arch_swahb32
 	return __arch_swahb32(val);
 #else
-	return __const_swahb32(val);
+	return ___constant_swahb32(val);
 #endif
 }
 
@@ -98,8 +98,8 @@ static inline __attribute_const__ __u32 ___swahb32(__u32 val)
  */
 #define __swab16(x) \
 	(__builtin_constant_p((__u16)(x)) ? \
-	__const_swab16((x)) : \
-	___swab16((x)))
+	___constant_swab16(x) : \
+	__fswab16(x))
 
 /**
  * __swab32 - return a byteswapped 32-bit value
@@ -107,8 +107,8 @@ static inline __attribute_const__ __u32 ___swahb32(__u32 val)
  */
 #define __swab32(x) \
 	(__builtin_constant_p((__u32)(x)) ? \
-	__const_swab32((x)) : \
-	___swab32((x)))
+	___constant_swab32(x) : \
+	__fswab32(x))
 
 /**
  * __swab64 - return a byteswapped 64-bit value
@@ -116,8 +116,8 @@ static inline __attribute_const__ __u32 ___swahb32(__u32 val)
  */
 #define __swab64(x) \
 	(__builtin_constant_p((__u64)(x)) ? \
-	__const_swab64((x)) : \
-	___swab64((x)))
+	___constant_swab64(x) : \
+	__fswab64(x))
 
 /**
  * __swahw32 - return a word-swapped 32-bit value
@@ -127,8 +127,8 @@ static inline __attribute_const__ __u32 ___swahb32(__u32 val)
  */
 #define __swahw32(x) \
 	(__builtin_constant_p((__u32)(x)) ? \
-	__const_swahw32((x)) : \
-	___swahw32((x)))
+	___constant_swahw32(x) : \
+	__fswahw32(x))
 
 /**
  * __swahb32 - return a high and low byte-swapped 32-bit value
@@ -138,8 +138,8 @@ static inline __attribute_const__ __u32 ___swahb32(__u32 val)
  */
 #define __swahb32(x) \
 	(__builtin_constant_p((__u32)(x)) ? \
-	__const_swahb32((x)) : \
-	___swahb32((x)))
+	___constant_swahb32(x) : \
+	__fswahb32(x))
 
 /**
  * __swab16p - return a byteswapped 16-bit value from a pointer