 arch/s390/include/asm/byteorder.h | 72 ++++++++++-----------------------------
 1 file changed, 21 insertions(+), 51 deletions(-)
diff --git a/arch/s390/include/asm/byteorder.h b/arch/s390/include/asm/byteorder.h
index 1fe2492baa8d..8bcf277c8468 100644
--- a/arch/s390/include/asm/byteorder.h
+++ b/arch/s390/include/asm/byteorder.h
@@ -11,32 +11,39 @@
 
 #include <asm/types.h>
 
-#ifdef __GNUC__
+#define __BIG_ENDIAN
+
+#ifndef __s390x__
+# define __SWAB_64_THRU_32__
+#endif
 
 #ifdef __s390x__
-static inline __u64 ___arch__swab64p(const __u64 *x)
+static inline __u64 __arch_swab64p(const __u64 *x)
 {
 	__u64 result;
 
 	asm volatile("lrvg %0,%1" : "=d" (result) : "m" (*x));
 	return result;
 }
+#define __arch_swab64p __arch_swab64p
 
-static inline __u64 ___arch__swab64(__u64 x)
+static inline __u64 __arch_swab64(__u64 x)
 {
 	__u64 result;
 
 	asm volatile("lrvgr %0,%1" : "=d" (result) : "d" (x));
 	return result;
 }
+#define __arch_swab64 __arch_swab64
 
-static inline void ___arch__swab64s(__u64 *x)
+static inline void __arch_swab64s(__u64 *x)
 {
-	*x = ___arch__swab64p(x);
+	*x = __arch_swab64p(x);
 }
+#define __arch_swab64s __arch_swab64s
 #endif /* __s390x__ */
 
-static inline __u32 ___arch__swab32p(const __u32 *x)
+static inline __u32 __arch_swab32p(const __u32 *x)
 {
 	__u32 result;
 
@@ -53,25 +60,20 @@ static inline __u32 ___arch__swab32p(const __u32 *x)
 #endif /* __s390x__ */
 	return result;
 }
+#define __arch_swab32p __arch_swab32p
 
-static inline __u32 ___arch__swab32(__u32 x)
+#ifdef __s390x__
+static inline __u32 __arch_swab32(__u32 x)
 {
-#ifndef __s390x__
-	return ___arch__swab32p(&x);
-#else /* __s390x__ */
 	__u32 result;
 
 	asm volatile("lrvr %0,%1" : "=d" (result) : "d" (x));
 	return result;
-#endif /* __s390x__ */
-}
-
-static __inline__ void ___arch__swab32s(__u32 *x)
-{
-	*x = ___arch__swab32p(x);
 }
+#define __arch_swab32 __arch_swab32
+#endif /* __s390x__ */
 
-static __inline__ __u16 ___arch__swab16p(const __u16 *x)
+static inline __u16 __arch_swab16p(const __u16 *x)
 {
 	__u16 result;
 
@@ -86,40 +88,8 @@ static __inline__ __u16 ___arch__swab16p(const __u16 *x)
 #endif /* __s390x__ */
 	return result;
 }
+#define __arch_swab16p __arch_swab16p
 
-static __inline__ __u16 ___arch__swab16(__u16 x)
-{
-	return ___arch__swab16p(&x);
-}
-
-static __inline__ void ___arch__swab16s(__u16 *x)
-{
-	*x = ___arch__swab16p(x);
-}
-
-#ifdef __s390x__
-#define __arch__swab64(x) ___arch__swab64(x)
-#define __arch__swab64p(x) ___arch__swab64p(x)
-#define __arch__swab64s(x) ___arch__swab64s(x)
-#endif /* __s390x__ */
-#define __arch__swab32(x) ___arch__swab32(x)
-#define __arch__swab16(x) ___arch__swab16(x)
-#define __arch__swab32p(x) ___arch__swab32p(x)
-#define __arch__swab16p(x) ___arch__swab16p(x)
-#define __arch__swab32s(x) ___arch__swab32s(x)
-#define __arch__swab16s(x) ___arch__swab16s(x)
-
-#ifndef __s390x__
-#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
-# define __BYTEORDER_HAS_U64__
-# define __SWAB_64_THRU_32__
-#endif
-#else /* __s390x__ */
-#define __BYTEORDER_HAS_U64__
-#endif /* __s390x__ */
-
-#endif /* __GNUC__ */
-
-#include <linux/byteorder/big_endian.h>
+#include <linux/byteorder.h>
 
 #endif /* _S390_BYTEORDER_H */
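
Note on the pattern used above: the renamed helpers follow the override-detection convention of the generic byteorder/swab headers. The architecture defines a static inline __arch_swab*() and then makes the name visible to the preprocessor with a self-referential define such as "#define __arch_swab32 __arch_swab32"; the generic code can then test "#ifdef __arch_swab32" and fall back to a portable C byte-shuffle (or, with __SWAB_64_THRU_32__, to two 32-bit swaps) when no arch helper exists. Below is a minimal standalone sketch of that idea; my_swab32() and its fallback are illustrative stand-ins for the real generic code in include/linux/swab.h, not kernel APIs.

/*
 * Minimal sketch of the __arch_swab* override pattern from the diff
 * above.  my_swab32() is a hypothetical stand-in for the generic
 * kernel helper; only the preprocessor handshake is the point here.
 */
#include <stdint.h>
#include <stdio.h>

/* "Architecture" side: analogous to asm/byteorder.h in the patch. */
static inline uint32_t __arch_swab32(uint32_t x)
{
	/* A real port would use a CPU instruction here, e.g. s390's lrvr. */
	return (x >> 24) | ((x >> 8) & 0x0000ff00UL) |
	       ((x << 8) & 0x00ff0000UL) | (x << 24);
}
/* Make the override visible to the preprocessor. */
#define __arch_swab32 __arch_swab32

/* "Generic" side: picks the arch helper if one was advertised. */
static inline uint32_t my_swab32(uint32_t x)
{
#ifdef __arch_swab32
	return __arch_swab32(x);	/* arch-optimized path */
#else
	/* portable fallback */
	return (x >> 24) | ((x >> 8) & 0x0000ff00UL) |
	       ((x << 8) & 0x00ff0000UL) | (x << 24);
#endif
}

int main(void)
{
	printf("%08x\n", (unsigned)my_swab32(0x12345678));	/* 78563412 */
	return 0;
}

The self-referential define is needed because the preprocessor cannot see function declarations; "#define __arch_swab32 __arch_swab32" expands to itself (so calls still reach the inline function) while giving #ifdef something to detect.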