path: root/include/asm-s390/byteorder.h
author	Martin Schwidefsky <schwidefsky@de.ibm.com>	2008-08-01 14:42:05 -0400
committer	Martin Schwidefsky <schwidefsky@de.ibm.com>	2008-08-01 14:42:05 -0400
commit	c6557e7f2b6ae76a44653d38f835174074c42e05 (patch)
tree	da619a76b309cb1cd259d082af768a83cfcd6d52 /include/asm-s390/byteorder.h
parent	ab4227cb2d936886069ef1056c02500d05beb15d (diff)
[S390] move include/asm-s390 to arch/s390/include/asm
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Diffstat (limited to 'include/asm-s390/byteorder.h')
-rw-r--r--	include/asm-s390/byteorder.h	125
1 file changed, 0 insertions, 125 deletions
diff --git a/include/asm-s390/byteorder.h b/include/asm-s390/byteorder.h
deleted file mode 100644
index 1fe2492baa8d..000000000000
--- a/include/asm-s390/byteorder.h
+++ /dev/null
@@ -1,125 +0,0 @@
-#ifndef _S390_BYTEORDER_H
-#define _S390_BYTEORDER_H
-
-/*
- *  include/asm-s390/byteorder.h
- *
- *  S390 version
- *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
- *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
- */
-
-#include <asm/types.h>
-
-#ifdef __GNUC__
-
-#ifdef __s390x__
-static inline __u64 ___arch__swab64p(const __u64 *x)
-{
-	__u64 result;
-
-	asm volatile("lrvg %0,%1" : "=d" (result) : "m" (*x));
-	return result;
-}
-
-static inline __u64 ___arch__swab64(__u64 x)
-{
-	__u64 result;
-
-	asm volatile("lrvgr %0,%1" : "=d" (result) : "d" (x));
-	return result;
-}
-
-static inline void ___arch__swab64s(__u64 *x)
-{
-	*x = ___arch__swab64p(x);
-}
-#endif /* __s390x__ */
-
-static inline __u32 ___arch__swab32p(const __u32 *x)
-{
-	__u32 result;
-
-	asm volatile(
-#ifndef __s390x__
-		" icm %0,8,3(%1)\n"
-		" icm %0,4,2(%1)\n"
-		" icm %0,2,1(%1)\n"
-		" ic %0,0(%1)"
-		: "=&d" (result) : "a" (x), "m" (*x) : "cc");
-#else /* __s390x__ */
-		" lrv %0,%1"
-		: "=d" (result) : "m" (*x));
-#endif /* __s390x__ */
-	return result;
-}
-
-static inline __u32 ___arch__swab32(__u32 x)
-{
-#ifndef __s390x__
-	return ___arch__swab32p(&x);
-#else /* __s390x__ */
-	__u32 result;
-
-	asm volatile("lrvr %0,%1" : "=d" (result) : "d" (x));
-	return result;
-#endif /* __s390x__ */
-}
-
-static __inline__ void ___arch__swab32s(__u32 *x)
-{
-	*x = ___arch__swab32p(x);
-}
-
-static __inline__ __u16 ___arch__swab16p(const __u16 *x)
-{
-	__u16 result;
-
-	asm volatile(
-#ifndef __s390x__
-		" icm %0,2,1(%1)\n"
-		" ic %0,0(%1)\n"
-		: "=&d" (result) : "a" (x), "m" (*x) : "cc");
-#else /* __s390x__ */
-		" lrvh %0,%1"
-		: "=d" (result) : "m" (*x));
-#endif /* __s390x__ */
-	return result;
-}
-
-static __inline__ __u16 ___arch__swab16(__u16 x)
-{
-	return ___arch__swab16p(&x);
-}
-
-static __inline__ void ___arch__swab16s(__u16 *x)
-{
-	*x = ___arch__swab16p(x);
-}
-
-#ifdef __s390x__
-#define __arch__swab64(x) ___arch__swab64(x)
-#define __arch__swab64p(x) ___arch__swab64p(x)
-#define __arch__swab64s(x) ___arch__swab64s(x)
-#endif /* __s390x__ */
-#define __arch__swab32(x) ___arch__swab32(x)
-#define __arch__swab16(x) ___arch__swab16(x)
-#define __arch__swab32p(x) ___arch__swab32p(x)
-#define __arch__swab16p(x) ___arch__swab16p(x)
-#define __arch__swab32s(x) ___arch__swab32s(x)
-#define __arch__swab16s(x) ___arch__swab16s(x)
-
-#ifndef __s390x__
-#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
-# define __BYTEORDER_HAS_U64__
-# define __SWAB_64_THRU_32__
-#endif
-#else /* __s390x__ */
-#define __BYTEORDER_HAS_U64__
-#endif /* __s390x__ */
-
-#endif /* __GNUC__ */
-
-#include <linux/byteorder/big_endian.h>
-
-#endif /* _S390_BYTEORDER_H */
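
For reference, and not part of the patch itself: in kernels of this era the generic byteorder machinery pulled in through <linux/byteorder/big_endian.h> picks up the __arch__swab*() hooks defined above, so helpers such as cpu_to_le32() end up byte-swapping on big-endian s390. A minimal usage sketch follows; example_to_le32() and example_swab32() are hypothetical names, not kernel APIs.

/*
 * Hypothetical usage sketch.  cpu_to_le32() on s390 resolves to a byte
 * swap that is ultimately performed by ___arch__swab32() above (lrvr/lrv
 * on 64-bit, the icm/ic sequence on 31-bit).
 */
#include <asm/byteorder.h>

static inline __le32 example_to_le32(__u32 host_val)
{
	return cpu_to_le32(host_val);
}

/* Portable C equivalent of the 32-bit swap the inline assembly performs. */
static inline __u32 example_swab32(__u32 x)
{
	return (x << 24) | ((x & 0x0000ff00U) << 8) |
	       ((x >> 8) & 0x0000ff00U) | (x >> 24);
}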