about summary refs log tree commit diff stats
path: root/include/asm-ppc64/byteorder.h
diff options
context:
space:
mode:
author: Linus Torvalds <torvalds@ppc970.osdl.org> 2005-04-16 18:20:36 -0400
committer: Linus Torvalds <torvalds@ppc970.osdl.org> 2005-04-16 18:20:36 -0400
commit1da177e4c3f41524e886b7f1b8a0c1fc7321cac2 (patch)
tree0bba044c4ce775e45a88a51686b5d9f90697ea9d /include/asm-ppc64/byteorder.h
tags: Linux-2.6.12-rc2, v2.6.12-rc2
Initial git repository build. I'm not bothering with the full history, even though we have it. We can create a separate "historical" git archive of that later if we want to, and in the meantime it's about 3.2GB when imported into git - space that would just make the early git days unnecessarily complicated, when we don't have a lot of good infrastructure for it. Let it rip!
Diffstat (limited to 'include/asm-ppc64/byteorder.h')
-rw-r--r--  include/asm-ppc64/byteorder.h  96
1 file changed, 96 insertions(+), 0 deletions(-)
diff --git a/include/asm-ppc64/byteorder.h b/include/asm-ppc64/byteorder.h
new file mode 100644
index 000000000000..80327532de64
--- /dev/null
+++ b/include/asm-ppc64/byteorder.h
@@ -0,0 +1,96 @@
1#ifndef _PPC64_BYTEORDER_H
2#define _PPC64_BYTEORDER_H
3
4/*
5 * This program is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU General Public License
7 * as published by the Free Software Foundation; either version
8 * 2 of the License, or (at your option) any later version.
9 */
10
11#include <asm/types.h>
12#include <linux/compiler.h>
13
14#ifdef __GNUC__
15#ifdef __KERNEL__
16
17static __inline__ __u16 ld_le16(const volatile __u16 *addr)
18{
19 __u16 val;
20
21 __asm__ __volatile__ ("lhbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
22 return val;
23}
24
25static __inline__ void st_le16(volatile __u16 *addr, const __u16 val)
26{
27 __asm__ __volatile__ ("sthbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
28}
29
30static __inline__ __u32 ld_le32(const volatile __u32 *addr)
31{
32 __u32 val;
33
34 __asm__ __volatile__ ("lwbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
35 return val;
36}
37
38static __inline__ void st_le32(volatile __u32 *addr, const __u32 val)
39{
40 __asm__ __volatile__ ("stwbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
41}
42
43#if 0
44static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
45{
46 __u16 result;
47
48 __asm__("rlwimi %0,%1,8,16,23"
49 : "=r" (result)
50 : "r" (value), "0" (value >> 8));
51 return result;
52}
53
54static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
55{
56 __u32 result;
57
58 __asm__("rlwimi %0,%1,24,16,23\n\t"
59 "rlwimi %0,%1,8,8,15\n\t"
60 "rlwimi %0,%1,24,0,7"
61 : "=r" (result)
62 : "r" (value), "0" (value >> 24));
63 return result;
64}
65
66static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 value)
67{
68 __u64 result;
69#error implement me
70}
71
72#define __arch__swab16(x) ___arch__swab16(x)
73#define __arch__swab32(x) ___arch__swab32(x)
74#define __arch__swab64(x) ___arch__swab64(x)
75
76#endif
77
78/* The same, but returns converted value from the location pointer by addr. */
79#define __arch__swab16p(addr) ld_le16(addr)
80#define __arch__swab32p(addr) ld_le32(addr)
81
82/* The same, but do the conversion in situ, ie. put the value back to addr. */
83#define __arch__swab16s(addr) st_le16(addr,*addr)
84#define __arch__swab32s(addr) st_le32(addr,*addr)
85
86#endif /* __KERNEL__ */
87
88#ifndef __STRICT_ANSI__
89#define __BYTEORDER_HAS_U64__
90#endif
91
92#endif /* __GNUC__ */
93
94#include <linux/byteorder/big_endian.h>
95
96#endif /* _PPC64_BYTEORDER_H */