author     Stephen Rothwell <sfr@canb.auug.org.au>    2008-08-01 01:20:30 -0400
committer  Paul Mackerras <paulus@samba.org>          2008-08-03 22:02:00 -0400
commit     b8b572e1015f81b4e748417be2629dfe51ab99f9 (patch)
tree       7df58667d5ed71d6c8f8f4ce40ca16b6fb776d0b  /arch/powerpc/include/asm/byteorder.h
parent     2b12a4c524812fb3f6ee590a02e65b95c8c32229 (diff)
powerpc: Move include files to arch/powerpc/include/asm from include/asm-powerpc

This is the result of a

    mkdir arch/powerpc/include/asm
    git mv include/asm-powerpc/* arch/powerpc/include/asm

followed by a few documentation/comment fixups and a couple of places where
<asm-powerpc/...> was being used explicitly. Of the latter, only one was
outside the arch code, and it is a driver that is only built for powerpc.

Signed-off-by: Stephen Rothwell <sfr@canb.auug.org.au>
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'arch/powerpc/include/asm/byteorder.h')
 -rw-r--r--  arch/powerpc/include/asm/byteorder.h  |  89
 1 file changed, 89 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/include/asm/byteorder.h b/arch/powerpc/include/asm/byteorder.h
new file mode 100644
index 000000000000..b37752214a16
--- /dev/null
+++ b/arch/powerpc/include/asm/byteorder.h
@@ -0,0 +1,89 @@
#ifndef _ASM_POWERPC_BYTEORDER_H
#define _ASM_POWERPC_BYTEORDER_H

/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#include <asm/types.h>
#include <linux/compiler.h>

#ifdef __GNUC__
#ifdef __KERNEL__

static __inline__ __u16 ld_le16(const volatile __u16 *addr)
{
	__u16 val;

	__asm__ __volatile__ ("lhbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
	return val;
}

static __inline__ void st_le16(volatile __u16 *addr, const __u16 val)
{
	__asm__ __volatile__ ("sthbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
}

static __inline__ __u32 ld_le32(const volatile __u32 *addr)
{
	__u32 val;

	__asm__ __volatile__ ("lwbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
	return val;
}

static __inline__ void st_le32(volatile __u32 *addr, const __u32 val)
{
	__asm__ __volatile__ ("stwbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
}

static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
{
	__u16 result;

	__asm__("rlwimi %0,%1,8,16,23"
		: "=r" (result)
		: "r" (value), "0" (value >> 8));
	return result;
}

static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
{
	__u32 result;

	__asm__("rlwimi %0,%1,24,16,23\n\t"
		"rlwimi %0,%1,8,8,15\n\t"
		"rlwimi %0,%1,24,0,7"
		: "=r" (result)
		: "r" (value), "0" (value >> 24));
	return result;
}

#define __arch__swab16(x) ___arch__swab16(x)
#define __arch__swab32(x) ___arch__swab32(x)

/* The same, but returns the converted value from the location pointed to by addr. */
#define __arch__swab16p(addr) ld_le16(addr)
#define __arch__swab32p(addr) ld_le32(addr)

/* The same, but does the conversion in situ, i.e. puts the value back at addr. */
#define __arch__swab16s(addr) st_le16(addr,*addr)
#define __arch__swab32s(addr) st_le32(addr,*addr)

#endif /* __KERNEL__ */

#ifndef __STRICT_ANSI__
#define __BYTEORDER_HAS_U64__
#ifndef __powerpc64__
#define __SWAB_64_THRU_32__
#endif /* __powerpc64__ */
#endif /* __STRICT_ANSI__ */

#endif /* __GNUC__ */

#include <linux/byteorder/big_endian.h>

#endif /* _ASM_POWERPC_BYTEORDER_H */
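For readers less familiar with the lhbrx/lwbrx and rlwimi idioms in the moved header: the helpers either byte-swap a 16- or 32-bit value in a register (___arch__swab16/___arch__swab32) or load/store a value with its bytes reversed (ld_le16/st_le16, ld_le32/st_le32), which on big-endian powerpc amounts to reading and writing little-endian data. Kernel code normally reaches these through the generic accessors pulled in by <linux/byteorder/big_endian.h> (le32_to_cpu(), cpu_to_le32(), and friends) rather than calling them directly. The sketch below is not part of the patch; it is a minimal, portable userspace illustration of what those helpers compute, and the *_ref names are invented here for comparison only.

```c
#include <stdint.h>
#include <stdio.h>

/* What ___arch__swab16() computes: swap the two bytes of a 16-bit value. */
static uint16_t swab16_ref(uint16_t x)
{
	return (uint16_t)((x << 8) | (x >> 8));
}

/* What ___arch__swab32() computes: reverse the four bytes of a 32-bit value. */
static uint32_t swab32_ref(uint32_t x)
{
	return (x << 24) | ((x << 8) & 0x00ff0000u) |
	       ((x >> 8) & 0x0000ff00u) | (x >> 24);
}

/*
 * What ld_le32() achieves on a big-endian CPU: interpret four bytes in
 * memory as a little-endian 32-bit value.
 */
static uint32_t ld_le32_ref(const unsigned char *p)
{
	return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
	       ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

int main(void)
{
	const unsigned char buf[4] = { 0x78, 0x56, 0x34, 0x12 };

	printf("swab16(0x1234)     = 0x%04x\n", swab16_ref(0x1234));     /* 0x3412 */
	printf("swab32(0x12345678) = 0x%08x\n", swab32_ref(0x12345678)); /* 0x78563412 */
	printf("ld_le32(buf)       = 0x%08x\n", ld_le32_ref(buf));       /* 0x12345678 */
	return 0;
}
```

The sketch builds with any C compiler (e.g. `gcc -Wall swab_ref.c`); in the kernel itself the inline-asm versions above are what the generic byteorder machinery ends up using on powerpc.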