Diffstat (limited to 'include/asm-generic/unaligned.h')
-rw-r--r--  include/asm-generic/unaligned.h | 124
1 file changed, 0 insertions, 124 deletions
diff --git a/include/asm-generic/unaligned.h b/include/asm-generic/unaligned.h
deleted file mode 100644
index 2fe1b2e67f01..000000000000
--- a/include/asm-generic/unaligned.h
+++ /dev/null
@@ -1,124 +0,0 @@
-#ifndef _ASM_GENERIC_UNALIGNED_H_
-#define _ASM_GENERIC_UNALIGNED_H_
-
-/*
- * For the benefit of those who are trying to port Linux to another
- * architecture, here are some C-language equivalents.
- *
- * This is based almost entirely upon Richard Henderson's
- * asm-alpha/unaligned.h implementation. Some comments were
- * taken from David Mosberger's asm-ia64/unaligned.h header.
- */
-
-#include <linux/types.h>
-
-/*
- * The main single-value unaligned transfer routines.
- */
-#define get_unaligned(ptr) \
-        __get_unaligned((ptr), sizeof(*(ptr)))
-#define put_unaligned(x,ptr) \
-        ((void)sizeof(*(ptr)=(x)),\
-        __put_unaligned((__force __u64)(x), (ptr), sizeof(*(ptr))))
-
-/*
- * This function doesn't actually exist. The idea is that when
- * someone uses the macros below with an unsupported size (datatype),
- * the linker will alert us to the problem via an unresolved reference
- * error.
- */
-extern void bad_unaligned_access_length(void) __attribute__((noreturn));
-
-struct __una_u64 { __u64 x __attribute__((packed)); };
-struct __una_u32 { __u32 x __attribute__((packed)); };
-struct __una_u16 { __u16 x __attribute__((packed)); };
-
-/*
- * Elemental unaligned loads
- */
-
-static inline __u64 __uldq(const __u64 *addr)
-{
-        const struct __una_u64 *ptr = (const struct __una_u64 *) addr;
-        return ptr->x;
-}
-
-static inline __u32 __uldl(const __u32 *addr)
-{
-        const struct __una_u32 *ptr = (const struct __una_u32 *) addr;
-        return ptr->x;
-}
-
-static inline __u16 __uldw(const __u16 *addr)
-{
-        const struct __una_u16 *ptr = (const struct __una_u16 *) addr;
-        return ptr->x;
-}
-
-/*
- * Elemental unaligned stores
- */
-
-static inline void __ustq(__u64 val, __u64 *addr)
-{
-        struct __una_u64 *ptr = (struct __una_u64 *) addr;
-        ptr->x = val;
-}
-
-static inline void __ustl(__u32 val, __u32 *addr)
-{
-        struct __una_u32 *ptr = (struct __una_u32 *) addr;
-        ptr->x = val;
-}
-
-static inline void __ustw(__u16 val, __u16 *addr)
-{
-        struct __una_u16 *ptr = (struct __una_u16 *) addr;
-        ptr->x = val;
-}
-
-#define __get_unaligned(ptr, size) ({ \
-        const void *__gu_p = ptr; \
-        __u64 __val; \
-        switch (size) { \
-        case 1: \
-                __val = *(const __u8 *)__gu_p; \
-                break; \
-        case 2: \
-                __val = __uldw(__gu_p); \
-                break; \
-        case 4: \
-                __val = __uldl(__gu_p); \
-                break; \
-        case 8: \
-                __val = __uldq(__gu_p); \
-                break; \
-        default: \
-                bad_unaligned_access_length(); \
-        }; \
-        (__force __typeof__(*(ptr)))__val; \
-})
-
-#define __put_unaligned(val, ptr, size) \
-({ \
-        void *__gu_p = ptr; \
-        switch (size) { \
-        case 1: \
-                *(__u8 *)__gu_p = (__force __u8)val; \
-                break; \
-        case 2: \
-                __ustw((__force __u16)val, __gu_p); \
-                break; \
-        case 4: \
-                __ustl((__force __u32)val, __gu_p); \
-                break; \
-        case 8: \
-                __ustq(val, __gu_p); \
-                break; \
-        default: \
-                bad_unaligned_access_length(); \
-        }; \
-        (void)0; \
-})
-
-#endif /* _ASM_GENERIC_UNALIGNED_H */
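
Editor's note on the removed code: the __una_u* wrapper structs above use __attribute__((packed)) so the compiler assumes no alignment for the wrapped field and emits loads/stores that are safe at any byte address. The following is a minimal userspace sketch of that same packed-struct technique, assuming GCC or Clang; it is not part of the kernel header, and the names (una_u32, uldl, ustl) are illustrative. The attribute is applied to the whole struct here, which has the same effect as applying it to the lone member as the header does.

/*
 * Standalone illustration of the packed-struct unaligned access trick
 * used by __uldl()/__ustl() in the deleted header (hypothetical names,
 * userspace types).
 */
#include <stdint.h>
#include <stdio.h>

struct una_u32 { uint32_t x; } __attribute__((packed));

/* Load a 32-bit value from a possibly misaligned address. */
static inline uint32_t uldl(const void *addr)
{
        const struct una_u32 *p = addr;
        return p->x;
}

/* Store a 32-bit value to a possibly misaligned address. */
static inline void ustl(uint32_t val, void *addr)
{
        struct una_u32 *p = addr;
        p->x = val;
}

int main(void)
{
        unsigned char buf[8] = { 0 };

        /* buf + 1 is deliberately misaligned for a uint32_t. */
        ustl(0x12345678u, buf + 1);
        printf("read back: 0x%08x\n", uldl(buf + 1));
        return 0;
}

Compiled with e.g. "cc -O2 una_demo.c", this prints 0x12345678 even though the access straddles an odd address; the packed struct tells the compiler it may not assume natural alignment, so it generates byte-wise or hardware-unaligned accesses as the target requires.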
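The comment about bad_unaligned_access_length() describes a link-time size check: the function is declared but never defined, so if __get_unaligned()/__put_unaligned() are expanded with a size other than 1, 2, 4 or 8, the surviving call leaves an unresolved reference and the build fails at link time rather than producing silently wrong code. Below is a standalone sketch of that trick, assuming GCC or Clang (it relies on the ({ }) statement-expression extension) and at least -O1 so the dead default branch is eliminated; all names are illustrative, not kernel API.

/*
 * Illustration of the "undefined extern as build-time assert" trick.
 * Build with optimization, e.g. "cc -O2 link_trick.c"; at -O0 the dead
 * branch may survive and the link fails even for supported sizes.
 */
#include <stdint.h>
#include <stdio.h>

/* Declared but deliberately never defined anywhere. */
extern void bad_unaligned_access_length(void);

#define get_val(ptr) ({                                         \
        uint64_t __val;                                         \
        switch (sizeof(*(ptr))) {                               \
        case 2:                                                 \
                __val = *(const uint16_t *)(ptr);              \
                break;                                          \
        case 4:                                                 \
                __val = *(const uint32_t *)(ptr);              \
                break;                                          \
        default:                                                \
                /* unresolved symbol if this branch survives */ \
                bad_unaligned_access_length();                  \
                __val = 0;                                      \
                break;                                          \
        }                                                       \
        __val;                                                  \
})

int main(void)
{
        uint32_t v = 42;

        /* sizeof(v) == 4: the default branch is dead and is discarded. */
        printf("%llu\n", (unsigned long long)get_val(&v));
        return 0;
}

With a supported size the switch folds to a single case and the program links; point get_val() at an object of an unsupported size (say a 3-byte struct) and the default branch survives, yielding an "undefined reference to bad_unaligned_access_length" error at link time, which is exactly the behaviour the removed header's comment describes.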