path: root/include/linux/unaligned/generic.h
author	Harvey Harrison <harvey.harrison@gmail.com>	2008-04-29 04:03:27 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2008-04-29 11:06:27 -0400
commit	064106a91be5e76cb42c1ddf5d3871e3a1bd2a23 (patch)
tree	7bb3931857c933343abf9a55d663e8a9b8af13ae /include/linux/unaligned/generic.h
parent	dddfbaf8f86894415abb8256b55da68dab966ebe (diff)
kernel: add common infrastructure for unaligned access
Create a linux/unaligned directory similar in spirit to the linux/byteorder folder to hold generic implementations collected from various arches.

Currently there are five implementations:
1) packed_struct.h: C-struct based, from asm-generic/unaligned.h
2) le_byteshift.h: Open coded byte-swapping, heavily based on asm-arm
3) be_byteshift.h: Open coded byte-swapping, heavily based on asm-arm
4) memmove.h: taken from multiple implementations in tree
5) access_ok.h: taken from x86 and others, unaligned access is ok.

All of the new implementations check for sizes not equal to 1, 2, 4 or 8 and will fail to link.

API additions:

get_unaligned_{le16|le32|le64|be16|be32|be64}(p), which is meant to replace code of the form:
	le16_to_cpu(get_unaligned((__le16 *)p));

put_unaligned_{le16|le32|le64|be16|be32|be64}(val, pointer), which is meant to replace code of the form:
	put_unaligned(cpu_to_le16(val), (__le16 *)p);

The headers that arches should include from their asm/unaligned.h:

access_ok.h : wrappers of the byteswapping functions in asm/byteorder

Choose a particular implementation for little-endian access:
	le_byteshift.h
	le_memmove.h (arch must be LE)
	le_struct.h (arch must be LE)

Choose a particular implementation for big-endian access:
	be_byteshift.h
	be_memmove.h (arch must be BE)
	be_struct.h (arch must be BE)

After including as needed from the above, include unaligned/generic.h and define your arch's get/put_unaligned (for LE) as shown in the sketch below.

Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Cc: <linux-arch@vger.kernel.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
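Based on the scheme the commit message describes, a little-endian arch's asm/unaligned.h would presumably end up looking something like the sketch below. The header guard name and the choice of le_byteshift.h/be_byteshift.h (rather than, say, access_ok.h for arches that can do unaligned loads in hardware) are illustrative assumptions, not taken from this commit.

#ifndef _ASM_EXAMPLE_UNALIGNED_H
#define _ASM_EXAMPLE_UNALIGNED_H

/*
 * Illustrative sketch for a hypothetical little-endian arch without
 * hardware unaligned access: open-coded byteshifts for both byte orders,
 * then the generic size-dispatch macros from unaligned/generic.h.
 */
#include <linux/unaligned/le_byteshift.h>
#include <linux/unaligned/be_byteshift.h>
#include <linux/unaligned/generic.h>

/* Native (LE) accessors map to the little-endian helpers. */
#define get_unaligned	__get_unaligned_le
#define put_unaligned	__put_unaligned_le

#endif /* _ASM_EXAMPLE_UNALIGNED_H */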
Diffstat (limited to 'include/linux/unaligned/generic.h')
-rw-r--r--	include/linux/unaligned/generic.h	68
1 file changed, 68 insertions, 0 deletions
diff --git a/include/linux/unaligned/generic.h b/include/linux/unaligned/generic.h
new file mode 100644
index 000000000000..02d97ff3df70
--- /dev/null
+++ b/include/linux/unaligned/generic.h
@@ -0,0 +1,68 @@
#ifndef _LINUX_UNALIGNED_GENERIC_H
#define _LINUX_UNALIGNED_GENERIC_H

/*
 * Cause a link-time error if we try an unaligned access other than
 * 1,2,4 or 8 bytes long
 */
extern void __bad_unaligned_access_size(void);

#define __get_unaligned_le(ptr) ((__force typeof(*(ptr)))({			\
	__builtin_choose_expr(sizeof(*(ptr)) == 1, *(ptr),			\
	__builtin_choose_expr(sizeof(*(ptr)) == 2, get_unaligned_le16((ptr)),	\
	__builtin_choose_expr(sizeof(*(ptr)) == 4, get_unaligned_le32((ptr)),	\
	__builtin_choose_expr(sizeof(*(ptr)) == 8, get_unaligned_le64((ptr)),	\
	__bad_unaligned_access_size()))));					\
	}))

#define __get_unaligned_be(ptr) ((__force typeof(*(ptr)))({			\
	__builtin_choose_expr(sizeof(*(ptr)) == 1, *(ptr),			\
	__builtin_choose_expr(sizeof(*(ptr)) == 2, get_unaligned_be16((ptr)),	\
	__builtin_choose_expr(sizeof(*(ptr)) == 4, get_unaligned_be32((ptr)),	\
	__builtin_choose_expr(sizeof(*(ptr)) == 8, get_unaligned_be64((ptr)),	\
	__bad_unaligned_access_size()))));					\
	}))

#define __put_unaligned_le(val, ptr) ({					\
	void *__gu_p = (ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		*(u8 *)__gu_p = (__force u8)(val);			\
		break;							\
	case 2:								\
		put_unaligned_le16((__force u16)(val), __gu_p);		\
		break;							\
	case 4:								\
		put_unaligned_le32((__force u32)(val), __gu_p);		\
		break;							\
	case 8:								\
		put_unaligned_le64((__force u64)(val), __gu_p);		\
		break;							\
	default:							\
		__bad_unaligned_access_size();				\
		break;							\
	}								\
	(void)0; })

#define __put_unaligned_be(val, ptr) ({					\
	void *__gu_p = (ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		*(u8 *)__gu_p = (__force u8)(val);			\
		break;							\
	case 2:								\
		put_unaligned_be16((__force u16)(val), __gu_p);		\
		break;							\
	case 4:								\
		put_unaligned_be32((__force u32)(val), __gu_p);		\
		break;							\
	case 8:								\
		put_unaligned_be64((__force u64)(val), __gu_p);		\
		break;							\
	default:							\
		__bad_unaligned_access_size();				\
		break;							\
	}								\
	(void)0; })

#endif /* _LINUX_UNALIGNED_GENERIC_H */
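For context, a minimal caller-side sketch of the new helpers replacing the open-coded forms quoted in the commit message. The function names and buffer layout here are made up for illustration, assuming the arch's asm/unaligned.h has been set up as in the earlier sketch.

#include <asm/unaligned.h>

/* Read a 16-bit little-endian length field from a possibly unaligned buffer. */
static u16 example_read_len(const u8 *buf)
{
	/* was: le16_to_cpu(get_unaligned((__le16 *)buf)); */
	return get_unaligned_le16(buf);
}

/* Write a 16-bit little-endian length field to a possibly unaligned buffer. */
static void example_write_len(u8 *buf, u16 len)
{
	/* was: put_unaligned(cpu_to_le16(len), (__le16 *)buf); */
	put_unaligned_le16(len, buf);
}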