about summary refs log tree commit diff stats
path: root/include/asm-generic/unaligned.h
diff options
context:
space:
mode:
author	Al Viro <viro@www.linux.org.uk>	2005-04-24 15:28:35 -0400
committer	Linus Torvalds <torvalds@ppc970.osdl.org>	2005-04-24 15:28:35 -0400
commit	3106dbcd914d8dac4b89f52d8d51ec93526cbb95 (patch)
tree	2762fe622d385bdc2381dd7504b94489dacf8247 /include/asm-generic/unaligned.h
parent	b5a48daddc88fa0467a6ba371fcff0710781bf11 (diff)
[PATCH] __get_unaligned() turned into macro
Turns __get_unaligned() and __put_unaligned into macros. That is definitely safe; leaving them as inlines breaks on e.g. alpha [try to build ncpfs there and you'll get unresolved symbols since we end up getting __get_unaligned() not inlined]. Signed-off-by: Al Viro <viro@parcelfarce.linux.theplanet.co.uk> Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Diffstat (limited to 'include/asm-generic/unaligned.h')
-rw-r--r--	include/asm-generic/unaligned.h	| 83
1 files changed, 42 insertions, 41 deletions
diff --git a/include/asm-generic/unaligned.h b/include/asm-generic/unaligned.h
index c856a43e3b45..6c90f0f36eec 100644
--- a/include/asm-generic/unaligned.h
+++ b/include/asm-generic/unaligned.h
@@ -76,46 +76,47 @@ static inline void __ustw(__u16 val, __u16 *addr)
 	ptr->x = val;
 }
 
-static inline unsigned long __get_unaligned(const void *ptr, size_t size)
-{
-	unsigned long val;
-	switch (size) {
-	case 1:
-		val = *(const __u8 *)ptr;
-		break;
-	case 2:
-		val = __uldw((const __u16 *)ptr);
-		break;
-	case 4:
-		val = __uldl((const __u32 *)ptr);
-		break;
-	case 8:
-		val = __uldq((const __u64 *)ptr);
-		break;
-	default:
-		bad_unaligned_access_length();
-	};
-	return val;
-}
+#define __get_unaligned(ptr, size) ({			\
+	const void *__gu_p = ptr;			\
+	unsigned long val;				\
+	switch (size) {					\
+	case 1:						\
+		val = *(const __u8 *)__gu_p;		\
+		break;					\
+	case 2:						\
+		val = __uldw(__gu_p);			\
+		break;					\
+	case 4:						\
+		val = __uldl(__gu_p);			\
+		break;					\
+	case 8:						\
+		val = __uldq(__gu_p);			\
+		break;					\
+	default:					\
+		bad_unaligned_access_length();		\
+	};						\
+	val;						\
+})
 
-static inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
-{
-	switch (size) {
-	case 1:
-		*(__u8 *)ptr = val;
-		break;
-	case 2:
-		__ustw(val, (__u16 *)ptr);
-		break;
-	case 4:
-		__ustl(val, (__u32 *)ptr);
-		break;
-	case 8:
-		__ustq(val, (__u64 *)ptr);
-		break;
-	default:
-		bad_unaligned_access_length();
-	};
-}
+#define __put_unaligned(val, ptr, size)			\
+do {							\
+	void *__gu_p = ptr;				\
+	switch (size) {					\
+	case 1:						\
+		*(__u8 *)__gu_p = val;			\
+		break;					\
+	case 2:						\
+		__ustw(val, __gu_p);			\
+		break;					\
+	case 4:						\
+		__ustl(val, __gu_p);			\
+		break;					\
+	case 8:						\
+		__ustq(val, __gu_p);			\
+		break;					\
+	default:					\
+		bad_unaligned_access_length();		\
+	};						\
+} while(0)
 
 #endif /* _ASM_GENERIC_UNALIGNED_H */