author		Al Viro <viro@www.linux.org.uk>	2005-04-24 12:28:35 -0700
committer	Linus Torvalds <torvalds@ppc970.osdl.org>	2005-04-24 12:28:35 -0700
commit		3106dbcd914d8dac4b89f52d8d51ec93526cbb95 (patch)
tree		2762fe622d385bdc2381dd7504b94489dacf8247 /include/asm-generic/unaligned.h
parent		b5a48daddc88fa0467a6ba371fcff0710781bf11 (diff)
[PATCH] __get_unaligned() turned into macro
Turns __get_unaligned() and __put_unaligned() into macros. That is definitely safe; leaving them as inlines breaks on e.g. alpha [try to build ncpfs there and you'll get unresolved symbols, since we end up with __get_unaligned() not inlined].

Signed-off-by: Al Viro <viro@parcelfarce.linux.theplanet.co.uk>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
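For context (not part of the patch): callers normally go through the get_unaligned()/put_unaligned() wrappers in this header, which pass sizeof(*(ptr)) as the size argument, so each expansion of the macros below collapses to a single load or store for a constant size. A minimal sketch of such a caller, with hypothetical helper names:

#include <linux/types.h>
#include <asm/unaligned.h>

/* Hypothetical helpers, not part of the patch: read/write a 32-bit
 * field at an arbitrary (possibly unaligned) offset in a buffer.
 * get_unaligned()/put_unaligned() pass sizeof(*(ptr)) == 4 down to
 * __get_unaligned()/__put_unaligned(), so only the "case 4:" branch
 * survives constant folding and the undefined
 * bad_unaligned_access_length() is never referenced. */
static u32 example_read_u32(const u8 *buf, size_t off)
{
	return get_unaligned((const u32 *)(buf + off));
}

static void example_write_u32(u8 *buf, size_t off, u32 val)
{
	put_unaligned(val, (u32 *)(buf + off));
}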
Diffstat (limited to 'include/asm-generic/unaligned.h')
-rw-r--r--  include/asm-generic/unaligned.h  |  83
1 file changed, 42 insertions(+), 41 deletions(-)
diff --git a/include/asm-generic/unaligned.h b/include/asm-generic/unaligned.h
index c856a43e3b45..6c90f0f36eec 100644
--- a/include/asm-generic/unaligned.h
+++ b/include/asm-generic/unaligned.h
@@ -76,46 +76,47 @@ static inline void __ustw(__u16 val, __u16 *addr)
ptr->x = val;
}
-static inline unsigned long __get_unaligned(const void *ptr, size_t size)
-{
- unsigned long val;
- switch (size) {
- case 1:
- val = *(const __u8 *)ptr;
- break;
- case 2:
- val = __uldw((const __u16 *)ptr);
- break;
- case 4:
- val = __uldl((const __u32 *)ptr);
- break;
- case 8:
- val = __uldq((const __u64 *)ptr);
- break;
- default:
- bad_unaligned_access_length();
- };
- return val;
-}
-
-static inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
-{
- switch (size) {
- case 1:
- *(__u8 *)ptr = val;
- break;
- case 2:
- __ustw(val, (__u16 *)ptr);
- break;
- case 4:
- __ustl(val, (__u32 *)ptr);
- break;
- case 8:
- __ustq(val, (__u64 *)ptr);
- break;
- default:
- bad_unaligned_access_length();
- };
-}
+#define __get_unaligned(ptr, size) ({ \
+ const void *__gu_p = ptr; \
+ unsigned long val; \
+ switch (size) { \
+ case 1: \
+ val = *(const __u8 *)__gu_p; \
+ break; \
+ case 2: \
+ val = __uldw(__gu_p); \
+ break; \
+ case 4: \
+ val = __uldl(__gu_p); \
+ break; \
+ case 8: \
+ val = __uldq(__gu_p); \
+ break; \
+ default: \
+ bad_unaligned_access_length(); \
+ }; \
+ val; \
+})
+
+#define __put_unaligned(val, ptr, size) \
+do { \
+ void *__gu_p = ptr; \
+ switch (size) { \
+ case 1: \
+ *(__u8 *)__gu_p = val; \
+ break; \
+ case 2: \
+ __ustw(val, __gu_p); \
+ break; \
+ case 4: \
+ __ustl(val, __gu_p); \
+ break; \
+ case 8: \
+ __ustq(val, __gu_p); \
+ break; \
+ default: \
+ bad_unaligned_access_length(); \
+ }; \
+} while(0)
#endif /* _ASM_GENERIC_UNALIGNED_H */