Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 1 | #ifndef _ASM_GENERIC_UNALIGNED_H_ |
| 2 | #define _ASM_GENERIC_UNALIGNED_H_ |
| 3 | |
| 4 | /* |
| 5 | * For the benefit of those who are trying to port Linux to another |
| 6 | * architecture, here are some C-language equivalents. |
| 7 | * |
| 8 | * This is based almost entirely upon Richard Henderson's |
| 9 | * asm-alpha/unaligned.h implementation. Some comments were |
| 10 | * taken from David Mosberger's asm-ia64/unaligned.h header. |
| 11 | */ |
| 12 | |
| 13 | #include <linux/types.h> |
| 14 | |
/*
 * The main single-value unaligned transfer routines.
 *
 * get_unaligned(ptr): load *(ptr) from a possibly misaligned address;
 * the result has type __typeof__(*(ptr)).
 *
 * put_unaligned(x, ptr): store x through a possibly misaligned pointer.
 * The (void)sizeof(*(ptr) = (x)) term type-checks the assignment at
 * compile time without evaluating it (sizeof operands are unevaluated);
 * the value itself is then forwarded as a __u64 bit pattern to the
 * size-dispatched store below.
 */
#define get_unaligned(ptr) \
	__get_unaligned((ptr), sizeof(*(ptr)))
#define put_unaligned(x,ptr) \
	((void)sizeof(*(ptr)=(x)),\
	__put_unaligned((__force __u64)(x), (ptr), sizeof(*(ptr))))
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 23 | |
| 24 | /* |
| 25 | * This function doesn't actually exist. The idea is that when |
| 26 | * someone uses the macros below with an unsupported size (datatype), |
| 27 | * the linker will alert us to the problem via an unresolved reference |
| 28 | * error. |
| 29 | */ |
/* Never defined: referencing it with an unsupported size causes a
 * link-time "unresolved symbol" error instead of silent miscompilation. */
extern void bad_unaligned_access_length(void) __attribute__((noreturn));

/*
 * Single-member wrapper structs marked packed: accessing ->x tells the
 * compiler the field may be at any alignment, so it emits byte-safe
 * (unaligned-tolerant) load/store sequences on strict-alignment CPUs.
 */
struct __una_u64 { __u64 x __attribute__((packed)); };
struct __una_u32 { __u32 x __attribute__((packed)); };
struct __una_u16 { __u16 x __attribute__((packed)); };
| 35 | |
| 36 | /* |
| 37 | * Elemental unaligned loads |
| 38 | */ |
| 39 | |
Ralf Baechle | eed74df | 2005-09-06 15:17:51 -0700 | [diff] [blame] | 40 | static inline __u64 __uldq(const __u64 *addr) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 41 | { |
| 42 | const struct __una_u64 *ptr = (const struct __una_u64 *) addr; |
| 43 | return ptr->x; |
| 44 | } |
| 45 | |
Ralf Baechle | eed74df | 2005-09-06 15:17:51 -0700 | [diff] [blame] | 46 | static inline __u32 __uldl(const __u32 *addr) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 47 | { |
| 48 | const struct __una_u32 *ptr = (const struct __una_u32 *) addr; |
| 49 | return ptr->x; |
| 50 | } |
| 51 | |
Ralf Baechle | eed74df | 2005-09-06 15:17:51 -0700 | [diff] [blame] | 52 | static inline __u16 __uldw(const __u16 *addr) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 53 | { |
| 54 | const struct __una_u16 *ptr = (const struct __una_u16 *) addr; |
| 55 | return ptr->x; |
| 56 | } |
| 57 | |
| 58 | /* |
| 59 | * Elemental unaligned stores |
| 60 | */ |
| 61 | |
| 62 | static inline void __ustq(__u64 val, __u64 *addr) |
| 63 | { |
| 64 | struct __una_u64 *ptr = (struct __una_u64 *) addr; |
| 65 | ptr->x = val; |
| 66 | } |
| 67 | |
| 68 | static inline void __ustl(__u32 val, __u32 *addr) |
| 69 | { |
| 70 | struct __una_u32 *ptr = (struct __una_u32 *) addr; |
| 71 | ptr->x = val; |
| 72 | } |
| 73 | |
| 74 | static inline void __ustw(__u16 val, __u16 *addr) |
| 75 | { |
| 76 | struct __una_u16 *ptr = (struct __una_u16 *) addr; |
| 77 | ptr->x = val; |
| 78 | } |
| 79 | |
/*
 * Size-dispatched unaligned load (GCC statement expression).
 * Reads the value through an alignment-safe path (__uld*) into a __u64,
 * then casts back to the type of *(ptr).  An unsupported size produces
 * a link error via bad_unaligned_access_length() (noreturn, so __val
 * is never used uninitialized on that path).
 *
 * Fix: dropped the stray ';' after the switch's closing brace — it was
 * a useless null statement inside the statement expression, flagged by
 * -Wpedantic / -Wextra-semi-stmt.
 */
#define __get_unaligned(ptr, size) ({		\
	const void *__gu_p = ptr;		\
	__u64 __val;				\
	switch (size) {				\
	case 1:					\
		__val = *(const __u8 *)__gu_p;	\
		break;				\
	case 2:					\
		__val = __uldw(__gu_p);		\
		break;				\
	case 4:					\
		__val = __uldl(__gu_p);		\
		break;				\
	case 8:					\
		__val = __uldq(__gu_p);		\
		break;				\
	default:				\
		bad_unaligned_access_length();	\
	}					\
	(__force __typeof__(*(ptr)))__val;	\
})
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 101 | |
/*
 * Size-dispatched unaligned store (GCC statement expression).
 * Stores the low `size` bytes of val through an alignment-safe path
 * (__ust*); the trailing (void)0 makes the whole expression yield void
 * so callers cannot misuse a result.  Unsupported sizes produce a link
 * error via bad_unaligned_access_length().
 *
 * Fixes: dropped the stray ';' after the switch's closing brace (null
 * statement, -Wextra-semi-stmt); case 8 now applies (__force __u64)
 * like the other cases — an identity cast here, added for consistency.
 */
#define __put_unaligned(val, ptr, size)			\
({							\
	void *__gu_p = ptr;				\
	switch (size) {					\
	case 1:						\
		*(__u8 *)__gu_p = (__force __u8)val;	\
		break;					\
	case 2:						\
		__ustw((__force __u16)val, __gu_p);	\
		break;					\
	case 4:						\
		__ustl((__force __u32)val, __gu_p);	\
		break;					\
	case 8:						\
		__ustq((__force __u64)val, __gu_p);	\
		break;					\
	default:					\
		bad_unaligned_access_length();		\
	}						\
	(void)0;					\
})
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 123 | |
#endif /* _ASM_GENERIC_UNALIGNED_H_ */