#ifndef __ASM_SH_BYTEORDER_H
#define __ASM_SH_BYTEORDER_H

/*
 * Copyright (C) 1999 Niibe Yutaka
 * Copyright (C) 2000, 2001 Paolo Alberelli
 */
#include <linux/compiler.h>
#include <linux/types.h>

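/*
 * 32-bit byte swap, e.g. 0x12345678 -> 0x78563412.
 *
 * On 32-bit SH, swap.b exchanges the two low-order bytes and swap.w
 * exchanges the 16-bit halves, so the swap.b/swap.w/swap.b sequence
 * reverses all four bytes.  On SH-5, byterev reverses the bytes of the
 * full 64-bit register and shari shifts the result back down by 32 bits.
 */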
static inline __attribute_const__ __u32 ___arch__swab32(__u32 x)
{
	__asm__(
#ifdef __SH5__
		"byterev	%0, %0\n\t"
		"shari		%0, 32, %0"
#else
		"swap.b		%0, %0\n\t"
		"swap.w		%0, %0\n\t"
		"swap.b		%0, %0"
#endif
		: "=r" (x)
		: "0" (x));

	return x;
}

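/*
 * 16-bit byte swap, e.g. 0x1234 -> 0x3412.  On 32-bit SH a single
 * swap.b is enough, since it exchanges only the two low-order bytes.
 */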
static inline __attribute_const__ __u16 ___arch__swab16(__u16 x)
{
	__asm__(
#ifdef __SH5__
		"byterev	%0, %0\n\t"
		"shari		%0, 32, %0"
#else
		"swap.b		%0, %0"
#endif
		: "=r" (x)
		: "0" (x));

	return x;
}

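/*
 * 64-bit byte swap, built from two 32-bit swaps: each half is swapped
 * with ___arch__swab32() and the halves are exchanged via the union.
 */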
static inline __u64 ___arch__swab64(__u64 val)
{
	union {
		struct { __u32 a, b; } s;
		__u64 u;
	} v, w;
	v.u = val;
	w.s.b = ___arch__swab32(v.s.a);
	w.s.a = ___arch__swab32(v.s.b);
	return w.u;
}

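/*
 * Hook the optimized routines above into the generic linux/byteorder
 * machinery, which uses the __arch__swab*() macros in place of its
 * plain C byte-swap implementations when they are defined.
 */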
#define __arch__swab64(x) ___arch__swab64(x)
#define __arch__swab32(x) ___arch__swab32(x)
#define __arch__swab16(x) ___arch__swab16(x)

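/*
 * __BYTEORDER_HAS_U64__ advertises that 64-bit swaps are available;
 * __SWAB_64_THRU_32__ tells the generic code it may implement them as
 * two 32-bit swaps.
 */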
#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
#  define __BYTEORDER_HAS_U64__
#  define __SWAB_64_THRU_32__
#endif

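/*
 * SH runs in either endianness; the compiler defines __LITTLE_ENDIAN__
 * for little-endian builds (typically via -ml), so pick the matching
 * generic byteorder header.
 */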
#ifdef __LITTLE_ENDIAN__
#include <linux/byteorder/little_endian.h>
#else
#include <linux/byteorder/big_endian.h>
#endif

#endif /* __ASM_SH_BYTEORDER_H */