byteorder: only use linux/swab.h

This is the first step in making swab.h a regular header that will
include an asm/swab.h with arch overrides.
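
As an illustration of the override hook (a sketch only, not part of
this patch; the bswap asm is just an example of what an arch might
supply), an arch header can provide something like:

	/* hypothetical asm/swab.h fragment */
	#include <linux/types.h>

	static inline __u32 __arch_swab32(__u32 val)
	{
		__asm__("bswap %0" : "+r" (val));	/* arch byte-swap insn */
		return val;
	}
	#define __arch_swab32 __arch_swab32

linux/swab.h then uses __arch_swab32() where the macro is defined and
falls back to the generic ___constant_swab32() expression otherwise.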

Avoid the gratuitous differences introduced in the new linux/swab.h
by naming the ___constant_swabXX and __fswabXX bits exactly as they
appear in the old implementation in byteorder/swab[b].h.

Use this new swab.h in byteorder/[big|little]_endian.h and
remove the two old swab headers.
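
For reference, the endian helpers then layer directly on these
primitives, roughly along these lines (paraphrased excerpt in the
style of byteorder/little_endian.h, which may differ in detail from
the actual header):

	#include <linux/swab.h>

	#define __cpu_to_le32(x)  ((__force __le32)(__u32)(x))
	#define __le32_to_cpu(x)  ((__force __u32)(__le32)(x))
	#define __cpu_to_be32(x)  ((__force __be32)__swab32((x)))
	#define __be32_to_cpu(x)  __swab32((__force __u32)(__be32)(x))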

Although the inclusion of asm/byteorder.h in linux/swab.h looks
strange, it allows each arch to move its arch overrides for the swab
bits into an asm file; the includes can then be cleaned up without
requiring a flag day for all arches at once.

Keep providing __fswabXX in case some userspace was using them
directly, but new code should use the revised __swabXX instead: it
always does constant folding, independent of the optimization level,
which means the __constant versions can be phased out in-kernel.

Arches that use the old-style arch macros will lose their optimized
versions until they move to the new style, but at least they will
still compile.  Many arches have already moved, and the patches to
convert the remaining arches are trivial.

Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
diff --git a/include/linux/swab.h b/include/linux/swab.h
index bbed279..9a2d33e 100644
--- a/include/linux/swab.h
+++ b/include/linux/swab.h
@@ -9,17 +9,17 @@
  * casts are necessary for constants, because we never know how for sure
  * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
  */
-#define __const_swab16(x) ((__u16)(				\
+#define ___constant_swab16(x) ((__u16)(				\
 	(((__u16)(x) & (__u16)0x00ffU) << 8) |			\
 	(((__u16)(x) & (__u16)0xff00U) >> 8)))
 
-#define __const_swab32(x) ((__u32)(				\
+#define ___constant_swab32(x) ((__u32)(				\
 	(((__u32)(x) & (__u32)0x000000ffUL) << 24) |		\
 	(((__u32)(x) & (__u32)0x0000ff00UL) <<  8) |		\
 	(((__u32)(x) & (__u32)0x00ff0000UL) >>  8) |		\
 	(((__u32)(x) & (__u32)0xff000000UL) >> 24)))
 
-#define __const_swab64(x) ((__u64)(				\
+#define ___constant_swab64(x) ((__u64)(				\
 	(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) |	\
 	(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) |	\
 	(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) |	\
@@ -29,11 +29,11 @@
 	(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) |	\
 	(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56)))
 
-#define __const_swahw32(x) ((__u32)(				\
+#define ___constant_swahw32(x) ((__u32)(			\
 	(((__u32)(x) & (__u32)0x0000ffffUL) << 16) |		\
 	(((__u32)(x) & (__u32)0xffff0000UL) >> 16)))
 
-#define __const_swahb32(x) ((__u32)(				\
+#define ___constant_swahb32(x) ((__u32)(			\
 	(((__u32)(x) & (__u32)0x00ff00ffUL) << 8) |		\
 	(((__u32)(x) & (__u32)0xff00ff00UL) >> 8)))
 
@@ -43,25 +43,25 @@
  * ___swab16, ___swab32, ___swab64, ___swahw32, ___swahb32
  */
 
-static inline __attribute_const__ __u16 ___swab16(__u16 val)
+static inline __attribute_const__ __u16 __fswab16(__u16 val)
 {
 #ifdef __arch_swab16
 	return __arch_swab16(val);
 #else
-	return __const_swab16(val);
+	return ___constant_swab16(val);
 #endif
 }
 
-static inline __attribute_const__ __u32 ___swab32(__u32 val)
+static inline __attribute_const__ __u32 __fswab32(__u32 val)
 {
 #ifdef __arch_swab32
 	return __arch_swab32(val);
 #else
-	return __const_swab32(val);
+	return ___constant_swab32(val);
 #endif
 }
 
-static inline __attribute_const__ __u64 ___swab64(__u64 val)
+static inline __attribute_const__ __u64 __fswab64(__u64 val)
 {
 #ifdef __arch_swab64
 	return __arch_swab64(val);
@@ -70,25 +70,25 @@
 	__u32 l = val & ((1ULL << 32) - 1);
 	return (((__u64)___swab32(l)) << 32) | ((__u64)(___swab32(h)));
 #else
-	return __const_swab64(val);
+	return ___constant_swab64(val);
 #endif
 }
 
-static inline __attribute_const__ __u32 ___swahw32(__u32 val)
+static inline __attribute_const__ __u32 __fswahw32(__u32 val)
 {
 #ifdef __arch_swahw32
 	return __arch_swahw32(val);
 #else
-	return __const_swahw32(val);
+	return ___constant_swahw32(val);
 #endif
 }
 
-static inline __attribute_const__ __u32 ___swahb32(__u32 val)
+static inline __attribute_const__ __u32 __fswahb32(__u32 val)
 {
 #ifdef __arch_swahb32
 	return __arch_swahb32(val);
 #else
-	return __const_swahb32(val);
+	return ___constant_swahb32(val);
 #endif
 }
 
@@ -98,8 +98,8 @@
  */
 #define __swab16(x)				\
 	(__builtin_constant_p((__u16)(x)) ?	\
-	__const_swab16((x)) :			\
-	___swab16((x)))
+	___constant_swab16(x) :			\
+	__fswab16(x))
 
 /**
  * __swab32 - return a byteswapped 32-bit value
@@ -107,8 +107,8 @@
  */
 #define __swab32(x)				\
 	(__builtin_constant_p((__u32)(x)) ?	\
-	__const_swab32((x)) :			\
-	___swab32((x)))
+	___constant_swab32(x) :			\
+	__fswab32(x))
 
 /**
  * __swab64 - return a byteswapped 64-bit value
@@ -116,8 +116,8 @@
  */
 #define __swab64(x)				\
 	(__builtin_constant_p((__u64)(x)) ?	\
-	__const_swab64((x)) :			\
-	___swab64((x)))
+	___constant_swab64(x) :			\
+	__fswab64(x))
 
 /**
  * __swahw32 - return a word-swapped 32-bit value
@@ -127,8 +127,8 @@
  */
 #define __swahw32(x)				\
 	(__builtin_constant_p((__u32)(x)) ?	\
-	__const_swahw32((x)) :			\
-	___swahw32((x)))
+	___constant_swahw32(x) :		\
+	__fswahw32(x))
 
 /**
  * __swahb32 - return a high and low byte-swapped 32-bit value
@@ -138,8 +138,8 @@
  */
 #define __swahb32(x)				\
 	(__builtin_constant_p((__u32)(x)) ?	\
-	__const_swahb32((x)) :			\
-	___swahb32((x)))
+	___constant_swahb32(x) :		\
+	__fswahb32(x))
 
 /**
  * __swab16p - return a byteswapped 16-bit value from a pointer