sh: Fix up the SH-5 build with caches enabled.
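
Convert the open-coded ocbwb/ocbp/ocbi inline assembly in the shared
flushers over to the __ocbwb()/__ocbp()/__ocbi() helpers. With the
cache instructions wrapped behind per-CPU helpers rather than
hard-coded SH-4 mnemonics, each CPU family can supply a suitable
definition, which is what the SH-5 build with caches enabled needs.
For SH-4 this is expected to be a no-op, with the helpers expanding to
the same instructions that were open-coded here.

For reference, a minimal sketch of the assumed SH-4 flavour of these
helpers (the exact header and spelling may differ):

	/*
	 * Assumed per-CPU definitions; on SH-4 each helper should expand
	 * to the same inline asm that is being removed from the loops
	 * below.
	 */
	#define __ocbwb(addr)	__asm__ __volatile__("ocbwb	@%0" : : "r" (addr))
	#define __ocbp(addr)	__asm__ __volatile__("ocbp	@%0" : : "r" (addr))
	#define __ocbi(addr)	__asm__ __volatile__("ocbi	@%0" : : "r" (addr))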

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
diff --git a/arch/sh/mm/flush-sh4.c b/arch/sh/mm/flush-sh4.c
index 99c50dc..cef4026 100644
--- a/arch/sh/mm/flush-sh4.c
+++ b/arch/sh/mm/flush-sh4.c
@@ -19,28 +19,19 @@
 	cnt = (end - v) / L1_CACHE_BYTES;
 
 	while (cnt >= 8) {
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
 		cnt -= 8;
 	}
 
 	while (cnt) {
-		asm volatile("ocbwb	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
+		__ocbwb(v); v += L1_CACHE_BYTES;
 		cnt--;
 	}
 }
@@ -62,27 +53,18 @@
 	cnt = (end - v) / L1_CACHE_BYTES;
 
 	while (cnt >= 8) {
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
 		cnt -= 8;
 	}
 	while (cnt) {
-		asm volatile("ocbp	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
+		__ocbp(v); v += L1_CACHE_BYTES;
 		cnt--;
 	}
 }
@@ -101,28 +83,19 @@
 	cnt = (end - v) / L1_CACHE_BYTES;
 
 	while (cnt >= 8) {
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
 		cnt -= 8;
 	}
 
 	while (cnt) {
-		asm volatile("ocbi	@%0" : : "r" (v));
-		v += L1_CACHE_BYTES;
+		__ocbi(v); v += L1_CACHE_BYTES;
 		cnt--;
 	}
 }