AArch64: Add AArch64 memory barriers.

AArch64 memory barriers are essentially the same as on ARMv7: the same
"dmb ish" and "dmb ishst" instructions apply unchanged, so each Membar
helper only needs its preprocessor guard extended to also match
__aarch64__.
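
For illustration only (not part of this patch), here is a minimal
standalone sketch of the same barrier selection together with a typical
publish/consume use. The Barrier*, Publish, and Consume names are
hypothetical, not ART APIs:

  // Order earlier stores before later stores.
  static inline void BarrierStoreStore() {
  #if defined(__arm__) || defined(__aarch64__)
    __asm__ __volatile__("dmb ishst" : : : "memory");  // store-store dmb
  #elif defined(__i386__) || defined(__x86_64__)
    __asm__ __volatile__("" : : : "memory");  // x86 orders stores; compiler barrier only
  #else
    __sync_synchronize();  // conservative full-barrier fallback
  #endif
  }

  // Order earlier loads before later loads.
  static inline void BarrierLoadLoad() {
  #if defined(__arm__) || defined(__aarch64__)
    __asm__ __volatile__("dmb ish" : : : "memory");  // full inner-shareable dmb
  #elif defined(__i386__) || defined(__x86_64__)
    __asm__ __volatile__("" : : : "memory");  // x86 orders loads; compiler barrier only
  #else
    __sync_synchronize();
  #endif
  }

  int payload;
  volatile int ready = 0;

  void Publish(int v) {
    payload = v;
    BarrierStoreStore();  // payload store becomes visible before the flag
    ready = 1;
  }

  int Consume() {
    while (ready == 0) {}  // spin until published
    BarrierLoadLoad();     // flag load completes before the payload load
    return payload;
  }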

Change-Id: Id3e7ee5341833cae5062eec95dad888e1f60ae8d
Signed-off-by: Stuart Monteith <stuart.monteith@arm.com>
diff --git a/runtime/atomic.h b/runtime/atomic.h
index 2a47e46..b099a9c 100644
--- a/runtime/atomic.h
+++ b/runtime/atomic.h
@@ -141,7 +141,7 @@
   }
 
   static void MembarLoadStore() {
-  #if defined(__arm__)
+  #if defined(__arm__) || defined(__aarch64__)
     __asm__ __volatile__("dmb ish" : : : "memory");
   #elif defined(__i386__) || defined(__x86_64__)
     __asm__ __volatile__("" : : : "memory");
@@ -153,7 +153,7 @@
   }
 
   static void MembarLoadLoad() {
-  #if defined(__arm__)
+  #if defined(__arm__) || defined(__aarch64__)
     __asm__ __volatile__("dmb ish" : : : "memory");
   #elif defined(__i386__) || defined(__x86_64__)
     __asm__ __volatile__("" : : : "memory");
@@ -165,7 +165,7 @@
   }
 
   static void MembarStoreStore() {
-  #if defined(__arm__)
+  #if defined(__arm__) || defined(__aarch64__)
     __asm__ __volatile__("dmb ishst" : : : "memory");
   #elif defined(__i386__) || defined(__x86_64__)
     __asm__ __volatile__("" : : : "memory");
@@ -177,7 +177,7 @@
   }
 
   static void MembarStoreLoad() {
-  #if defined(__arm__)
+  #if defined(__arm__) || defined(__aarch64__)
     __asm__ __volatile__("dmb ish" : : : "memory");
   #elif defined(__i386__) || defined(__x86_64__)
     __asm__ __volatile__("mfence" : : : "memory");
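
Editorial note, not part of the patch: store-load is the only reordering
x86's memory model performs, which is why MembarStoreLoad above is the
one barrier that needs a real instruction (mfence) on x86 while the
other three compile to a pure compiler barrier there; ARMv7 and AArch64
need an explicit dmb in every case. A hedged Dekker-style sketch of the
race the StoreLoad barrier closes (names hypothetical):

  volatile int flag0 = 0, flag1 = 0;

  int Thread0() {               // Thread1 is the mirror image, with the
    flag0 = 1;                  // roles of flag0 and flag1 swapped.
    // Without a StoreLoad barrier, the load of flag1 below may be
    // satisfied before the store to flag0 drains from the store buffer,
    // allowing BOTH threads to observe 0 and enter the critical section.
  #if defined(__arm__) || defined(__aarch64__)
    __asm__ __volatile__("dmb ish" : : : "memory");
  #elif defined(__i386__) || defined(__x86_64__)
    __asm__ __volatile__("mfence" : : : "memory");
  #endif
    return flag1;               // 0 means this thread may proceed
  }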