[scudo] Make MTE inline asm compatible with GNU assembler

The GNU assembler can't parse `.arch_extension ...` when it is followed
by a `;` statement separator on the same line. So uniformly use raw
string syntax, placing each directive and instruction on its own line
instead of separating them with `;` in the assembly code.

Reviewed By: pcc

Differential Revision: https://reviews.llvm.org/D100413

GitOrigin-RevId: 5c500c9f01aacc0d921db99681c3dc9c4078938c
Change-Id: I64041bed67a65cd20f25408ce6f53c2999927764
diff --git a/standalone/memtag.h b/standalone/memtag.h
index 0a8a0b5..c1b6b99 100644
--- a/standalone/memtag.h
+++ b/standalone/memtag.h
@@ -96,11 +96,19 @@
 #endif // SCUDO_LINUX
 
 inline void disableMemoryTagChecksTestOnly() {
-  __asm__ __volatile__(".arch_extension memtag; msr tco, #1");
+  __asm__ __volatile__(
+      R"(
+      .arch_extension memtag
+      msr tco, #1
+      )");
 }
 
 inline void enableMemoryTagChecksTestOnly() {
-  __asm__ __volatile__(".arch_extension memtag; msr tco, #0");
+  __asm__ __volatile__(
+      R"(
+      .arch_extension memtag
+      msr tco, #0
+      )");
 }
 
 class ScopedDisableMemoryTagChecks {
@@ -108,21 +116,33 @@
 
 public:
   ScopedDisableMemoryTagChecks() {
-    __asm__ __volatile__(".arch_extension memtag; mrs %0, tco; msr tco, #1"
-                         : "=r"(PrevTCO));
+    __asm__ __volatile__(
+        R"(
+        .arch_extension memtag
+        mrs %0, tco
+        msr tco, #1
+        )"
+        : "=r"(PrevTCO));
   }
 
   ~ScopedDisableMemoryTagChecks() {
-    __asm__ __volatile__(".arch_extension memtag; msr tco, %0"
-                         :
-                         : "r"(PrevTCO));
+    __asm__ __volatile__(
+        R"(
+        .arch_extension memtag
+        msr tco, %0
+        )"
+        :
+        : "r"(PrevTCO));
   }
 };
 
 inline uptr selectRandomTag(uptr Ptr, uptr ExcludeMask) {
   uptr TaggedPtr;
   __asm__ __volatile__(
-      ".arch_extension memtag; irg %[TaggedPtr], %[Ptr], %[ExcludeMask]"
+      R"(
+      .arch_extension memtag
+      irg %[TaggedPtr], %[Ptr], %[ExcludeMask]
+      )"
       : [TaggedPtr] "=r"(TaggedPtr)
       : [Ptr] "r"(Ptr), [ExcludeMask] "r"(ExcludeMask));
   return TaggedPtr;
@@ -156,10 +176,14 @@
   // chunk holding a low alignment allocation is reused for a higher alignment
   // allocation, the chunk may already have a non-zero tag from the previous
   // allocation.
-  __asm__ __volatile__(".arch_extension memtag; stg %0, [%0, #-16]"
-                       :
-                       : "r"(Ptr)
-                       : "memory");
+  __asm__ __volatile__(
+      R"(
+      .arch_extension memtag
+      stg %0, [%0, #-16]
+      )"
+      :
+      : "r"(Ptr)
+      : "memory");
 
   uptr TaggedBegin, TaggedEnd;
   setRandomTag(Ptr, Size, ExcludeMask, &TaggedBegin, &TaggedEnd);
@@ -173,10 +197,14 @@
   // purpose of catching linear overflows in this case.
   uptr UntaggedEnd = untagPointer(TaggedEnd);
   if (UntaggedEnd != BlockEnd)
-    __asm__ __volatile__(".arch_extension memtag; stg %0, [%0]"
-                         :
-                         : "r"(UntaggedEnd)
-                         : "memory");
+    __asm__ __volatile__(
+        R"(
+        .arch_extension memtag
+        stg %0, [%0]
+        )"
+        :
+        : "r"(UntaggedEnd)
+        : "memory");
   return reinterpret_cast<void *>(TaggedBegin);
 }
 
@@ -187,10 +215,14 @@
     // of the allocation to 0. See explanation in prepareTaggedChunk above.
     uptr RoundNewPtr = untagPointer(roundUpTo(NewPtr, 16));
     if (RoundNewPtr != BlockEnd)
-      __asm__ __volatile__(".arch_extension memtag; stg %0, [%0]"
-                           :
-                           : "r"(RoundNewPtr)
-                           : "memory");
+      __asm__ __volatile__(
+          R"(
+          .arch_extension memtag
+          stg %0, [%0]
+          )"
+          :
+          : "r"(RoundNewPtr)
+          : "memory");
     return;
   }
 
@@ -220,10 +252,14 @@
 
 inline uptr loadTag(uptr Ptr) {
   uptr TaggedPtr = Ptr;
-  __asm__ __volatile__(".arch_extension memtag; ldg %0, [%0]"
-                       : "+r"(TaggedPtr)
-                       :
-                       : "memory");
+  __asm__ __volatile__(
+      R"(
+      .arch_extension memtag
+      ldg %0, [%0]
+      )"
+      : "+r"(TaggedPtr)
+      :
+      : "memory");
   return TaggedPtr;
 }