Disable adding main and non-moving spaces to immune region in GSS

Disabled adding the main and non-moving spaces to the immune region.
This lets us recycle the bump pointer spaces for malloc space ->
malloc space compaction as well as for collector transitions.
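
For illustration only, a minimal sketch of the idea; Space,
ImmuneRegion, and BuildImmuneRegion are hypothetical stand-ins, not
the actual ART types:

#include <vector>

// Hypothetical stand-ins for the ART heap structures.
struct Space { bool is_image; bool is_zygote; };

struct ImmuneRegion {
  std::vector<Space*> spaces;
  void AddSpace(Space* s) { spaces.push_back(s); }  // never scanned or moved
};

// Only image and zygote spaces stay immune; the main and non-moving
// spaces are now visited by the collector, so their memory can be
// recycled for compaction and for collector transitions.
void BuildImmuneRegion(const std::vector<Space*>& continuous_spaces,
                       ImmuneRegion* immune) {
  for (Space* space : continuous_spaces) {
    if (space->is_image || space->is_zygote) {
      immune->AddSpace(space);
    }
    // Main and non-moving spaces are intentionally left out.
  }
}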

Also added logic for falling back to the non-moving space, since we
may copy objects there.
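
A rough sketch of the fallback path, again with hypothetical helpers
(BumpAlloc and AllocForCopy are illustrative, not the real collector
entry points):

#include <cstddef>
#include <cstdint>

// Hypothetical bump-pointer spaces standing in for the real ones.
static uint8_t to_space[1 << 10];
static size_t  to_space_pos = 0;
static uint8_t non_moving_space[1 << 10];
static size_t  non_moving_pos = 0;

static uint8_t* BumpAlloc(uint8_t* base, size_t* pos, size_t cap,
                          size_t bytes) {
  if (*pos + bytes > cap) return nullptr;  // space is full
  uint8_t* p = base + *pos;
  *pos += bytes;
  return p;
}

// If the copied object does not fit in the semi-space to-space, fall
// back to the non-moving space instead of failing the collection.
static uint8_t* AllocForCopy(size_t bytes) {
  uint8_t* dest = BumpAlloc(to_space, &to_space_pos,
                            sizeof(to_space), bytes);
  if (dest == nullptr) {
    dest = BumpAlloc(non_moving_space, &non_moving_pos,
                     sizeof(non_moving_space), bytes);
  }
  return dest;
}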

Refactored mod union table logic into MarkReachableObjects.
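
Loosely, the marking entry point now drives the mod union table walk
itself; a sketch with hypothetical types, where only the shape of the
call is meant to be suggestive:

#include <vector>

// Hypothetical stand-ins: a mod union table records references from
// an immune space into the rest of the heap.
struct MarkVisitor {};
struct ModUnionTable {
  void UpdateAndMarkReferences(MarkVisitor* v) { (void)v; }
};

std::vector<ModUnionTable*> mod_union_tables_;
MarkVisitor visitor_;

// The table walk moved here from the caller, so all root-equivalent
// marking happens in one place.
void MarkReachableObjects() {
  for (ModUnionTable* table : mod_union_tables_) {
    table->UpdateAndMarkReferences(&visitor_);
  }
  // ...normal marking continues here...
}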

No measurable performance benefit or regression.

Bug: 14059466
Bug: 16291259

Change-Id: If663d9fdbde943b988173b7f6ac844e5f78a0327
diff --git a/runtime/utils.h b/runtime/utils.h
index b47de81..2ea4953 100644
--- a/runtime/utils.h
+++ b/runtime/utils.h
@@ -167,8 +167,7 @@
 
 // For rounding integers.
 template<typename T>
-static constexpr T RoundDown(T x, typename TypeIdentity<T>::type n)
-    __attribute__((warn_unused_result));
+static constexpr T RoundDown(T x, typename TypeIdentity<T>::type n) WARN_UNUSED;
 
 template<typename T>
 static constexpr T RoundDown(T x, typename TypeIdentity<T>::type n) {
@@ -178,8 +177,7 @@
 }
 
 template<typename T>
-static constexpr T RoundUp(T x, typename TypeIdentity<T>::type n)
-    __attribute__((warn_unused_result));
+static constexpr T RoundUp(T x, typename TypeIdentity<T>::type n) WARN_UNUSED;
 
 template<typename T>
 static constexpr T RoundUp(T x, typename TypeIdentity<T>::type n) {
@@ -188,7 +186,7 @@
 
 // For aligning pointers.
 template<typename T>
-static inline T* AlignDown(T* x, uintptr_t n) __attribute__((warn_unused_result));
+static inline T* AlignDown(T* x, uintptr_t n) WARN_UNUSED;
 
 template<typename T>
 static inline T* AlignDown(T* x, uintptr_t n) {
@@ -196,7 +194,7 @@
 }
 
 template<typename T>
-static inline T* AlignUp(T* x, uintptr_t n) __attribute__((warn_unused_result));
+static inline T* AlignUp(T* x, uintptr_t n) WARN_UNUSED;
 
 template<typename T>
 static inline T* AlignUp(T* x, uintptr_t n) {
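
The diff assumes WARN_UNUSED is the project's shorthand for the same
GCC/Clang attribute it replaces (presumably defined in base/macros.h).
A minimal self-contained illustration of the equivalence:

// Shorthand for the attribute, matching what the diff assumes.
#define WARN_UNUSED __attribute__((warn_unused_result))

template <typename T>
static constexpr T RoundUp(T x, T n) WARN_UNUSED;

// Generic rounding for illustration; the real utils.h version differs.
template <typename T>
static constexpr T RoundUp(T x, T n) {
  return ((x + n - 1) / n) * n;  // assumes n > 0
}

int main() {
  RoundUp(13, 8);             // -Wunused-result: return value discarded
  return RoundUp(13, 8) / 8;  // fine: the result is consumed
}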