Merge "ART: Make Touch's stack array smaller under ASAN"
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index 060f12d..bf5cf29 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -154,8 +154,13 @@
     }
     pre_fence_visitor(obj, usable_size);
     QuasiAtomic::ThreadFenceForConstructor();
-    new_num_bytes_allocated = static_cast<size_t>(
-        num_bytes_allocated_.FetchAndAddRelaxed(bytes_tl_bulk_allocated)) + bytes_tl_bulk_allocated;
+    new_num_bytes_allocated = num_bytes_allocated_.FetchAndAddRelaxed(bytes_tl_bulk_allocated) +
+        bytes_tl_bulk_allocated;
+    if (bytes_tl_bulk_allocated > 0) {
+      // Only trace when we get an increase in the number of bytes allocated. This happens when
+      // obtaining a new TLAB and isn't often enough to hurt performance according to golem.
+      TraceHeapSize(new_num_bytes_allocated);
+    }
   }
   if (kIsDebugBuild && Runtime::Current()->IsStarted()) {
     CHECK_LE(obj->SizeOf(), usable_size);
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 880b2d4..ad4c0d5 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2678,6 +2678,10 @@
   }
 }
 
+void Heap::TraceHeapSize(size_t heap_size) {
+  ATRACE_INT("Heap size (KB)", heap_size / KB);
+}
+
 collector::GcType Heap::CollectGarbageInternal(collector::GcType gc_type,
                                                GcCause gc_cause,
                                                bool clear_soft_references) {
@@ -2726,8 +2730,6 @@
     ++self->GetStats()->gc_for_alloc_count;
   }
   const uint64_t bytes_allocated_before_gc = GetBytesAllocated();
-  // Approximate heap size.
-  ATRACE_INT("Heap size (KB)", bytes_allocated_before_gc / KB);
 
   if (gc_type == NonStickyGcType()) {
     // Move all bytes from new_native_bytes_allocated_ to
@@ -3632,6 +3634,8 @@
   // We know what our utilization is at this moment.
   // This doesn't actually resize any memory. It just lets the heap grow more when necessary.
   const uint64_t bytes_allocated = GetBytesAllocated();
+  // Trace the new heap size after the GC is finished.
+  TraceHeapSize(bytes_allocated);
   uint64_t target_size;
   collector::GcType gc_type = collector_ran->GetGcType();
   const double multiplier = HeapGrowthMultiplier();  // Use the multiplier to grow more for
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 3484e02..9e55081 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -1089,6 +1089,8 @@
     return growth_limit_ / 2;
   }
 
+  void TraceHeapSize(size_t heap_size);
+
   // All-known continuous spaces, where objects lie within fixed bounds.
   std::vector<space::ContinuousSpace*> continuous_spaces_ GUARDED_BY(Locks::mutator_lock_);
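
For context, the counter pattern this change introduces can be summarized outside of ART as a minimal standalone sketch. This is not ART code: HeapSketch, RecordBulkAllocation() and TraceCounter() are illustrative stand-ins (TraceCounter() replaces the ATRACE_INT macro used in heap.cc), while the relaxed fetch-add on num_bytes_allocated_, the bytes_tl_bulk_allocated > 0 check, and the KB scaling mirror the hunks above.

// Minimal sketch (not ART code) of the counter-tracing pattern: the running
// total is updated with a relaxed fetch-and-add, and the counter is emitted
// only when a bulk (TLAB-sized) allocation actually increases the total.
#include <atomic>
#include <cstddef>
#include <cstdio>

namespace {

constexpr size_t KB = 1024;

// Stand-in for ATRACE_INT("Heap size (KB)", value): just print the counter.
void TraceCounter(const char* name, size_t value) {
  std::printf("%s: %zu\n", name, value);
}

class HeapSketch {
 public:
  // Record a bulk allocation (e.g. a newly handed-out TLAB) and trace the new
  // total, mirroring the bytes_tl_bulk_allocated > 0 check in heap-inl.h.
  size_t RecordBulkAllocation(size_t bytes_tl_bulk_allocated) {
    size_t new_num_bytes_allocated =
        num_bytes_allocated_.fetch_add(bytes_tl_bulk_allocated,
                                       std::memory_order_relaxed) +
        bytes_tl_bulk_allocated;
    if (bytes_tl_bulk_allocated > 0) {
      // Only trace when the total actually grows; per-object allocations
      // served from an existing TLAB never reach this path.
      TraceHeapSize(new_num_bytes_allocated);
    }
    return new_num_bytes_allocated;
  }

  // Counterpart of Heap::TraceHeapSize(): report the size in KB.
  void TraceHeapSize(size_t heap_size) {
    TraceCounter("Heap size (KB)", heap_size / KB);
  }

 private:
  std::atomic<size_t> num_bytes_allocated_{0};
};

}  // namespace

int main() {
  HeapSketch heap;
  heap.RecordBulkAllocation(256 * KB);  // New TLAB: counter emitted.
  heap.RecordBulkAllocation(0);         // No growth: nothing emitted.
  heap.RecordBulkAllocation(512 * KB);  // Another TLAB: counter emitted.
  return 0;
}

Emitting the counter only when a TLAB is handed out keeps the tracing cost off the per-object allocation fast path, which is the rationale given in the heap-inl.h comment about golem performance.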