Fill Class and String .bss slots in runtime.

Shift the responsibility for filling Class and String .bss
slots from compiled code to runtime. This reduces the size
of the compiled code.

Make oatdump list .bss slot mappings (ArtMethod, Class and
String) for each dex file.

aosp_taimen-userdebug boot image size:
  - before:
    arm boot*.oat: 36534524
    arm64 boot*.oat: 42723256
  - after:
    arm boot*.oat: 36431448 (-101KiB, -0.3%)
    arm64 boot*.oat: 42645016 (-76KiB, -0.2%)

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: Pixel 2 XL boots.
Test: testrunner.py --target --optimizing
Test: m dump-oat, manually inspect output.
Bug: 65737953
Change-Id: I1330d070307410107e12c309d4c7f8121baba83c
diff --git a/runtime/index_bss_mapping.cc b/runtime/index_bss_mapping.cc
new file mode 100644
index 0000000..8d9d8cf
--- /dev/null
+++ b/runtime/index_bss_mapping.cc
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+
+#include "index_bss_mapping.h"
+
+#include "base/bit_utils.h"
+#include "base/length_prefixed_array.h"
+
+namespace art {
+
+// Resolve the .bss slot offset for `index` against this entry, or return
+// IndexBssMappingLookup::npos if this entry does not cover it.
+// An entry packs its own index into the low `index_bits` bits of
+// `index_and_mask`; the remaining high `32 - index_bits` bits are a bitmask
+// recording which of the indexes immediately below it also have .bss slots.
+size_t IndexBssMappingEntry::GetBssOffset(size_t index_bits,
+                                          uint32_t index,
+                                          size_t slot_size) const {
+  // Distance of the queried `index` below this entry's own index. Callers
+  // (see IndexBssMappingLookup::GetBssOffset) select the first entry whose
+  // index is >= `index`, so this subtraction does not wrap.
+  uint32_t diff = GetIndex(index_bits) - index;
+  if (diff == 0u) {
+    // Exact hit: the entry describes `index` itself.
+    return bss_offset;
+  }
+  // Number of preceding indexes the high-bit mask can describe.
+  size_t mask_bits = 32u - index_bits;
+  if (diff > mask_bits) {
+    // `index` lies below the range covered by this entry's mask.
+    return IndexBssMappingLookup::npos;
+  }
+  // Shift out the index bits and bits for lower indexes.
+  // Note that `index_bits + (mask_bits - diff) == 32 - diff`.
+  // After the shift, bit 0 corresponds to `index` and the higher bits to the
+  // indexes between `index` and this entry's own index.
+  uint32_t mask_from_index = index_and_mask >> (32u - diff);
+  if ((mask_from_index & 1u) != 0u) {
+    // `index` has a slot. The slots for masked indexes are laid out just
+    // below this entry's slot (one per set bit), so step back by the number
+    // of set bits at or above `index`'s position.
+    return bss_offset - POPCOUNT(mask_from_index) * slot_size;
+  } else {
+    return IndexBssMappingLookup::npos;
+  }
+}
+
+// Out-of-line definition for the in-class `static constexpr` declaration;
+// required for ODR-use before C++17 made such members implicitly inline.
+constexpr size_t IndexBssMappingLookup::npos;
+
+// Look up the .bss slot offset for `index` in `mapping`. Returns `npos`
+// when `mapping` is null or no entry records a slot for `index`.
+// Precondition (via std::partition_point): entries are partitioned, i.e.
+// sorted by their stored index.
+size_t IndexBssMappingLookup::GetBssOffset(const IndexBssMapping* mapping,
+                                           uint32_t index,
+                                           uint32_t number_of_indexes,
+                                           size_t slot_size) {
+  DCHECK_LT(index, number_of_indexes);
+  if (mapping == nullptr) {
+    // No mapping emitted for this dex file; nothing to resolve.
+    return npos;
+  }
+  // `number_of_indexes` determines how many low bits of `index_and_mask`
+  // hold an entry's own index; the rest form the preceding-indexes mask.
+  size_t index_bits = IndexBssMappingEntry::IndexBits(number_of_indexes);
+  uint32_t index_mask = IndexBssMappingEntry::IndexMask(index_bits);
+  // Binary-search for the first entry whose own index is >= `index`; an
+  // earlier entry cannot cover `index`, but this one may, either exactly or
+  // through its mask of preceding indexes.
+  auto it = std::partition_point(
+      mapping->begin(),
+      mapping->end(),
+      [=](const struct IndexBssMappingEntry& entry) {
+        return (entry.index_and_mask & index_mask) < index;
+      });
+  if (it == mapping->end()) {
+    return npos;
+  }
+  const IndexBssMappingEntry& entry = *it;
+  // Delegate the exact-match / mask-bit check and slot-offset arithmetic.
+  return entry.GetBssOffset(index_bits, index, slot_size);
+}
+
+}  // namespace art