Fill Class and String .bss slots in runtime.

Shift the responsibility for filling Class and String .bss
slots from compiled code to runtime. This reduces the size
of the compiled code.

Make oatdump list .bss slot mappings (ArtMethod, Class and
String) for each dex file.

aosp_taimen-userdebug boot image size:
  - before:
    arm boot*.oat: 36534524
    arm64 boot*.oat: 42723256
  - after:
    arm boot*.oat: 36431448 (-101KiB, -0.3%)
    arm64 boot*.oat: 42645016 (-76KiB, -0.2%)

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: Pixel 2 XL boots.
Test: testrunner.py --target --optimizing
Test: m dump-oat, manually inspect output.
Bug: 65737953
Change-Id: I1330d070307410107e12c309d4c7f8121baba83c
diff --git a/dex2oat/linker/index_bss_mapping_encoder.h b/dex2oat/linker/index_bss_mapping_encoder.h
new file mode 100644
index 0000000..9bc1432
--- /dev/null
+++ b/dex2oat/linker/index_bss_mapping_encoder.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_DEX2OAT_LINKER_INDEX_BSS_MAPPING_ENCODER_H_
+#define ART_DEX2OAT_LINKER_INDEX_BSS_MAPPING_ENCODER_H_
+
+#include "base/bit_utils.h"
+#include "base/bit_vector-inl.h"
+#include "base/logging.h"
+#include "index_bss_mapping.h"
+
+namespace art {
+namespace linker {
+
+// Helper class for encoding compressed IndexBssMapping. Folds consecutive index -> .bss slot mappings into single IndexBssMappingEntry values (an index plus a mask of indexes before it).
+class IndexBssMappingEncoder {
+ public:
+  IndexBssMappingEncoder(size_t number_of_indexes, size_t slot_size)
+      : index_bits_(IndexBssMappingEntry::IndexBits(number_of_indexes)),
+        slot_size_(slot_size) {
+    entry_.index_and_mask = static_cast<uint32_t>(-1);  // Sentinel; a real entry is installed via Reset().
+    entry_.bss_offset = static_cast<uint32_t>(-1);  // Sentinel; no .bss offset recorded yet.
+    DCHECK_NE(number_of_indexes, 0u);  // Validated after the init list (IndexBits() already consumed it).
+  }
+
+  // Try to merge the next index -> bss_offset mapping into the current entry; merging works
+  // only for the adjacent .bss slot and a nearby index. Return true on success, false on failure.
+  bool TryMerge(uint32_t index, uint32_t bss_offset) {
+    DCHECK_LE(MinimumBitsToStore(index), index_bits_);  // Index must fit in the index field.
+    DCHECK_NE(index, entry_.GetIndex(index_bits_));  // Must differ from the current entry's index.
+    if (entry_.bss_offset + slot_size_ != bss_offset) {  // Only the very next .bss slot can merge.
+      return false;
+    }
+    uint32_t diff = index - entry_.GetIndex(index_bits_);  // Unsigned; a smaller index wraps huge.
+    if (diff > 32u - index_bits_) {  // Too far to represent in the 32 - index_bits_ mask bits.
+      return false;
+    }
+    uint32_t mask = entry_.GetMask(index_bits_);
+    if ((mask & ~(static_cast<uint32_t>(-1) << diff)) != 0u) {  // The shift below would drop these bits.
+      return false;
+    }
+    // Insert the bit indicating the index we've just overwritten (bit 32 - diff of index_and_mask)
+    // and shift bits indicating indexes before that down by diff positions.
+    mask = ((mask << index_bits_) >> diff) | (static_cast<uint32_t>(1u) << (32 - diff));
+    entry_.index_and_mask = mask | index;  // Mask in the high bits, index in the low index_bits_ bits.
+    entry_.bss_offset = bss_offset;
+    return true;
+  }
+
+  void Reset(uint32_t method_index, uint32_t bss_offset) {  // Start a new single-mapping entry.
+    DCHECK_LE(MinimumBitsToStore(method_index), index_bits_);
+    entry_.index_and_mask = method_index;  // Mask bits set to 0 (guaranteed by the DCHECK above).
+    entry_.bss_offset = bss_offset;
+  }
+
+  IndexBssMappingEntry GetEntry() {  // Current accumulated entry, returned by value.
+    return entry_;
+  }
+
+  size_t GetIndexBits() const {  // Number of bits used for the index field of index_and_mask.
+    return index_bits_;
+  }
+
+ private:
+  const size_t index_bits_;  // Low bits of index_and_mask holding the index; the rest is the mask.
+  const size_t slot_size_;  // Size of one .bss slot, in the same units as bss_offset.
+  IndexBssMappingEntry entry_;  // Entry being accumulated; read back via GetEntry().
+};
+
+}  // namespace linker
+}  // namespace art
+
+#endif  // ART_DEX2OAT_LINKER_INDEX_BSS_MAPPING_ENCODER_H_