JIT baseline: trigger optimized compilation on hotness threshold.
- Add a new hotness counter in the ProfilingInfo so it does not conflict with
  the interpreter hotness counter, which may be used for OSR.
- Add a baseline flag in the OatQuickMethodHeader to identify baseline
compiled methods.
- Add a -Xusetieredjit flag to experiment and test.
Bug: 119800099
Test: test.py with -Xusetieredjit set to true
Change-Id: I8512853f869f1312e3edc60bf64413dee9143c52
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 3f6010d..dd6d1a2 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -42,7 +42,8 @@
void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
size_t core_spill_mask,
size_t fp_spill_mask,
- uint32_t num_dex_registers) {
+ uint32_t num_dex_registers,
+ bool baseline) {
DCHECK(!in_method_) << "Mismatched Begin/End calls";
in_method_ = true;
DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";
@@ -52,6 +53,7 @@
core_spill_mask_ = core_spill_mask;
fp_spill_mask_ = fp_spill_mask;
num_dex_registers_ = num_dex_registers;
+ baseline_ = baseline;
if (kVerifyStackMaps) {
dchecks_.emplace_back([=](const CodeInfo& code_info) {
@@ -299,6 +301,7 @@
DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
+ flags |= baseline_ ? CodeInfo::kIsBaseline : 0;
uint32_t bit_table_flags = 0;
ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
if (bit_table->size() != 0) { // Record which bit-tables are stored.