Statsd CPU optimization.

The key change is a revamp of how we parse, store, and match a log event, in particular how we match
repeated fields and attribution nodes, and how we construct and compare dimensions.

+ We use an integer to encode the field of a log element, and also encode the FieldMatcher into an
  integer plus a bit mask. Matching a log field then becomes two integer operations (a sketch of the
  idea follows this item).
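
  A minimal sketch of that idea, assuming a hypothetical bit layout and type names (the real
  definitions live in FieldValue.h):

      #include <cstdint>

      // Hypothetical encoding: the position of a field within a log event packed
      // into 32 bits, e.g. one byte per nesting depth (atom field, child, ...).
      struct Matcher {
          int32_t mMatcher;  // encoded field pattern this FieldMatcher accepts
          int32_t mMask;     // which bits of the encoding are significant
      };

      // Matching an event field against a FieldMatcher is one AND plus one compare.
      inline bool matchesField(int32_t encodedField, const Matcher& m) {
          return (encodedField & m.mMask) == m.mMatcher;
      }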

+ A dimension is stored as encoded (field, value) pairs. Checking whether two dimensions are equal
  then reduces to comparing the underlying integers, which are stored contiguously in memory, so it
  is much faster than the previous tree structure (see the sketch after this item).
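
  A minimal sketch of that storage layout, with illustrative names (the real types are in
  FieldValue.h and carry more value types):

      #include <cstdint>
      #include <vector>

      // Hypothetical flat (encoded field, value) pair; the real code also handles
      // string and float values.
      struct FieldValuePair {
          int32_t encodedField;
          int64_t value;

          bool operator==(const FieldValuePair& o) const {
              return encodedField == o.encodedField && value == o.value;
          }
      };

      // A dimension is a contiguous vector of pairs, so equality is a linear
      // integer comparison with no tree traversal or pointer chasing.
      using Dimension = std::vector<FieldValuePair>;

      inline bool equalDimensions(const Dimension& a, const Dimension& b) {
          return a == b;
      }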

Start the review from FieldValue.h.

Test: statsd_test + new unit tests

Bug: 72659059

Change-Id: Iec8daeacdd3f39ab297c10ab9cd7b710a9c42e86
diff --git a/cmds/statsd/src/metrics/DurationMetricProducer.cpp b/cmds/statsd/src/metrics/DurationMetricProducer.cpp
index 65cbc4a..6b321e1 100644
--- a/cmds/statsd/src/metrics/DurationMetricProducer.cpp
+++ b/cmds/statsd/src/metrics/DurationMetricProducer.cpp
@@ -21,7 +21,6 @@
 #include "guardrail/StatsdStats.h"
 #include "stats_util.h"
 #include "stats_log_util.h"
-#include "dimension.h"
 
 #include <limits.h>
 #include <stdlib.h>
@@ -68,8 +67,7 @@
       mStartIndex(startIndex),
       mStopIndex(stopIndex),
       mStopAllIndex(stopAllIndex),
-      mNested(nesting),
-      mInternalDimensions(internalDimensions) {
+      mNested(nesting) {
     // TODO: The following boiler plate code appears in all MetricProducers, but we can't abstract
     // them in the base class, because the proto generated CountMetric, and DurationMetric are
     // not related. Maybe we should add a template in the future??
@@ -79,16 +77,28 @@
         mBucketSizeNs = LLONG_MAX;
     }
 
-    // TODO: use UidMap if uid->pkg_name is required
-    mDimensionsInWhat = metric.dimensions_in_what();
-    mDimensionsInCondition = metric.dimensions_in_condition();
+    if (metric.has_dimensions_in_what()) {
+        translateFieldMatcher(metric.dimensions_in_what(), &mDimensionsInWhat);
+    }
+
+    if (internalDimensions.has_field()) {
+        translateFieldMatcher(internalDimensions, &mInternalDimensions);
+    }
+
+    if (metric.has_dimensions_in_condition()) {
+        translateFieldMatcher(metric.dimensions_in_condition(), &mDimensionsInCondition);
+    }
 
     if (metric.links().size() > 0) {
-        mConditionLinks.insert(mConditionLinks.begin(), metric.links().begin(),
-                               metric.links().end());
+        for (const auto& link : metric.links()) {
+            Metric2Condition mc;
+            mc.conditionId = link.condition();
+            translateFieldMatcher(link.fields_in_what(), &mc.metricFields);
+            translateFieldMatcher(link.fields_in_condition(), &mc.conditionFields);
+            mMetric2ConditionLinks.push_back(mc);
+        }
     }
-    mConditionSliced = (metric.links().size() > 0)||
-        (mDimensionsInCondition.has_field() && mDimensionsInCondition.child_size() > 0);
+    mConditionSliced = (metric.links().size() > 0) || (mDimensionsInCondition.size() > 0);
 
     VLOG("metric %lld created. bucket size %lld start_time: %lld", (long long)metric.id(),
          (long long)mBucketSizeNs, (long long)mStartTimeNs);
@@ -174,37 +184,18 @@
     }
 }
 
-void DurationMetricProducer::onDumpReportLocked(const uint64_t dumpTimeNs, StatsLogReport* report) {
-    flushIfNeededLocked(dumpTimeNs);
-    report->set_metric_id(mMetricId);
-
-    auto duration_metrics = report->mutable_duration_metrics();
-    for (const auto& pair : mPastBuckets) {
-        DurationMetricData* metricData = duration_metrics->add_data();
-        *metricData->mutable_dimensions_in_what() =
-            pair.first.getDimensionKeyInWhat().getDimensionsValue();
-        *metricData->mutable_dimensions_in_condition() =
-            pair.first.getDimensionKeyInCondition().getDimensionsValue();
-        for (const auto& bucket : pair.second) {
-            auto bucketInfo = metricData->add_bucket_info();
-            bucketInfo->set_start_bucket_nanos(bucket.mBucketStartNs);
-            bucketInfo->set_end_bucket_nanos(bucket.mBucketEndNs);
-            bucketInfo->set_duration_nanos(bucket.mDuration);
-        }
-    }
-}
-
 void DurationMetricProducer::onDumpReportLocked(const uint64_t dumpTimeNs,
                                                 ProtoOutputStream* protoOutput) {
     flushIfNeededLocked(dumpTimeNs);
     if (mPastBuckets.empty()) {
+        VLOG(" Duration metric, empty return");
         return;
     }
 
     protoOutput->write(FIELD_TYPE_INT64 | FIELD_ID_ID, (long long)mMetricId);
     long long protoToken = protoOutput->start(FIELD_TYPE_MESSAGE | FIELD_ID_DURATION_METRICS);
 
-    VLOG("metric %lld dump report now...", (long long)mMetricId);
+    VLOG("Duration metric %lld dump report now...", (long long)mMetricId);
 
     for (const auto& pair : mPastBuckets) {
         const MetricDimensionKey& dimensionKey = pair.first;
@@ -216,15 +207,13 @@
         // First fill dimension.
         long long dimensionToken = protoOutput->start(
                 FIELD_TYPE_MESSAGE | FIELD_ID_DIMENSION_IN_WHAT);
-        writeDimensionsValueProtoToStream(
-            dimensionKey.getDimensionKeyInWhat().getDimensionsValue(), protoOutput);
+        writeDimensionToProto(dimensionKey.getDimensionKeyInWhat(), protoOutput);
         protoOutput->end(dimensionToken);
 
         if (dimensionKey.hasDimensionKeyInCondition()) {
             long long dimensionInConditionToken = protoOutput->start(
                     FIELD_TYPE_MESSAGE | FIELD_ID_DIMENSION_IN_CONDITION);
-            writeDimensionsValueProtoToStream(
-                dimensionKey.getDimensionKeyInCondition().getDimensionsValue(), protoOutput);
+            writeDimensionToProto(dimensionKey.getDimensionKeyInCondition(), protoOutput);
             protoOutput->end(dimensionInConditionToken);
         }
 
@@ -339,8 +328,8 @@
 
     auto it = mCurrentSlicedDurationTrackerMap.find(eventKey);
 
-    std::vector<DimensionsValue> values;
-    getDimensionKeys(event, mInternalDimensions, &values);
+    std::vector<HashableDimensionKey> values;
+    filterValues(mInternalDimensions, event.getValues(), &values);
     if (values.empty()) {
         if (matcherIndex == mStartIndex) {
             it->second->noteStart(DEFAULT_DIMENSION_KEY, condition,
@@ -349,13 +338,11 @@
             it->second->noteStop(DEFAULT_DIMENSION_KEY, event.GetTimestampNs(), false);
         }
     } else {
-        for (const DimensionsValue& value : values) {
+        for (const auto& value : values) {
             if (matcherIndex == mStartIndex) {
-                it->second->noteStart(
-                    HashableDimensionKey(value), condition, event.GetTimestampNs(), conditionKeys);
+                it->second->noteStart(value, condition, event.GetTimestampNs(), conditionKeys);
             } else if (matcherIndex == mStopIndex) {
-                it->second->noteStop(
-                   HashableDimensionKey(value), event.GetTimestampNs(), false);
+                it->second->noteStop(value, event.GetTimestampNs(), false);
             }
         }
     }