Multi-value aggregation in ValueMetric

Allow aggregation on multiple fields instead of one at a time.
All of these fields share the same aggregation type, use_diff,
value_direction, etc.
The config reuses value_field but now allows multiple fields to be
specified. The order in which they are specified determines the
"index" of each value in the output.
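
For illustration, a multi-field config might look like the sketch
below (the atom id and field numbers are hypothetical; any atom with
several numeric fields follows the same pattern):

  value_field {
    field: 10016           # hypothetical atom id
    child { field: 2 }     # reported as value index 0
    child { field: 3 }     # reported as value index 1
  }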

Bug: 119217634
Test: unit test
Change-Id: I38b1465d13723a897b30ee0b4f868498f60ad4db
diff --git a/cmds/statsd/src/metrics/ValueMetricProducer.cpp b/cmds/statsd/src/metrics/ValueMetricProducer.cpp
index c8b1cf0..7250b17 100644
--- a/cmds/statsd/src/metrics/ValueMetricProducer.cpp
+++ b/cmds/statsd/src/metrics/ValueMetricProducer.cpp
@@ -64,8 +64,10 @@
 const int FIELD_ID_DIMENSION_LEAF_IN_WHAT = 4;
 const int FIELD_ID_DIMENSION_LEAF_IN_CONDITION = 5;
 // for ValueBucketInfo
-const int FIELD_ID_VALUE_LONG = 7;
-const int FIELD_ID_VALUE_DOUBLE = 8;
+const int FIELD_ID_VALUE_INDEX = 1;
+const int FIELD_ID_VALUE_LONG = 2;
+const int FIELD_ID_VALUE_DOUBLE = 3;
+const int FIELD_ID_VALUES = 9;
 const int FIELD_ID_BUCKET_NUM = 4;
 const int FIELD_ID_START_BUCKET_ELAPSED_MILLIS = 5;
 const int FIELD_ID_END_BUCKET_ELAPSED_MILLIS = 6;
@@ -78,7 +80,6 @@
                                          const sp<StatsPullerManager>& pullerManager)
     : MetricProducer(metric.id(), key, timeBaseNs, conditionIndex, wizard),
       mPullerManager(pullerManager),
-      mValueField(metric.value_field()),
       mPullTagId(pullTagId),
       mIsPulled(pullTagId != -1),
       mMinBucketSizeNs(metric.min_bucket_size_nanos()),
@@ -103,6 +104,9 @@
     }
 
     mBucketSizeNs = bucketSizeMills * 1000000;
+
+    translateFieldMatcher(metric.value_field(), &mFieldMatchers);
+
     if (metric.has_dimensions_in_what()) {
         translateFieldMatcher(metric.dimensions_in_what(), &mDimensionsInWhat);
         mContainANYPositionInDimensionsInWhat = HasPositionANY(metric.dimensions_in_what());
@@ -122,9 +126,6 @@
         }
     }
 
-    if (mValueField.child_size() > 0) {
-        mField = mValueField.child(0).field();
-    }
     mConditionSliced = (metric.links().size() > 0) || (mDimensionsInCondition.size() > 0);
     mSliceByPositionALL = HasPositionALL(metric.dimensions_in_what()) ||
                           HasPositionALL(metric.dimensions_in_condition());
@@ -259,18 +260,27 @@
                 protoOutput->write(FIELD_TYPE_INT64 | FIELD_ID_BUCKET_NUM,
                                    (long long)(getBucketNumFromEndTimeNs(bucket.mBucketEndNs)));
             }
-            if (bucket.value.getType() == LONG) {
-                protoOutput->write(FIELD_TYPE_INT64 | FIELD_ID_VALUE_LONG,
-                                   (long long)bucket.value.long_value);
-                VLOG("\t bucket [%lld - %lld] count: %lld", (long long)bucket.mBucketStartNs,
-                     (long long)bucket.mBucketEndNs, (long long)bucket.value.long_value);
-            } else if (bucket.value.getType() == DOUBLE) {
-                protoOutput->write(FIELD_TYPE_DOUBLE | FIELD_ID_VALUE_DOUBLE,
-                                   bucket.value.double_value);
-                VLOG("\t bucket [%lld - %lld] count: %.2f", (long long)bucket.mBucketStartNs,
-                     (long long)bucket.mBucketEndNs, bucket.value.double_value);
-            } else {
-                VLOG("Wrong value type for ValueMetric output: %d", bucket.value.getType());
+            for (int i = 0; i < (int)bucket.valueIndex.size(); i++) {
+                int index = bucket.valueIndex[i];
+                const Value& value = bucket.values[i];
+                uint64_t valueToken = protoOutput->start(
+                        FIELD_TYPE_MESSAGE | FIELD_COUNT_REPEATED | FIELD_ID_VALUES);
+                protoOutput->write(FIELD_TYPE_INT32 | FIELD_ID_VALUE_INDEX,
+                                   index);
+                if (value.getType() == LONG) {
+                    protoOutput->write(FIELD_TYPE_INT64 | FIELD_ID_VALUE_LONG,
+                                       (long long)value.long_value);
+                    VLOG("\t bucket [%lld - %lld] value %d: %lld", (long long)bucket.mBucketStartNs,
+                         (long long)bucket.mBucketEndNs, index, (long long)value.long_value);
+                } else if (value.getType() == DOUBLE) {
+                    protoOutput->write(FIELD_TYPE_DOUBLE | FIELD_ID_VALUE_DOUBLE,
+                                       value.double_value);
+                    VLOG("\t bucket [%lld - %lld] value %d: %.2f", (long long)bucket.mBucketStartNs,
+                         (long long)bucket.mBucketEndNs, index, value.double_value);
+                } else {
+                    VLOG("Wrong value type for ValueMetric output: %d", value.getType());
+                }
+                protoOutput->end(valueToken);
             }
             protoOutput->end(bucketInfoToken);
         }
@@ -303,7 +313,9 @@
     // when condition change from true to false, clear diff base
     if (mUseDiff && mCondition && !condition) {
         for (auto& slice : mCurrentSlicedBucket) {
-            slice.second.hasBase = false;
+            for (auto& interval : slice.second) {
+                interval.hasBase = false;
+            }
         }
     }
 
@@ -363,10 +375,12 @@
             (unsigned long)mCurrentSlicedBucket.size());
     if (verbose) {
         for (const auto& it : mCurrentSlicedBucket) {
+          for (const auto& interval : it.second) {
             fprintf(out, "\t(what)%s\t(condition)%s  (value)%s\n",
                     it.first.getDimensionKeyInWhat().toString().c_str(),
                     it.first.getDimensionKeyInCondition().toString().c_str(),
-                    it.second.value.toString().c_str());
+                    interval.value.toString().c_str());
+          }
         }
     }
 }
@@ -391,25 +405,29 @@
     return false;
 }
 
-const Value getDoubleOrLong(const Value& value) {
-    Value v;
-    switch (value.type) {
-        case INT:
-            v.setLong(value.int_value);
-            break;
-        case LONG:
-            v.setLong(value.long_value);
-            break;
-        case FLOAT:
-            v.setDouble(value.float_value);
-            break;
-        case DOUBLE:
-            v.setDouble(value.double_value);
-            break;
-        default:
-            break;
+bool getDoubleOrLong(const LogEvent& event, const Matcher& matcher, Value& ret) {
+    for (const FieldValue& value : event.getValues()) {
+        if (value.mField.matches(matcher)) {
+            switch (value.mValue.type) {
+                case INT:
+                    ret.setLong(value.mValue.int_value);
+                    break;
+                case LONG:
+                    ret.setLong(value.mValue.long_value);
+                    break;
+                case FLOAT:
+                    ret.setDouble(value.mValue.float_value);
+                    break;
+                case DOUBLE:
+                    ret.setDouble(value.mValue.double_value);
+                    break;
+                default:
+                    break;
+            }
+            return true;
+        }
     }
-    return v;
+    return false;
 }
 
 void ValueMetricProducer::onMatchedLogEventInternalLocked(const size_t matcherIndex,
@@ -436,82 +454,90 @@
     if (hitGuardRailLocked(eventKey)) {
         return;
     }
-    Interval& interval = mCurrentSlicedBucket[eventKey];
-
-    if (mField > event.size()) {
-        VLOG("Failed to extract value field %d from atom %s. %d", mField, event.ToString().c_str(),
-             (int)event.size());
-        return;
+    vector<Interval>& multiIntervals = mCurrentSlicedBucket[eventKey];
+    if (multiIntervals.size() < mFieldMatchers.size()) {
+        VLOG("Resizing number of intervals to %d", (int)mFieldMatchers.size());
+        multiIntervals.resize(mFieldMatchers.size());
     }
-    Value value = getDoubleOrLong(event.getValues()[mField - 1].mValue);
 
-    if (mUseDiff) {
-        // no base. just update base and return.
-        if (!interval.hasBase) {
-            interval.base = value;
-            interval.hasBase = true;
+    for (int i = 0; i < (int)mFieldMatchers.size(); i++) {
+        const Matcher& matcher = mFieldMatchers[i];
+        Interval& interval = multiIntervals[i];
+        interval.valueIndex = i;
+        Value value;
+        if (!getDoubleOrLong(event, matcher, value)) {
+            VLOG("Failed to get value %d from event %s", i, event.ToString().c_str());
             return;
         }
-        Value diff;
-        switch (mValueDirection) {
-            case ValueMetric::INCREASING:
-                if (value >= interval.base) {
-                    diff = value - interval.base;
-                } else if (mUseAbsoluteValueOnReset) {
-                    diff = value;
-                } else {
-                    VLOG("Unexpected decreasing value");
-                    StatsdStats::getInstance().notePullDataError(mPullTagId);
-                    interval.base = value;
-                    return;
-                }
-                break;
-            case ValueMetric::DECREASING:
-                if (interval.base >= value) {
-                    diff = interval.base - value;
-                } else if (mUseAbsoluteValueOnReset) {
-                    diff = value;
-                } else {
-                    VLOG("Unexpected increasing value");
-                    StatsdStats::getInstance().notePullDataError(mPullTagId);
-                    interval.base = value;
-                    return;
-                }
-                break;
-            case ValueMetric::ANY:
-                diff = value - interval.base;
-                break;
-            default:
-                break;
-        }
-        interval.base = value;
-        value = diff;
-    }
 
-    if (interval.hasValue) {
-        switch (mAggregationType) {
-            case ValueMetric::SUM:
-                // for AVG, we add up and take average when flushing the bucket
-            case ValueMetric::AVG:
-                interval.value += value;
-                break;
-            case ValueMetric::MIN:
-                interval.value = std::min(value, interval.value);
-                break;
-            case ValueMetric::MAX:
-                interval.value = std::max(value, interval.value);
-                break;
-            default:
-                break;
+        if (mUseDiff) {
+            // no base. just update base and return.
+            if (!interval.hasBase) {
+                interval.base = value;
+                interval.hasBase = true;
+                return;
+            }
+            Value diff;
+            switch (mValueDirection) {
+                case ValueMetric::INCREASING:
+                    if (value >= interval.base) {
+                        diff = value - interval.base;
+                    } else if (mUseAbsoluteValueOnReset) {
+                        diff = value;
+                    } else {
+                        VLOG("Unexpected decreasing value");
+                        StatsdStats::getInstance().notePullDataError(mPullTagId);
+                        interval.base = value;
+                        return;
+                    }
+                    break;
+                case ValueMetric::DECREASING:
+                    if (interval.base >= value) {
+                        diff = interval.base - value;
+                    } else if (mUseAbsoluteValueOnReset) {
+                        diff = value;
+                    } else {
+                        VLOG("Unexpected increasing value");
+                        StatsdStats::getInstance().notePullDataError(mPullTagId);
+                        interval.base = value;
+                        return;
+                    }
+                    break;
+                case ValueMetric::ANY:
+                    diff = value - interval.base;
+                    break;
+                default:
+                    break;
+            }
+            interval.base = value;
+            value = diff;
         }
-    } else {
-        interval.value = value;
-        interval.hasValue = true;
+
+        if (interval.hasValue) {
+            switch (mAggregationType) {
+                case ValueMetric::SUM:
+                    // for AVG, we add up and take average when flushing the bucket
+                case ValueMetric::AVG:
+                    interval.value += value;
+                    break;
+                case ValueMetric::MIN:
+                    interval.value = std::min(value, interval.value);
+                    break;
+                case ValueMetric::MAX:
+                    interval.value = std::max(value, interval.value);
+                    break;
+                default:
+                    break;
+            }
+        } else {
+            interval.value = value;
+            interval.hasValue = true;
+        }
+        interval.sampleSize += 1;
     }
-    interval.sampleSize += 1;
 
     // TODO: propgate proper values down stream when anomaly support doubles
-    long wholeBucketVal = interval.value.long_value;
+    long wholeBucketVal = multiIntervals[0].value.long_value;
     auto prev = mCurrentFullBucket.find(eventKey);
     if (prev != mCurrentFullBucket.end()) {
         wholeBucketVal += prev->second;
@@ -540,7 +566,9 @@
         VLOG("Skipping forward %lld buckets", (long long)numBucketsForward);
         // take base again in future good bucket.
         for (auto& slice : mCurrentSlicedBucket) {
-            slice.second.hasBase = false;
+            for (auto& interval : slice.second) {
+                interval.hasBase = false;
+            }
         }
     }
     VLOG("metric %lld: new bucket start time: %lld", (long long)mMetricId,
@@ -552,37 +580,38 @@
          (int)mCurrentSlicedBucket.size());
     int64_t fullBucketEndTimeNs = getCurrentBucketEndTimeNs();
 
-    ValueBucket info;
-    info.mBucketStartNs = mCurrentBucketStartTimeNs;
-    if (eventTimeNs < fullBucketEndTimeNs) {
-        info.mBucketEndNs = eventTimeNs;
-    } else {
-        info.mBucketEndNs = fullBucketEndTimeNs;
-    }
+    int64_t bucketEndTime = eventTimeNs < fullBucketEndTimeNs ? eventTimeNs : fullBucketEndTimeNs;
 
-    if (info.mBucketEndNs - mCurrentBucketStartTimeNs >= mMinBucketSizeNs) {
+    if (bucketEndTime - mCurrentBucketStartTimeNs >= mMinBucketSizeNs) {
         // The current bucket is large enough to keep.
         for (const auto& slice : mCurrentSlicedBucket) {
-            if (slice.second.hasValue) {
-                // skip the output if the diff is zero
-                if (mSkipZeroDiffOutput && mUseDiff && slice.second.value.isZero()) {
-                    continue;
+            ValueBucket bucket;
+            bucket.mBucketStartNs = mCurrentBucketStartTimeNs;
+            bucket.mBucketEndNs = bucketEndTime;
+            for (const auto& interval : slice.second) {
+                if (interval.hasValue) {
+                    // skip the output if the diff is zero
+                    if (mSkipZeroDiffOutput && mUseDiff && interval.value.isZero()) {
+                        continue;
+                    }
+                    bucket.valueIndex.push_back(interval.valueIndex);
+                    if (mAggregationType != ValueMetric::AVG) {
+                        bucket.values.push_back(interval.value);
+                    } else {
+                        double sum = interval.value.type == LONG ? (double)interval.value.long_value
+                                                                 : interval.value.double_value;
+                        bucket.values.push_back(Value((double)sum / interval.sampleSize));
+                    }
                 }
-                if (mAggregationType != ValueMetric::AVG) {
-                    info.value = slice.second.value;
-                } else {
-                    double sum = slice.second.value.type == LONG
-                                         ? (double)slice.second.value.long_value
-                                         : slice.second.value.double_value;
-                    info.value.setDouble(sum / slice.second.sampleSize);
-                }
-                // it will auto create new vector of ValuebucketInfo if the key is not found.
+            }
+            // It will auto-create a new vector of ValueBucket if the key is not found.
+            if (bucket.valueIndex.size() > 0) {
                 auto& bucketList = mPastBuckets[slice.first];
-                bucketList.push_back(info);
+                bucketList.push_back(bucket);
             }
         }
     } else {
-        mSkippedBuckets.emplace_back(info.mBucketStartNs, info.mBucketEndNs);
+        mSkippedBuckets.emplace_back(mCurrentBucketStartTimeNs, bucketEndTime);
     }
 
     if (eventTimeNs > fullBucketEndTimeNs) {  // If full bucket, send to anomaly tracker.
@@ -590,7 +619,7 @@
         if (mCurrentFullBucket.size() > 0) {
             for (const auto& slice : mCurrentSlicedBucket) {
                 // TODO: fix this when anomaly can accept double values
-                mCurrentFullBucket[slice.first] += slice.second.value.long_value;
+                mCurrentFullBucket[slice.first] += slice.second[0].value.long_value;
             }
             for (const auto& slice : mCurrentFullBucket) {
                 for (auto& tracker : mAnomalyTrackers) {
@@ -606,7 +635,7 @@
                 for (auto& tracker : mAnomalyTrackers) {
                     if (tracker != nullptr) {
                         // TODO: fix this when anomaly can accept double values
-                        tracker->addPastBucket(slice.first, slice.second.value.long_value,
+                        tracker->addPastBucket(slice.first, slice.second[0].value.long_value,
                                                mCurrentBucketNum);
                     }
                 }
@@ -616,14 +645,16 @@
         // Accumulate partial bucket.
         for (const auto& slice : mCurrentSlicedBucket) {
             // TODO: fix this when anomaly can accept double values
-            mCurrentFullBucket[slice.first] += slice.second.value.long_value;
+            mCurrentFullBucket[slice.first] += slice.second[0].value.long_value;
         }
     }
 
     // Reset counters
     for (auto& slice : mCurrentSlicedBucket) {
-        slice.second.hasValue = false;
-        slice.second.sampleSize = 0;
+        for (auto& interval : slice.second) {
+            interval.hasValue = false;
+            interval.sampleSize = 0;
+        }
     }
 }
 
diff --git a/cmds/statsd/src/metrics/ValueMetricProducer.h b/cmds/statsd/src/metrics/ValueMetricProducer.h
index 3416afe..c682a66 100644
--- a/cmds/statsd/src/metrics/ValueMetricProducer.h
+++ b/cmds/statsd/src/metrics/ValueMetricProducer.h
@@ -34,7 +34,8 @@
 struct ValueBucket {
     int64_t mBucketStartNs;
     int64_t mBucketEndNs;
-    Value value;
+    std::vector<int> valueIndex;
+    std::vector<Value> values;
 };
 
 class ValueMetricProducer : public virtual MetricProducer, public virtual PullDataReceiver {
@@ -97,7 +98,8 @@
 
     sp<StatsPullerManager> mPullerManager;
 
-    const FieldMatcher mValueField;
+    // Value fields for matching.
+    std::vector<Matcher> mFieldMatchers;
 
     // tagId for pulled data. -1 if this is not pulled
     const int mPullTagId;
@@ -105,10 +107,10 @@
     // if this is pulled metric
     const bool mIsPulled;
 
-    int mField;
-
-    // internal state of a bucket.
+    // Internal state of an ongoing aggregation bucket.
     typedef struct {
+        // Index in multi-value aggregation.
+        int valueIndex;
         // Holds current base value of the dimension. Take diff and update if necessary.
         Value base;
         // Whether there is a base to diff to.
@@ -122,7 +124,7 @@
         bool hasValue;
     } Interval;
 
-    std::unordered_map<MetricDimensionKey, Interval> mCurrentSlicedBucket;
+    std::unordered_map<MetricDimensionKey, std::vector<Interval>> mCurrentSlicedBucket;
 
     std::unordered_map<MetricDimensionKey, int64_t> mCurrentFullBucket;
 
diff --git a/cmds/statsd/src/stats_log.proto b/cmds/statsd/src/stats_log.proto
index 3b42b31..5d0f3d1 100644
--- a/cmds/statsd/src/stats_log.proto
+++ b/cmds/statsd/src/stats_log.proto
@@ -108,12 +108,22 @@
 
   optional int64 value = 3 [deprecated = true];
 
-  oneof values {
-      int64 value_long = 7;
+  oneof single_value {
+      int64 value_long = 7 [deprecated = true];
 
-      double value_double = 8;
+      double value_double = 8 [deprecated = true];
   }
 
+  message Value {
+      optional int32 index = 1;
+      oneof value {
+          int64 value_long = 2;
+          double value_double = 3;
+      }
+  }
+
+  repeated Value values = 9;
+
   optional int64 bucket_num = 4;
 
   optional int64 start_bucket_elapsed_millis = 5;
diff --git a/cmds/statsd/tests/e2e/ValueMetric_pull_e2e_test.cpp b/cmds/statsd/tests/e2e/ValueMetric_pull_e2e_test.cpp
index 095b401..abf1ab1 100644
--- a/cmds/statsd/tests/e2e/ValueMetric_pull_e2e_test.cpp
+++ b/cmds/statsd/tests/e2e/ValueMetric_pull_e2e_test.cpp
@@ -142,23 +142,23 @@
 
     EXPECT_EQ(baseTimeNs + 2 * bucketSizeNs, data.bucket_info(0).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 3 * bucketSizeNs, data.bucket_info(0).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(0).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(0).values_size());
 
     EXPECT_EQ(baseTimeNs + 3 * bucketSizeNs, data.bucket_info(1).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 4 * bucketSizeNs, data.bucket_info(1).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(1).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(1).values_size());
 
     EXPECT_EQ(baseTimeNs + 4 * bucketSizeNs, data.bucket_info(2).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 5 * bucketSizeNs, data.bucket_info(2).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(2).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(2).values_size());
 
     EXPECT_EQ(baseTimeNs + 6 * bucketSizeNs, data.bucket_info(3).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 7 * bucketSizeNs, data.bucket_info(3).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(3).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(3).values_size());
 
     EXPECT_EQ(baseTimeNs + 7 * bucketSizeNs, data.bucket_info(4).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 8 * bucketSizeNs, data.bucket_info(4).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(4).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(4).values_size());
 }
 
 TEST(ValueMetricE2eTest, TestPulledEvents_LateAlarm) {
@@ -249,15 +249,15 @@
 
     EXPECT_EQ(baseTimeNs + 2 * bucketSizeNs, data.bucket_info(0).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 3 * bucketSizeNs, data.bucket_info(0).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(0).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(0).values_size());
 
     EXPECT_EQ(baseTimeNs + 8 * bucketSizeNs, data.bucket_info(1).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 9 * bucketSizeNs, data.bucket_info(1).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(1).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(1).values_size());
 
     EXPECT_EQ(baseTimeNs + 9 * bucketSizeNs, data.bucket_info(2).start_bucket_elapsed_nanos());
     EXPECT_EQ(baseTimeNs + 10 * bucketSizeNs, data.bucket_info(2).end_bucket_elapsed_nanos());
-    EXPECT_TRUE(data.bucket_info(2).has_value_long());
+    EXPECT_EQ(1, data.bucket_info(2).values_size());
 }
 
 #else
diff --git a/cmds/statsd/tests/metrics/ValueMetricProducer_test.cpp b/cmds/statsd/tests/metrics/ValueMetricProducer_test.cpp
index ffa07081..ee225df 100644
--- a/cmds/statsd/tests/metrics/ValueMetricProducer_test.cpp
+++ b/cmds/statsd/tests/metrics/ValueMetricProducer_test.cpp
@@ -140,7 +140,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
 
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(11, curInterval.base.long_value);
@@ -157,7 +157,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
 
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(23, curInterval.base.long_value);
@@ -165,7 +165,7 @@
     EXPECT_EQ(12, curInterval.value.long_value);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(8, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(8, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 
     allData.clear();
     event = make_shared<LogEvent>(tagId, bucket4StartTimeNs + 1);
@@ -175,7 +175,7 @@
     allData.push_back(event);
     valueProducer.onDataPulled(allData);
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
 
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(36, curInterval.base.long_value);
@@ -183,7 +183,7 @@
     EXPECT_EQ(13, curInterval.value.long_value);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(2UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(12, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(12, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 /*
@@ -217,7 +217,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
 
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(11, curInterval.base.long_value);
@@ -233,7 +233,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(10, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
@@ -248,14 +248,14 @@
     allData.push_back(event);
     valueProducer.onDataPulled(allData);
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(36, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
     EXPECT_EQ(26, curInterval.value.long_value);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(10, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(10, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 /*
@@ -288,7 +288,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
 
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(11, curInterval.base.long_value);
@@ -304,7 +304,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(10, curInterval.base.long_value);
     EXPECT_EQ(false, curInterval.hasValue);
@@ -318,7 +318,7 @@
     allData.push_back(event);
     valueProducer.onDataPulled(allData);
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(36, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
@@ -370,7 +370,7 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     // startUpdated:false sum:0 start:100
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(100, curInterval.base.long_value);
@@ -388,7 +388,7 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(110, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
@@ -399,7 +399,7 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasValue);
     EXPECT_EQ(10, curInterval.value.long_value);
     EXPECT_EQ(false, curInterval.hasBase);
@@ -486,7 +486,7 @@
     valueProducer.notifyAppUpgrade(bucket2StartTimeNs + 150, "ANY.APP", 1, 1);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY].size());
     EXPECT_EQ(bucket2StartTimeNs + 150, valueProducer.mCurrentBucketStartTimeNs);
-    EXPECT_EQ(20L, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY][0].value.long_value);
+    EXPECT_EQ(20L, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY][0].values[0].long_value);
 
     allData.clear();
     event = make_shared<LogEvent>(tagId, bucket3StartTimeNs + 1);
@@ -497,7 +497,7 @@
     valueProducer.onDataPulled(allData);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY].size());
     EXPECT_EQ(bucket2StartTimeNs + 150, valueProducer.mCurrentBucketStartTimeNs);
-    EXPECT_EQ(20L, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY][0].value.long_value);
+    EXPECT_EQ(20L, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY][0].values[0].long_value);
 }
 
 TEST(ValueMetricProducerTest, TestPulledValueWithUpgradeWhileConditionFalse) {
@@ -545,7 +545,7 @@
     EXPECT_EQ(bucket2StartTimeNs-50, valueProducer.mCurrentBucketStartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY].size());
     EXPECT_EQ(bucketStartTimeNs, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY][0].mBucketStartNs);
-    EXPECT_EQ(20L, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY][0].value.long_value);
+    EXPECT_EQ(20L, valueProducer.mPastBuckets[DEFAULT_METRIC_DIMENSION_KEY][0].values[0].long_value);
     EXPECT_FALSE(valueProducer.mCondition);
 }
 
@@ -573,7 +573,7 @@
     valueProducer.onMatchedLogEvent(1 /*log matcher index*/, *event1);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(10, curInterval.value.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
 
@@ -581,13 +581,13 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(30, curInterval.value.long_value);
 
     valueProducer.flushIfNeededLocked(bucket3StartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(30, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(30, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 TEST(ValueMetricProducerTest, TestPushedEventsWithCondition) {
@@ -620,8 +620,8 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(20, curInterval.value.long_value);
 
     shared_ptr<LogEvent> event3 = make_shared<LogEvent>(tagId, bucketStartTimeNs + 30);
@@ -632,7 +632,7 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(50, curInterval.value.long_value);
 
     valueProducer.onConditionChangedLocked(false, bucketStartTimeNs + 35);
@@ -644,13 +644,13 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(50, curInterval.value.long_value);
 
     valueProducer.flushIfNeededLocked(bucket3StartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(50, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(50, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 TEST(ValueMetricProducerTest, TestAnomalyDetection) {
@@ -765,7 +765,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
 
     // startUpdated:true sum:0 start:11
     EXPECT_EQ(true, curInterval.hasBase);
@@ -783,7 +783,7 @@
     valueProducer.onDataPulled(allData);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     // tartUpdated:false sum:12
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(23, curInterval.base.long_value);
@@ -803,14 +803,14 @@
     allData.push_back(event);
     valueProducer.onDataPulled(allData);
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     // startUpdated:false sum:12
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(36, curInterval.base.long_value);
     EXPECT_EQ(false, curInterval.hasValue);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(12, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(12, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 /*
@@ -860,7 +860,7 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(100, curInterval.base.long_value);
     EXPECT_EQ(false, curInterval.hasValue);
@@ -868,7 +868,7 @@
 
     // pull on bucket boundary come late, condition change happens before it
     valueProducer.onConditionChanged(false, bucket2StartTimeNs + 1);
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(false, curInterval.hasBase);
     EXPECT_EQ(true, curInterval.hasValue);
     EXPECT_EQ(20, curInterval.value.long_value);
@@ -885,7 +885,7 @@
     allData.push_back(event);
     valueProducer.onDataPulled(allData);
 
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(false, curInterval.hasBase);
     EXPECT_EQ(true, curInterval.hasValue);
     EXPECT_EQ(20, curInterval.value.long_value);
@@ -950,7 +950,7 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     // startUpdated:false sum:0 start:100
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(100, curInterval.base.long_value);
@@ -959,7 +959,7 @@
 
     // pull on bucket boundary come late, condition change happens before it
     valueProducer.onConditionChanged(false, bucket2StartTimeNs + 1);
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(false, curInterval.hasBase);
     EXPECT_EQ(true, curInterval.hasValue);
     EXPECT_EQ(20, curInterval.value.long_value);
@@ -967,7 +967,7 @@
 
     // condition changed to true again, before the pull alarm is delivered
     valueProducer.onConditionChanged(true, bucket2StartTimeNs + 25);
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(130, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
@@ -984,7 +984,7 @@
     allData.push_back(event);
     valueProducer.onDataPulled(allData);
 
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(130, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
@@ -1017,7 +1017,7 @@
     valueProducer.onMatchedLogEvent(1 /*log matcher index*/, *event1);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(10, curInterval.value.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
 
@@ -1025,13 +1025,13 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(10, curInterval.value.long_value);
 
     valueProducer.flushIfNeededLocked(bucket3StartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(10, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(10, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 TEST(ValueMetricProducerTest, TestPushedAggregateMax) {
@@ -1059,7 +1059,7 @@
     valueProducer.onMatchedLogEvent(1 /*log matcher index*/, *event1);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(10, curInterval.value.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
 
@@ -1067,13 +1067,13 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(20, curInterval.value.long_value);
 
     valueProducer.flushIfNeededLocked(bucket3StartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(20, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(20, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 TEST(ValueMetricProducerTest, TestPushedAggregateAvg) {
@@ -1102,7 +1102,7 @@
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
     ValueMetricProducer::Interval curInterval;
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(10, curInterval.value.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
     EXPECT_EQ(1, curInterval.sampleSize);
@@ -1111,14 +1111,14 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(25, curInterval.value.long_value);
     EXPECT_EQ(2, curInterval.sampleSize);
 
     valueProducer.flushIfNeededLocked(bucket3StartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_TRUE(std::abs(valueProducer.mPastBuckets.begin()->second.back().value.double_value - 12.5) < epsilon);
+    EXPECT_TRUE(std::abs(valueProducer.mPastBuckets.begin()->second.back().values[0].double_value - 12.5) < epsilon);
 }
 
 TEST(ValueMetricProducerTest, TestPushedAggregateSum) {
@@ -1146,7 +1146,7 @@
     valueProducer.onMatchedLogEvent(1 /*log matcher index*/, *event1);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(10, curInterval.value.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
 
@@ -1154,13 +1154,13 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(25, curInterval.value.long_value);
 
     valueProducer.flushIfNeededLocked(bucket3StartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(25, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(25, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 TEST(ValueMetricProducerTest, TestSkipZeroDiffOutput) {
@@ -1189,7 +1189,7 @@
     valueProducer.onMatchedLogEvent(1 /*log matcher index*/, *event1);
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    ValueMetricProducer::Interval curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(10, curInterval.base.long_value);
     EXPECT_EQ(false, curInterval.hasValue);
@@ -1198,7 +1198,7 @@
 
     // has one slice
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasValue);
     EXPECT_EQ(5, curInterval.value.long_value);
 
@@ -1209,7 +1209,7 @@
     event3->init();
     valueProducer.onMatchedLogEvent(1 /*log matcher index*/, *event3);
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(15, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
@@ -1220,7 +1220,7 @@
     event4->init();
     valueProducer.onMatchedLogEvent(1 /*log matcher index*/, *event4);
     EXPECT_EQ(1UL, valueProducer.mCurrentSlicedBucket.size());
-    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second;
+    curInterval = valueProducer.mCurrentSlicedBucket.begin()->second[0];
     EXPECT_EQ(true, curInterval.hasBase);
     EXPECT_EQ(15, curInterval.base.long_value);
     EXPECT_EQ(true, curInterval.hasValue);
@@ -1228,7 +1228,7 @@
     valueProducer.flushIfNeededLocked(bucket3StartTimeNs);
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.size());
     EXPECT_EQ(1UL, valueProducer.mPastBuckets.begin()->second.size());
-    EXPECT_EQ(5, valueProducer.mPastBuckets.begin()->second.back().value.long_value);
+    EXPECT_EQ(5, valueProducer.mPastBuckets.begin()->second.back().values[0].long_value);
 }
 
 }  // namespace statsd