Merge "If a retry config is used that is not rescheduling allow it"
diff --git a/atest/test_runners/atest_tf_test_runner.py b/atest/test_runners/atest_tf_test_runner.py
index 3205948..1ccc795 100644
--- a/atest/test_runners/atest_tf_test_runner.py
+++ b/atest/test_runners/atest_tf_test_runner.py
@@ -17,6 +17,7 @@
 """
 
 from __future__ import print_function
+from collections import deque
 import errno
 import json
 import logging
@@ -42,7 +43,9 @@
 # EVENT_RE has groups for the name and the data. "." does not match \n.
 EVENT_RE = re.compile(r'^(?P<event_name>[A-Z_]+) (?P<json_data>{.*})(?:\n|$)')
 EVENT_NAMES = {'module_started': 'TEST_MODULE_STARTED',
+               'module_ended': 'TEST_MODULE_ENDED',
                'run_started': 'TEST_RUN_STARTED',
+               'run_ended': 'TEST_RUN_ENDED',
                # Next three are test-level events
                'test_started': 'TEST_STARTED',
                'test_failed': 'TEST_FAILED',
@@ -51,6 +54,11 @@
                # Invocation failure is broader than run failure.
                'run_failed': 'TEST_RUN_FAILED',
                'invocation_failed': 'INVOCATION_FAILED'}
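+# Maps each START event to the END event that must eventually close it.
+# _process_event() pushes START events onto a stack and checks each END
+# event against the most recent START event.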
+EVENT_PAIRS = {EVENT_NAMES['module_started']: EVENT_NAMES['module_ended'],
+               EVENT_NAMES['run_started']: EVENT_NAMES['run_ended'],
+               EVENT_NAMES['test_started']: EVENT_NAMES['test_ended']}
+START_EVENTS = list(EVENT_PAIRS.keys())
+END_EVENTS = list(EVENT_PAIRS.values())
 TEST_NAME_TEMPLATE = '%s#%s'
 EXEC_DEPENDENCIES = ('adb', 'aapt')
 
@@ -67,6 +75,9 @@
 TRADEFED_EXIT_MSG = ('TradeFed subprocess exited early with exit code=%s. '
                      'Use --verbose to see underlying TradeFed output.')
 
+EVENTS_NOT_BALANCED = ('Error: Saw %s Start event and %s End event. These '
+                       'should form a matched pair!')
+
 
 class AtestTradefedTestRunner(test_runner_base.TestRunnerBase):
     """TradeFed Test Runner class."""
@@ -270,6 +281,7 @@
         connection_state = CONNECTION_STATE.copy()
         conn.settimeout(None)
         buf = ''
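+        # Stack of START events that are still waiting for their matching
+        # END events.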
+        event_stack = deque()
         while True:
             logging.debug('Waiting to receive data')
             data = conn.recv(SOCKET_BUFFER)
@@ -287,7 +299,7 @@
                         event_name = match.group('event_name')
                         buf = buf[match.end():]
                         self._process_event(event_name, event_data, reporter,
-                                            connection_state)
+                                            connection_state, event_stack)
                         continue
                     break
             else:
@@ -295,7 +307,41 @@
                 conn.close()
                 break
 
-    def _process_event(self, event_name, event_data, reporter, state):
+    def _check_events_are_balanced(self, event_name, reporter, state,
+                                   event_stack):
+        """Check Start events and End events. They should be balanced.
+
+        If they are not balanced, print the error message in
+        state['last_failed'], then raise TradeFedExitError.
+
+        Args:
+            event_name: A string of the event name.
+            reporter: A ResultReporter instance.
+            state: A dict of the state of the test run.
+            event_stack: A collections.deque(stack) of the events for pairing
+                         START and END events.
+        Raises:
+            TradeFedExitError if we doesn't have a balance of START/END events.
+        """
+        start_event = event_stack.pop() if event_stack else None
+        if not start_event or EVENT_PAIRS[start_event] != event_name:
+            # Bubble up the failed trace when TEST_FAILED was seen but
+            # TEST_ENDED never arrived.
+            if state['last_failed'] and (start_event ==
+                                         EVENT_NAMES['test_started']):
+                reporter.process_test_result(test_runner_base.TestResult(
+                    runner_name=self.NAME,
+                    group_name=state['current_group'],
+                    test_name=state['last_failed']['name'],
+                    status=test_runner_base.FAILED_STATUS,
+                    details=state['last_failed']['trace'],
+                    runner_total=None,
+                    group_total=state['current_group_total']))
+            raise TradeFedExitError(EVENTS_NOT_BALANCED % (start_event,
+                                                           event_name))
+
+    def _process_event(self, event_name, event_data, reporter, state,
+                       event_stack):
         """Process the events of the test run and call reporter with results.
 
         Args:
@@ -303,8 +349,15 @@
             event_data: A dict of event data.
             reporter: A ResultReporter instance.
             state: A dict of the state of the test run.
+            event_stack: A collections.deque (stack) of the events for pairing
+                         START and END events.
         """
         logging.debug('Processing %s %s', event_name, event_data)
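+        # Push START events onto the stack; every END event must match the
+        # most recently started (innermost) event.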
+        if event_name in START_EVENTS:
+            event_stack.append(event_name)
+        elif event_name in END_EVENTS:
+            self._check_events_are_balanced(event_name, reporter, state,
+                                            event_stack)
         if event_name == EVENT_NAMES['module_started']:
             state['current_group'] = event_data['moduleName']
             state['last_failed'] = None
@@ -349,6 +402,7 @@
             if state['last_failed'] and name == state['last_failed']['name']:
                 status = test_runner_base.FAILED_STATUS
                 trace = state['last_failed']['trace']
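+                # Clear the failure so _check_events_are_balanced() does not
+                # report it a second time.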
+                state['last_failed'] = None
             else:
                 status = test_runner_base.PASSED_STATUS
                 trace = None
diff --git a/atest/test_runners/atest_tf_test_runner_unittest.py b/atest/test_runners/atest_tf_test_runner_unittest.py
index 4157cd1..964731e 100755
--- a/atest/test_runners/atest_tf_test_runner_unittest.py
+++ b/atest/test_runners/atest_tf_test_runner_unittest.py
@@ -130,6 +130,17 @@
     ('INVOCATION_FAILED', {'cause': 'someInvocationFailureReason'})
 ]
 
+EVENTS_NOT_BALANCED_BEFORE_RAISE = [
+    ('TEST_MODULE_STARTED', {
+        'moduleContextFileName':'serial-util1146216{974}2772610436.ser',
+        'moduleName':'someTestModule'}),
+    ('TEST_RUN_STARTED', {'testCount': 2}),
+    ('TEST_STARTED', {'className':'someClassName', 'testName':'someTestName'}),
+    ('TEST_ENDED', {'className':'someClassName', 'testName':'someTestName'}),
+    ('TEST_STARTED', {'className':'someClassName', 'testName':'someTestName'}),
+    ('TEST_FAILED', {'className':'someClassName2', 'testName':'someTestName2',
+                     'trace': 'someTrace'}),
+]
 
 class AtestTradefedTestRunnerUnittests(unittest.TestCase):
     """Unit tests for atest_tf_test_runner.py"""
@@ -212,7 +223,7 @@
         socket_data.append('')
         mock_socket.recv.side_effect = socket_data
         self.tr._process_connection(mock_socket, 'fake reporter')
-        calls = [mock.call(name, data, 'fake reporter', mock.ANY)
+        calls = [mock.call(name, data, 'fake reporter', mock.ANY, mock.ANY)
                  for name, data in EVENTS_NORMAL]
         mock_pe.assert_has_calls(calls)
 
@@ -225,7 +236,7 @@
         socket_data = [squashed_events, '']
         mock_socket.recv.side_effect = socket_data
         self.tr._process_connection(mock_socket, 'fake reporter')
-        calls = [mock.call(name, data, 'fake reporter', mock.ANY)
+        calls = [mock.call(name, data, 'fake reporter', mock.ANY, mock.ANY)
                  for name, data in EVENTS_NORMAL]
         mock_pe.assert_has_calls(calls)
 
@@ -243,7 +254,7 @@
         socket_data.extend([socket_events[1][:-4], socket_events[1][-4:], ''])
         mock_socket.recv.side_effect = socket_data
         self.tr._process_connection(mock_socket, 'fake reporter')
-        calls = [mock.call(name, data, 'fake reporter', mock.ANY)
+        calls = [mock.call(name, data, 'fake reporter', mock.ANY, mock.ANY)
                  for name, data in module_events]
         mock_pe.assert_has_calls(calls)
 
@@ -251,8 +262,9 @@
         """Test _process_event method for normal test results."""
         mock_reporter = mock.Mock()
         state = atf_tr.CONNECTION_STATE.copy()
+        stack = []
         for name, data in EVENTS_NORMAL:
-            self.tr._process_event(name, data, mock_reporter, state)
+            self.tr._process_event(name, data, mock_reporter, state, stack)
         call1 = mock.call(test_runner_base.TestResult(
             runner_name=self.tr.NAME,
             group_name='someTestModule',
@@ -277,8 +289,9 @@
         """Test _process_event method run failure."""
         mock_reporter = mock.Mock()
         state = atf_tr.CONNECTION_STATE.copy()
+        stack = []
         for name, data in EVENTS_RUN_FAILURE:
-            self.tr._process_event(name, data, mock_reporter, state)
+            self.tr._process_event(name, data, mock_reporter, state, stack)
         call = mock.call(test_runner_base.TestResult(
             runner_name=self.tr.NAME,
             group_name='someTestModule',
@@ -294,8 +307,9 @@
         """Test _process_event method with invocation failure."""
         mock_reporter = mock.Mock()
         state = atf_tr.CONNECTION_STATE.copy()
+        stack = []
         for name, data in EVENTS_INVOCATION_FAILURE:
-            self.tr._process_event(name, data, mock_reporter, state)
+            self.tr._process_event(name, data, mock_reporter, state, stack)
         call = mock.call(test_runner_base.TestResult(
             runner_name=self.tr.NAME,
             group_name=None,
@@ -307,6 +321,38 @@
         ))
         mock_reporter.process_test_result.assert_has_calls([call])
 
+    def test_process_event_not_balanced(self):
+        """Test _process_event method with start/end event name not balanced."""
+        mock_reporter = mock.Mock()
+        state = atf_tr.CONNECTION_STATE.copy()
+        stack = []
+        for name, data in EVENTS_NOT_BALANCED_BEFORE_RAISE:
+            self.tr._process_event(name, data, mock_reporter, state, stack)
+        call = mock.call(test_runner_base.TestResult(
+            runner_name=self.tr.NAME,
+            group_name='someTestModule',
+            test_name='someClassName#someTestName',
+            status=test_runner_base.PASSED_STATUS,
+            details=None,
+            runner_total=None,
+            group_total=2
+        ))
+        mock_reporter.process_test_result.assert_has_calls([call])
+        # TEST_RUN_ENDED does not match the TEST_STARTED left on the stack,
+        # so _check_events_are_balanced() should raise TradeFedExitError.
+        name = 'TEST_RUN_ENDED'
+        self.assertRaises(atf_tr.TradeFedExitError,
+                          self.tr._check_events_are_balanced,
+                          name, mock_reporter, state, stack)
+        # TEST_MODULE_ENDED does not match the TEST_RUN_STARTED left on the
+        # stack, so _check_events_are_balanced() should raise TradeFedExitError.
+        name = 'TEST_MODULE_ENDED'
+        self.assertRaises(atf_tr.TradeFedExitError,
+                          self.tr._check_events_are_balanced,
+                          name, mock_reporter, state, stack)
+
     @mock.patch('atest_utils.get_result_server_args')
     def test_generate_run_command(self, mock_resultargs):
         """Test _generate_run_command method."""
diff --git a/atest_tradefed.sh b/atest_tradefed.sh
index bdbf981..6972cb8 100755
--- a/atest_tradefed.sh
+++ b/atest_tradefed.sh
@@ -24,7 +24,7 @@
 # installation.
 # Include any host-side dependency jars.
 if [ ! -z "${ANDROID_HOST_OUT}" ]; then
-    deps="compatibility-host-util.jar hosttestlib.jar cts-tradefed.jar vts-tradefed.jar host-libprotobuf-java-full.jar"
+    deps="compatibility-host-util.jar hosttestlib.jar cts-tradefed.jar vts-tradefed.jar host-libprotobuf-java-full.jar cts-dalvik-host-test-runner.jar"
     for dep in $deps; do
         if [ -f "${ANDROID_HOST_OUT}"/framework/$dep ]; then
             TF_PATH=${TF_PATH}:"${ANDROID_HOST_OUT}"/framework/$dep
diff --git a/prod-tests/src/com/android/performance/tests/AppInstallTest.java b/prod-tests/src/com/android/performance/tests/AppInstallTest.java
index ac85b3b..684722c 100644
--- a/prod-tests/src/com/android/performance/tests/AppInstallTest.java
+++ b/prod-tests/src/com/android/performance/tests/AppInstallTest.java
@@ -25,6 +25,7 @@
 import com.android.tradefed.testtype.IDeviceTest;
 import com.android.tradefed.testtype.IRemoteTest;
 import com.android.tradefed.util.AaptParser;
+import com.android.tradefed.util.RunUtil;
 import com.android.tradefed.util.proto.TfMetricProtoUtil;
 import java.io.File;
 import java.util.HashMap;
@@ -60,6 +61,10 @@
     )
     private boolean mUseDexMetadata = false;
 
+    @Option(name = "test-delay-between-installs",
+            description = "Delay in ms to wait for before starting the install test.")
+    private long mTestDelayBetweenInstalls = 5000;
+
     @Option(
         name = "test-dex-metadata-variant",
         description =
@@ -106,12 +111,9 @@
 
         // Delay test start time to give the background processes to finish.
         if (mTestStartDelay > 0) {
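+            // RunUtil.sleep() handles InterruptedException internally, so no
+            // try/catch is needed here.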
-            try {
-                Thread.sleep(mTestStartDelay);
-            } catch (InterruptedException e) {
-                CLog.e("Failed to delay test: %s", e.toString());
-            }
+            RunUtil.getDefault().sleep(mTestStartDelay);
         }
+
         Assert.assertFalse(mTestApkPath.isEmpty());
         File apkDir = new File(mTestApkPath);
         Assert.assertTrue(apkDir.isDirectory());
@@ -126,8 +128,11 @@
                 }
                 File file = new File(apkDir, fileName);
                 // Install app and measure time.
-                String installTime = Long.toString(installAndTime(file));
-                metrics.put(fileName, installTime);
+                long installTime = installAndTime(file);
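+                // installAndTime() returns -1 when the APK cannot be parsed
+                // or pushed; skip reporting a metric in that case.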
+                if (installTime > 0) {
+                    metrics.put(fileName, Long.toString(installTime));
+                }
+                RunUtil.getDefault().sleep(mTestDelayBetweenInstalls);
             }
         } finally {
             reportMetrics(listener, mTestLabel, metrics);
@@ -142,11 +147,16 @@
      */
     long installAndTime(File packageFile) throws DeviceNotAvailableException {
         AaptParser parser = AaptParser.parse(packageFile);
+        if (parser == null) {
+            CLog.e("Failed to parse %s", packageFile);
+            return -1;
+        }
         String packageName = parser.getPackageName();
 
         String remotePath = "/data/local/tmp/" + packageFile.getName();
         if (!mDevice.pushFile(packageFile, remotePath)) {
-            throw new RuntimeException("Failed to push " + packageFile.getAbsolutePath());
+            CLog.e("Failed to push %s", packageFile);
+            return -1;
         }
 
         String dmRemotePath = null;
@@ -154,7 +164,8 @@
             File dexMetadataFile = getDexMetadataFile(packageFile);
             dmRemotePath = "/data/local/tmp/" + dexMetadataFile.getName();
             if (!mDevice.pushFile(dexMetadataFile, dmRemotePath)) {
-                throw new RuntimeException("Failed to push " + dexMetadataFile.getAbsolutePath());
+                CLog.e("Failed to push %s", dexMetadataFile);
+                return -1;
             }
         }
 
diff --git a/pylintrc b/pylintrc
index ba97286..ccbc81f 100644
--- a/pylintrc
+++ b/pylintrc
@@ -20,6 +20,9 @@
 
 [DESIGN]
 
+# Maximum number of arguments for function / method
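+# (Raised so that _process_event, which now takes an event_stack argument,
+# stays within the limit.)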
+max-args=6
+
 # Maximum number of return / yield for function / method body
 max-returns=10
 
diff --git a/src/com/android/tradefed/device/DeviceManager.java b/src/com/android/tradefed/device/DeviceManager.java
index 1e35e7f..5531521 100644
--- a/src/com/android/tradefed/device/DeviceManager.java
+++ b/src/com/android/tradefed/device/DeviceManager.java
@@ -935,6 +935,9 @@
     @Override
     public List<DeviceDescriptor> listAllDevices() {
         final List<DeviceDescriptor> serialStates = new ArrayList<DeviceDescriptor>();
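+        // If the adb bridge needs a restart, the managed device list may be
+        // stale; return an empty list instead of stale descriptors.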
+        if (mAdbBridgeNeedRestart) {
+            return serialStates;
+        }
         for (IManagedTestDevice d : mManagedDeviceList) {
             DeviceDescriptor desc = buildDeviceDescriptor(d);
             if (desc != null) {
diff --git a/src/com/android/tradefed/postprocessor/AggregatePostProcessor.java b/src/com/android/tradefed/postprocessor/AggregatePostProcessor.java
new file mode 100644
index 0000000..f6746b3
--- /dev/null
+++ b/src/com/android/tradefed/postprocessor/AggregatePostProcessor.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.postprocessor;
+
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
+import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
+
+import com.google.common.collect.ListMultimap;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.DoubleSummaryStatistics;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * A metric aggregator that gives the min, max, mean, variance and standard deviation for numeric
+ * metrics collected during multiple-iteration test runs, treating them as doubles. Non-numeric
+ * metrics are ignored.
+ *
+ * <p>It parses metrics from their single-string representation, as metrics are currently passed
+ * this way.
+ */
+public class AggregatePostProcessor extends BasePostProcessor {
+    private static final String STATS_KEY_MIN = "min";
+    private static final String STATS_KEY_MAX = "max";
+    private static final String STATS_KEY_MEAN = "mean";
+    private static final String STATS_KEY_VAR = "var";
+    private static final String STATS_KEY_STDEV = "stdev";
+    // Separator for final upload
+    private static final String STATS_KEY_SEPARATOR = "-";
+
+    @Override
+    public Map<String, Metric.Builder> processRunMetrics(HashMap<String, Metric> rawMetrics) {
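+        // Per-run metrics are ignored here; this processor aggregates the
+        // per-test metrics in processAllTestMetrics().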
+        return new HashMap<String, Metric.Builder>();
+    }
+
+    @Override
+    public Map<String, Metric.Builder> processAllTestMetrics(
+            ListMultimap<String, Metric> allTestMetrics) {
+        // Aggregate final test metrics.
+        Map<String, Metric.Builder> aggregateMetrics = new HashMap<String, Metric.Builder>();
+        for (String key : allTestMetrics.keySet()) {
+            List<Metric> metrics = allTestMetrics.get(key);
+            List<Measurements> measures =
+                    metrics.stream().map(Metric::getMeasurements).collect(Collectors.toList());
+            // Parse metrics into a list of SingleString values, concatenating lists in the process.
+            List<String> rawValues =
+                    measures.stream()
+                            .map(Measurements::getSingleString)
+                            .map(
+                                    m -> {
+                                        // Split results; also deals with the case of empty results
+                                        // in a certain run
+                                        List<String> splitVals = Arrays.asList(m.split(",", 0));
+                                        if (splitVals.size() == 1 && splitVals.get(0).isEmpty()) {
+                                            return Collections.<String>emptyList();
+                                        }
+                                        return splitVals;
+                                    })
+                            .flatMap(Collection::stream)
+                            .map(String::trim)
+                            .collect(Collectors.toList());
+            // Do not report empty metrics
+            if (rawValues.isEmpty()) {
+                continue;
+            }
+            boolean areAllDoubles =
+                    rawValues
+                            .stream()
+                            .allMatch(
+                                    val -> {
+                                        try {
+                                            Double.parseDouble(val);
+                                            return true;
+                                        } catch (NumberFormatException e) {
+                                            return false;
+                                        }
+                                    });
+            if (areAllDoubles) {
+                List<Double> values =
+                        rawValues.stream().map(Double::parseDouble).collect(Collectors.toList());
+                HashMap<String, Double> stats = getStats(values);
+                for (String statKey : stats.keySet()) {
+                    Metric.Builder metricBuilder = Metric.newBuilder();
+                    metricBuilder
+                            .getMeasurementsBuilder()
+                            .setSingleString(String.format("%2.2f", stats.get(statKey)));
+                    aggregateMetrics.put(
+                            String.join(STATS_KEY_SEPARATOR, key, statKey), metricBuilder);
+                }
+            } else {
+                CLog.i("Metric %s is not numeric", key);
+            }
+        }
+        // Ignore the passed-in run metrics.
+        return aggregateMetrics;
+    }
+
+    private HashMap<String, Double> getStats(Iterable<Double> values) {
+        HashMap<String, Double> stats = new HashMap<>();
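+        // DoubleSummaryStatistics accumulates count, min, max and mean in a
+        // single pass.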
+        DoubleSummaryStatistics summaryStats = new DoubleSummaryStatistics();
+        for (Double value : values) {
+            summaryStats.accept(value);
+        }
+        Double mean = summaryStats.getAverage();
+        Double count = Long.valueOf(summaryStats.getCount()).doubleValue();
+        Double variance = 0.0;
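+        // Population variance: the mean of the squared deviations from the mean.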
+        for (Double value : values) {
+            variance += Math.pow(value - mean, 2) / count;
+        }
+        stats.put(STATS_KEY_MIN, summaryStats.getMin());
+        stats.put(STATS_KEY_MAX, summaryStats.getMax());
+        stats.put(STATS_KEY_MEAN, mean);
+        stats.put(STATS_KEY_VAR, variance);
+        stats.put(STATS_KEY_STDEV, Math.sqrt(variance));
+        return stats;
+    }
+}
diff --git a/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java b/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
index 8b79420..ff4fcdd 100644
--- a/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
+++ b/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
@@ -141,13 +141,13 @@
      *
      * &#064;Test
      * public void testFoo() {
-     *     metrics.put("key", "value");
-     *     metrics.put("key2", "value2");
+     *     metrics.addTestMetric("key", "value");
+     *     metrics.addTestMetric("key2", "value2");
      * }
      *
      * &#064;Test
      * public void testFoo2() {
-     *     metrics.put("key3", "value3");
+     *     metrics.addTestMetric("key3", "value3");
      * }
      * </pre>
      */
diff --git a/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java b/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
index c722798..fead3b3 100644
--- a/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
+++ b/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
@@ -118,12 +118,16 @@
     static final Pattern PATTERN_TEST_SKIPPED = Pattern.compile("skipped '.*");
     static final Pattern PATTERN_TEST_UNEXPECTED_SUCCESS = Pattern.compile("unexpected success");
 
-    static final Pattern PATTERN_ONE_LINE_RESULT = Pattern.compile(
-            "(\\S*) \\((\\S*)\\) ... (ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
+    static final Pattern PATTERN_ONE_LINE_RESULT =
+            Pattern.compile(
+                    "(\\S*) \\((\\S*)\\) \\.\\.\\. "
+                            + "(ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
     static final Pattern PATTERN_TWO_LINE_RESULT_FIRST = Pattern.compile(
             "(\\S*) \\((\\S*)\\)");
-    static final Pattern PATTERN_TWO_LINE_RESULT_SECOND = Pattern.compile(
-            "(.*) ... (ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
+    static final Pattern PATTERN_TWO_LINE_RESULT_SECOND =
+            Pattern.compile(
+                    "(.*) \\.\\.\\. "
+                            + "(ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
     static final Pattern PATTERN_FAIL_MESSAGE = Pattern.compile(
             "(FAIL|ERROR): (\\S*) \\((\\S*)\\)");
     static final Pattern PATTERN_RUN_SUMMARY = Pattern.compile(
@@ -131,10 +135,10 @@
 
     /** In case of error spanning over multiple lines. */
     static final Pattern MULTILINE_RESULT_WITH_WARNING =
-            Pattern.compile("(.*) ... (.*)", Pattern.DOTALL);
+            Pattern.compile("(.*) \\.\\.\\. (.*)", Pattern.DOTALL);
 
     static final Pattern MULTILINE_FINAL_RESULT_WITH_WARNING =
-            Pattern.compile("(.*) ... (.*)ok(.*)", Pattern.DOTALL);
+            Pattern.compile("(.*) \\.\\.\\. (.*)ok(.*)", Pattern.DOTALL);
 
     static final Pattern PATTERN_RUN_RESULT = Pattern.compile("(OK|FAILED).*");
 
diff --git a/tests/res/testtype/python_output2.txt b/tests/res/testtype/python_output2.txt
new file mode 100644
index 0000000..8cbf0ef
--- /dev/null
+++ b/tests/res/testtype/python_output2.txt
@@ -0,0 +1,220 @@
+No handlers could be found for logger "oauth2client.contrib.multistore_file"
+testAddData (public.report_test.ReportTest)
+test AddData. ... ok
+testAddError (public.report_test.ReportTest)
+test AddError. ... ok
+testSetStatus (public.report_test.ReportTest)
+test SetStatus. ... ok
+testCleanup (public.device_driver_test.DeviceDriverTest)
+Test Cleanup. ... ok
+testCreateAndroidVirtualDevices (public.device_driver_test.DeviceDriverTest)
+Test CreateAndroidVirtualDevices. ... ok
+testCreateAndroidVirtualDevicesInternalIP (public.device_driver_test.DeviceDriverTest)
+Test CreateAndroidVirtualDevices with internal IP. ... ok
+testDeleteAndroidVirtualDevices (public.device_driver_test.DeviceDriverTest)
+Test DeleteAndroidVirtualDevices. ... ok
+testLoadConfigFails (public.config_test.AcloudConfigManagerTest)
+Test loading a bad file. ... ok
+testLoadInternalConfig (public.config_test.AcloudConfigManagerTest)
+Test loading internal config. ... ok
+testLoadUserConfig (public.config_test.AcloudConfigManagerTest)
+Test loading user config. ... ok
+testLoadUserConfigLogic (public.config_test.AcloudConfigManagerTest)
+Test load user config logic. ... ok
+testOverrideWithHWProperty (public.config_test.AcloudConfigManagerTest)
+Test override hw property by flavor type. ... ok
+testCreateDevices (public.actions.create_cuttlefish_action_test.CreateCuttlefishActionTest)
+Test CreateDevices. ... ok
+testCreateDevices (public.actions.create_goldfish_action_test.CreateGoldfishActionTest)
+Tests CreateDevices. ... ok
+testCreateDevicesWithoutBuildId (public.actions.create_goldfish_action_test.CreateGoldfishActionTest)
+Test CreateDevices when emulator sys image build id is not provided. ... ok
+testCreateDevicesWithoutEmulatorBuildId (public.actions.create_goldfish_action_test.CreateGoldfishActionTest)
+Test CreateDevices when emulator build id is not provided. ... ok
+testCreateDevices (public.actions.common_operations_test.CommonOperationsTest)
+Test Create Devices. ... ok
+testCreateDevicesInternalIP (public.actions.common_operations_test.CommonOperationsTest)
+Test Create Devices and report internal IP. ... ok
+testDevicePoolCreateDevices (public.actions.common_operations_test.CommonOperationsTest)
+Test Device Pool Create Devices. ... ok
+testParseHWPropertyStr (create.create_common_test.CreateCommonTest)
+Test ParseHWPropertyArgs. ... ok
+testProcessHWPropertyWithInvalidArgs (create.create_common_test.CreateCommonTest)
+Test ParseHWPropertyArgs with invalid args. ... ok
+testGetBranchFromRepo (create.avd_spec_test.AvdSpecTest)
+Test get branch name from repo info. ... ok
+testGetBuildTarget (create.avd_spec_test.AvdSpecTest)
+Test get build target name. ... ok
+testParseHWPropertyStr (create.avd_spec_test.AvdSpecTest)
+Test _ParseHWPropertyStr. ... ok
+testProcessHWPropertyWithInvalidArgs (create.avd_spec_test.AvdSpecTest)
+Test _ProcessHWPropertyArgs with invalid args. ... ok
+testProcessImageArgs (create.avd_spec_test.AvdSpecTest)
+Test process image source. ... ok
+testProcessLocalImageArgs (create.avd_spec_test.AvdSpecTest)
+Test process args.local_image. ... ok
+testAddSshRsa (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AddSshRsa.. ... ok
+testAddSshRsaInvalidKey (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AddSshRsa.. ... ok
+testAttachAccelerator (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AttachAccelerator. ... ok
+testAttachDisk (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AttachDisk. ... ok
+testBatchExecuteOnInstances (internal.lib.gcompute_client_test.ComputeClientTest)
+Test BatchExecuteOnInstances. ... ok
+testCheckImageExistsFalse (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CheckImageExists return False. ... ok
+testCheckImageExistsTrue (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CheckImageExists return True. ... ok
+testCompareMachineSizeBadMetric (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize with bad metric. ... ok
+testCompareMachineSizeEqual (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize where two machine sizes are equal. ... ok
+testCompareMachineSizeLarge (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize where the first one is larger. ... ok
+testCompareMachineSizeSmall (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize where the first one is smaller. ... ok
+testCreateDiskWithNoSourceProject (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images with no set project. ... ok
+testCreateDiskWithProject (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images using a set project. ... ok
+testCreateDiskWithTypeSSD (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images using standard. ... ok
+testCreateDiskWithTypeStandard (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images using standard. ... ok
+testCreateImageFail (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage fails. ... ok
+testCreateImageRaiseDriverErrorWithInvalidInput (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with valid input. ... ok
+testCreateImageRaiseDriverErrorWithValidInput (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with valid input. ... ok
+testCreateImageWithSourceDisk (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with src disk. ... ok
+testCreateImageWithSourceDiskAndLabel (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with src disk and label. ... ok
+testCreateImageWithSourceURI (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with src uri. ... ok
+testCreateInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateInstance. ... ok
+testCreateInstanceWithGpu (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateInstance with a GPU parameter not set to None. ... ok
+testDeleteDisks (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteDisks. ... ok
+testDeleteImage (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteImage. ... ok
+testDeleteImages (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteImages. ... ok
+testDeleteInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteInstance. ... ok
+testDeleteInstances (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteInstances. ... ok
+testDetachDisk (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DetachDisk. ... ok
+testGetImage (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetImage. ... ok
+testGetImageOther (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetImage with other project. ... ok
+testGetInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetInstance. ... ok
+testGetInstanceNamesByIPs (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetInstanceNamesByIPs. ... ok
+testGetMachineType (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetMachineType. ... ok
+testGetOperationStatusError (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus failed. ... ok
+testGetOperationStatusGlobal (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus for global. ... ok
+testGetOperationStatusRegion (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus for region. ... ok
+testGetOperationStatusZone (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus for zone. ... ok
+testGetSerialPortOutput (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetSerialPortOutput. ... ok
+testGetSerialPortOutputFail (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetSerialPortOutputFail. ... ok
+testListImages (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ListImages. ... ok
+testListImagesFromExternalProject (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ListImages which accepts different project. ... ok
+testListInstances (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ListInstances. ... ok
+testResetInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ResetInstance. ... ok
+testRetryOnFingerPrintError (internal.lib.gcompute_client_test.ComputeClientTest)
+Test RetryOnFingerPrintError. ... ok
+testSetImageLabel (internal.lib.gcompute_client_test.ComputeClientTest)
+Test SetImageLabel. ... ok
+testWaitOnOperation (internal.lib.gcompute_client_test.ComputeClientTest)
+Test WaitOnOperation. ... ok
+testCreateInstance (internal.lib.cvd_compute_client_test.CvdComputeClientTest)
+Test CreateInstance. ... ok
+testCreatePublicKeyAreCreated (internal.lib.utils_test.UtilsTest)
+Test when the PublicKey created. ... ok
+testCreateSshKeyPairKeyAlreadyExists (internal.lib.utils_test.UtilsTest)
+Test when the key pair already exists. ... ok
+testCreateSshKeyPairKeyAreCreated (internal.lib.utils_test.UtilsTest)
+Test when the key pair created. ... ok
+testRetry (internal.lib.utils_test.UtilsTest)
+Test Retry. ... ok
+testRetryExceptionType (internal.lib.utils_test.UtilsTest)
+Test RetryExceptionType function. ... ok
+testTempDirOrininalErrorRaised (internal.lib.utils_test.UtilsTest)
+Test original error is raised even if tmp dir deletion failed. ... ok
+testTempDirWhenDeleteEncounterError (internal.lib.utils_test.UtilsTest)
+Test create a temp dir and encoutered error during deletion. ... ok
+testTempDirWhenDeleteTempDirNoLongerExist (internal.lib.utils_test.UtilsTest)
+Test create a temp dir and dir no longer exists during deletion. ... ok
+testBatchExecute (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test BatchExecute. ... ok
+testExecuteWithRetry (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test Execute is called and retries are triggered. ... ok
+testInitResourceHandle (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test InitResourceHandle. ... ok
+testListWithMultiPages (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test ListWithMultiPages. ... ok
+testCopyTo (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test CopyTo. ... ok
+testCopyToWithRetry (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test CopyTo with retry. ... ok
+testDownloadArtifact (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test DownloadArtifact. ... ok
+testDownloadArtifactOSError (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test DownloadArtifact when OSError is raised. ... ok
+testGetBranch (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test GetBuild. ... ok
+testGetLKGB (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test GetLKGB. ... ok
+testCheckBoot (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CheckBoot. ... ok
+testCheckMachineSizeDoesNotMeetRequirement (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CheckMachineSize when machine size does not meet requirement. ... ok
+testCheckMachineSizeMeetsRequirement (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CheckMachineSize when machine size meets requirement. ... ok
+testCreateImage (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CreateImage. ... ok
+testCreateInstance (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CreateInstance. ... ok
+testCreateInstance (internal.lib.goldfish_compute_client_test.GoldfishComputeClientTest)
+Test CreateInstance. ... ok
+testDelete (internal.lib.gstorage_client_test.StorageClientTest)
+Test Delete. ... ok
+testDeleteMultipleFiles (internal.lib.gstorage_client_test.StorageClientTest)
+Test Delete multiple files. ... ok
+testGet (internal.lib.gstorage_client_test.StorageClientTest)
+Test Get. ... ok
+testGetUrl (internal.lib.gstorage_client_test.StorageClientTest)
+Test GetUrl. ... ok
+testGetUrlNotFound (internal.lib.gstorage_client_test.StorageClientTest)
+Test GetUrl when object is not found. ... ok
+testList (internal.lib.gstorage_client_test.StorageClientTest)
+Test List. ... ok
+testUpload (internal.lib.gstorage_client_test.StorageClientTest)
+Test Upload. ... ok
+testUploadOSError (internal.lib.gstorage_client_test.StorageClientTest)
+Test Upload when OSError is raised. ... ok
+
+----------------------------------------------------------------------
+Ran 107 tests in 0.295s
+
+OK
diff --git a/tests/src/com/android/tradefed/UnitTests.java b/tests/src/com/android/tradefed/UnitTests.java
index 5ff7578..17af8b2 100644
--- a/tests/src/com/android/tradefed/UnitTests.java
+++ b/tests/src/com/android/tradefed/UnitTests.java
@@ -103,6 +103,7 @@
 import com.android.tradefed.log.HistoryLoggerTest;
 import com.android.tradefed.log.LogRegistryTest;
 import com.android.tradefed.log.TerribleFailureEmailHandlerTest;
+import com.android.tradefed.postprocessor.AggregatePostProcessorTest;
 import com.android.tradefed.postprocessor.AveragePostProcessorTest;
 import com.android.tradefed.postprocessor.BasePostProcessorTest;
 import com.android.tradefed.result.BugreportCollectorTest;
@@ -432,6 +433,7 @@
     TerribleFailureEmailHandlerTest.class,
 
     // postprocessor
+    AggregatePostProcessorTest.class,
     AveragePostProcessorTest.class,
     BasePostProcessorTest.class,
 
diff --git a/tests/src/com/android/tradefed/postprocessor/AggregatePostProcessorTest.java b/tests/src/com/android/tradefed/postprocessor/AggregatePostProcessorTest.java
new file mode 100644
index 0000000..d271328
--- /dev/null
+++ b/tests/src/com/android/tradefed/postprocessor/AggregatePostProcessorTest.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.postprocessor;
+
+import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ListMultimap;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.util.Map;
+
+/** Unit tests for {@link AggregatePostProcessor} */
+@RunWith(JUnit4.class)
+public class AggregatePostProcessorTest {
+
+    private static final String TEST_CLASS = "test.class";
+    private static final String TEST_NAME = "test.name";
+
+    private static final Integer TEST_ITERATIONS = 3;
+
+    // Upload key suffixes for each aggregate metric
+    private static final String STATS_KEY_MIN = "min";
+    private static final String STATS_KEY_MAX = "max";
+    private static final String STATS_KEY_MEAN = "mean";
+    private static final String STATS_KEY_VAR = "var";
+    private static final String STATS_KEY_STDEV = "stdev";
+    // Separator for final upload
+    private static final String STATS_KEY_SEPARATOR = "-";
+
+    private AggregatePostProcessor mCollector;
+
+    @Before
+    public void setUp() {
+        mCollector = new AggregatePostProcessor();
+    }
+
+    /** Test correct aggregation of singular double metrics. */
+    @Test
+    public void testSingularDoubleMetric() {
+        // Singular double metrics test: Sample results and expected aggregate metric values.
+        final String singularDoubleKey = "singular_double";
+        final ImmutableList<String> singularDoubleMetrics = ImmutableList.of("1.1", "2", "2.9");
+        final ImmutableMap<String, String> singularDoubleStats =
+                ImmutableMap.of(
+                        STATS_KEY_MIN, "1.10",
+                        STATS_KEY_MAX, "2.90",
+                        STATS_KEY_MEAN, "2.00",
+                        STATS_KEY_VAR, "0.54",
+                        STATS_KEY_STDEV, "0.73");
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (Integer i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString(singularDoubleMetrics.get(i));
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(singularDoubleKey, currentTestMetric);
+        }
+
+        // Test that the correct aggregate metrics are returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MIN)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_MIN),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MIN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MAX)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_MAX),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MAX))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MEAN)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_MEAN),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MEAN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_VAR)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_VAR),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_VAR))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_STDEV)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_STDEV),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_STDEV))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+    }
+
+    /** Test correct aggregation of list double metrics. */
+    @Test
+    public void testListDoubleMetric() {
+        // List double metrics test: Sample results and expected aggregate metric values.
+        final String listDoubleKey = "list_double";
+        final ImmutableList<String> listDoubleMetrics =
+                ImmutableList.of("1.1, 2.2", "", "1.5, 2.5, 1.9, 2.9");
+        final ImmutableMap<String, String> listDoubleStats =
+                ImmutableMap.of(
+                        STATS_KEY_MIN, "1.10",
+                        STATS_KEY_MAX, "2.90",
+                        STATS_KEY_MEAN, "2.02",
+                        STATS_KEY_VAR, "0.36",
+                        STATS_KEY_STDEV, "0.60");
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (Integer i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString(listDoubleMetrics.get(i));
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(listDoubleKey, currentTestMetric);
+        }
+
+        // Test that the correct aggregate metrics are returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_MIN)));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_MIN),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_MIN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_MAX)));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_MAX),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_MAX))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_MEAN)));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_MEAN),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_MEAN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_VAR)));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_VAR),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_VAR))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_STDEV)));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_STDEV),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_STDEV))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+    }
+
+    /** Test that non-numeric metric does not show up in the reported results. */
+    @Test
+    public void testNonNumericMetric() {
+        // Non-numeric metrics test: Sample results; should not show up in aggregate metrics
+        final String nonNumericKey = "non_numeric";
+        final ImmutableList<String> nonNumericMetrics = ImmutableList.of("1", "success", "failed");
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (Integer i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString(nonNumericMetrics.get(i));
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(nonNumericKey, currentTestMetric);
+        }
+
+        // Test that non-numeric metrics do not get returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_MIN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_MAX)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_MEAN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_VAR)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_STDEV)));
+    }
+
+    /** Test empty result. */
+    @Test
+    public void testEmptyResult() {
+        final String emptyResultKey = "empty_result";
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (Integer i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString("");
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(emptyResultKey, currentTestMetric);
+        }
+
+        // Test that tests with empty results do not get returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_MIN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_MAX)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_MEAN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_VAR)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_STDEV)));
+    }
+
+    /** Test single run. */
+    @Test
+    public void testSingleRun() {
+        final String singleRunKey = "single_run";
+        final String singleRunVal = "1.00";
+        final String zeroStr = "0.00";
+
+        // Construct ListMultimap of a single iteration of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        Metric.Builder metricBuilder = Metric.newBuilder();
+        metricBuilder.getMeasurementsBuilder().setSingleString(singleRunVal);
+        Metric currentTestMetric = metricBuilder.build();
+        allTestMetrics.put(singleRunKey, currentTestMetric);
+
+        // Test that single runs still give the correct aggregate metrics.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MIN)));
+        Assert.assertEquals(
+                singleRunVal,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MIN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MAX)));
+        Assert.assertEquals(
+                singleRunVal,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MAX))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MEAN)));
+        Assert.assertEquals(
+                singleRunVal,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MEAN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_VAR)));
+        Assert.assertEquals(
+                zeroStr,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_VAR))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_STDEV)));
+        Assert.assertEquals(
+                zeroStr,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_STDEV))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+    }
+
+    /** Test zero runs. */
+    @Test
+    public void testZeroRun() {
+        // Test that tests with zero runs do not get added to the processed metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertEquals(0, aggregateMetrics.keySet().size());
+    }
+}
diff --git a/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java b/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
index 82a56d4..34edfbe 100644
--- a/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
+++ b/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
@@ -52,6 +52,7 @@
 public class PythonUnitTestResultParserTest {
 
     public static final String PYTHON_OUTPUT_FILE_1 = "python_output1.txt";
+    public static final String PYTHON_OUTPUT_FILE_2 = "python_output2.txt";
 
     private PythonUnitTestResultParser mParser;
     private ITestInvocationListener mMockListener;
@@ -518,6 +519,21 @@
         expectLastCall().times(1);
     }
 
+    /** Test another output that starts with a warning. */
+    @Test
+    public void testParseRealOutput2() {
+        String[] contents = readInFile(PYTHON_OUTPUT_FILE_2);
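+        // python_output2.txt reports "Ran 107 tests in 0.295s" with every
+        // test passing.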
+        mMockListener.testRunStarted("test", 107);
+        for (int i = 0; i < 107; i++) {
+            mMockListener.testStarted(EasyMock.anyObject());
+            mMockListener.testEnded(EasyMock.anyObject(), (HashMap<String, Metric>) anyObject());
+        }
+        mMockListener.testRunEnded(295, new HashMap<String, Metric>());
+        replay(mMockListener);
+        mParser.processNewLines(contents);
+        verify(mMockListener);
+    }
+
     private void setTestIdChecks(TestDescription[] ids, boolean[] didPass) {
         for (int i = 0; i < ids.length; i++) {
             mMockListener.testStarted(ids[i]);