Merge "Ensure that we don't carry the prepended name in filters"
diff --git a/atest/result_reporter.py b/atest/result_reporter.py
index ccde52a..524c2fe 100644
--- a/atest/result_reporter.py
+++ b/atest/result_reporter.py
@@ -68,13 +68,15 @@
 class RunStat(object):
     """Class for storing stats of a test run."""
 
-    def __init__(self, passed=0, failed=0, ignored=0, run_errors=False):
+    def __init__(self, passed=0, failed=0, ignored=0, run_errors=False,
+                 assumption_failed=0):
         """Initialize a new instance of RunStat class.
 
         Args:
             passed: Count of passing tests.
             failed: Count of failed tests.
             ignored: Count of ignored tests.
+            assumption_failed: Count of tests with assumption failures.
             run_errors: A boolean if there were run errors
         """
         # TODO(b/109822985): Track group and run estimated totals for updating
@@ -82,6 +84,7 @@
         self.passed = passed
         self.failed = failed
         self.ignored = ignored
+        self.assumption_failed = assumption_failed
         # Run errors are not for particular tests, they are runner errors.
         self.run_errors = run_errors
 
@@ -266,6 +269,7 @@
         passed_label = 'Passed'
         failed_label = 'Failed'
         ignored_label = 'Ignored'
+        assumption_failed_label = 'Assumption Failed'
         error_label = ''
         if stats.failed > 0:
             failed_label = au.colorize(failed_label, constants.RED)
@@ -273,14 +277,16 @@
             error_label = au.colorize('(Completed With ERRORS)', constants.RED)
         elif stats.failed == 0:
             passed_label = au.colorize(passed_label, constants.GREEN)
-        summary = '%s: %s: %s, %s: %s, %s: %s %s' % (name,
-                                                     passed_label,
-                                                     stats.passed,
-                                                     failed_label,
-                                                     stats.failed,
-                                                     ignored_label,
-                                                     stats.ignored,
-                                                     error_label)
+        summary = '%s: %s: %s, %s: %s, %s: %s, %s: %s %s' % (name,
+                                                             passed_label,
+                                                             stats.passed,
+                                                             failed_label,
+                                                             stats.failed,
+                                                             ignored_label,
+                                                             stats.ignored,
+                                                             assumption_failed_label,
+                                                             stats.assumption_failed,
+                                                             error_label)
         return summary
 
     def _update_stats(self, test, group):
@@ -298,6 +304,9 @@
         elif test.status == test_runner_base.IGNORED_STATUS:
             self.run_stats.ignored += 1
             group.ignored += 1
+        elif test.status == test_runner_base.ASSUMPTION_FAILED:
+            self.run_stats.assumption_failed += 1
+            group.assumption_failed += 1
         elif test.status == test_runner_base.FAILED_STATUS:
             self.run_stats.failed += 1
             self.failed_tests.append(test.test_name)
@@ -351,6 +360,12 @@
                                              test.test_name, au.colorize(
                                                  test.status, constants.MAGENTA),
                                              test.test_time))
+            elif test.status == test_runner_base.ASSUMPTION_FAILED:
+                # Example: [33/92] test_name: ASSUMPTION_FAILED (12ms)
+                print('[%s/%s] %s: %s %s' % (test.test_count, test.group_total,
+                                             test.test_name, au.colorize(
+                                                 test.status, constants.MAGENTA),
+                                             test.test_time))
             else:
                 # Example: [26/92] test_name: FAILED (32ms)
                 print('[%s/%s] %s: %s %s' % (test.test_count, test.group_total,
diff --git a/atest/result_reporter_unittest.py b/atest/result_reporter_unittest.py
index ddee8d6..ae2b5a5 100755
--- a/atest/result_reporter_unittest.py
+++ b/atest/result_reporter_unittest.py
@@ -106,6 +106,18 @@
     group_total=2
 )
 
+RESULT_ASSUMPTION_FAILED_TEST = test_runner_base.TestResult(
+    runner_name='someTestRunner',
+    group_name='someTestModule',
+    test_name='someClassName#someTestName',
+    status=test_runner_base.ASSUMPTION_FAILED,
+    details=None,
+    test_count=1,
+    test_time='(10ms)',
+    runner_total=None,
+    group_total=2
+)
+
 #pylint: disable=protected-access
 #pylint: disable=invalid-name
 class ResultReporterUnittests(unittest.TestCase):
@@ -222,7 +234,7 @@
         self.assertEquals(group.ignored, 0)
         self.assertEquals(group.run_errors, True)
 
-    def test_update_stats_ignored(self):
+    def test_update_stats_ignored_and_assumption_failure(self):
         """Test _update_stats method."""
         # Passed Test
         group = result_reporter.RunStat()
@@ -257,6 +269,13 @@
         self.assertEquals(group.ignored, 2)
         self.assertEquals(group.run_errors, True)
 
+        # Assumption failure test
+        self.rr._update_stats(RESULT_ASSUMPTION_FAILED_TEST, group)
+        self.assertEquals(group.assumption_failed, 1)
+        # 2nd assumption failure test
+        self.rr._update_stats(RESULT_ASSUMPTION_FAILED_TEST, group)
+        self.assertEquals(group.assumption_failed, 2)
+
     def test_print_summary_ret_val(self):
         """Test print_summary method's return value."""
         # PASS Case
diff --git a/atest/test_runners/event_handler.py b/atest/test_runners/event_handler.py
index 6c03144..2ff2ab7 100644
--- a/atest/test_runners/event_handler.py
+++ b/atest/test_runners/event_handler.py
@@ -37,7 +37,8 @@
                'run_failed': 'TEST_RUN_FAILED',
                'invocation_failed': 'INVOCATION_FAILED',
                'test_ignored': 'TEST_IGNORED',
-               'log_association':'LOG_ASSOCIATION'}
+               'test_assumption_failure': 'TEST_ASSUMPTION_FAILURE',
+               'log_association': 'LOG_ASSOCIATION'}
 
 EVENT_PAIRS = {EVENT_NAMES['module_started']: EVENT_NAMES['module_ended'],
                EVENT_NAMES['run_started']: EVENT_NAMES['run_ended'],
@@ -57,6 +58,7 @@
     'current_test': None,
     'last_failed': None,
     'last_ignored': None,
+    'last_assumption_failed': None,
     'current_group': None,
     'current_group_total': None,
     'test_count': 0,
@@ -105,6 +107,11 @@
                                      event_data['testName'])
         self.state['last_ignored'] = name
 
+    def _test_assumption_failure(self, event_data):
+        name = TEST_NAME_TEMPLATE % (event_data['className'],
+                                     event_data['testName'])
+        self.state['last_assumption_failed'] = name
+
     def _run_failed(self, event_data):
         # Module and Test Run probably started, but failure occurred.
         self.reporter.process_test_result(test_runner_base.TestResult(
@@ -144,11 +151,15 @@
         if self.state['test_start_time']:
             test_time = self._calc_duration(event_data['end_time'] -
                                             self.state['test_start_time'])
-
         if self.state['last_failed'] and name == self.state['last_failed']['name']:
             status = test_runner_base.FAILED_STATUS
             trace = self.state['last_failed']['trace']
             self.state['last_failed'] = None
+        elif (self.state['last_assumption_failed'] and
+              name == self.state['last_assumption_failed']):
+            status = test_runner_base.ASSUMPTION_FAILED
+            self.state['last_assumption_failed'] = None
+            trace = None
         elif self.state['last_ignored'] and name == self.state['last_ignored']:
             status = test_runner_base.IGNORED_STATUS
             self.state['last_ignored'] = None
@@ -176,6 +187,7 @@
                       EVENT_NAMES['test_started']: _test_started,
                       EVENT_NAMES['test_failed']: _test_failed,
                       EVENT_NAMES['test_ignored']: _test_ignored,
+                      EVENT_NAMES['test_assumption_failure']: _test_assumption_failure,
                       EVENT_NAMES['run_failed']: _run_failed,
                       EVENT_NAMES['invocation_failed']: _invocation_failed,
                       EVENT_NAMES['test_ended']: _test_ended,
diff --git a/atest/test_runners/test_runner_base.py b/atest/test_runners/test_runner_base.py
index d261ddb..4601e2d 100644
--- a/atest/test_runners/test_runner_base.py
+++ b/atest/test_runners/test_runner_base.py
@@ -41,6 +41,7 @@
                                        'test_name', 'status', 'details',
                                        'test_count', 'test_time',
                                        'runner_total', 'group_total'])
+ASSUMPTION_FAILED = 'ASSUMPTION_FAILED'
 FAILED_STATUS = 'FAILED'
 PASSED_STATUS = 'PASSED'
 IGNORED_STATUS = 'IGNORED'
diff --git a/src/com/android/tradefed/device/cloud/MultiUserSetupUtil.java b/src/com/android/tradefed/device/cloud/MultiUserSetupUtil.java
index 9c2e577..3a69667 100644
--- a/src/com/android/tradefed/device/cloud/MultiUserSetupUtil.java
+++ b/src/com/android/tradefed/device/cloud/MultiUserSetupUtil.java
@@ -28,13 +28,7 @@
 
     /** Files that must be copied between users to avoid conflicting ownership */
     private static final List<String> FILE_TO_BE_COPIED =
-            Arrays.asList(
-                    "android-info.txt",
-                    "boot.img",
-                    "cache.img",
-                    "product.img",
-                    "system.img",
-                    "vendor.img");
+            Arrays.asList("android-info.txt", "*.img");
 
     /** Files that can simply be shared between the different users */
     private static final List<String> FILE_TO_BE_LINKED = Arrays.asList("bin", "config", "lib64");
diff --git a/src/com/android/tradefed/result/JUnit4ResultForwarder.java b/src/com/android/tradefed/result/JUnit4ResultForwarder.java
index 34d183a..e2fc3b6 100644
--- a/src/com/android/tradefed/result/JUnit4ResultForwarder.java
+++ b/src/com/android/tradefed/result/JUnit4ResultForwarder.java
@@ -19,6 +19,7 @@
 import com.android.tradefed.testtype.DeviceJUnit4ClassRunner.LogAnnotation;
 import com.android.tradefed.testtype.DeviceJUnit4ClassRunner.MetricAnnotation;
 import com.android.tradefed.testtype.MetricTestCase.LogHolder;
+import com.android.tradefed.testtype.junit4.CarryDnaeError;
 import com.android.tradefed.util.StreamUtil;
 
 import org.junit.AssumptionViolatedException;
@@ -51,6 +52,10 @@
         if (description.getMethodName() == null) {
             // In case of exception in @BeforeClass, the method name will be null
             mListener.testRunFailed(String.format("Failed with trace: %s", failure.getTrace()));
+            // If the exception is our wrapper carrying a DNAE, rethrow the underlying exception
+            if (failure.getException() instanceof CarryDnaeError) {
+                throw ((CarryDnaeError) failure.getException()).getDeviceNotAvailableException();
+            }
             return;
         }
         mTestCaseFailures.add(failure.getException());
@@ -62,7 +67,7 @@
     }
 
     @Override
-    public void testStarted(Description description) {
+    public void testStarted(Description description) throws Exception {
         mTestCaseFailures.clear();
         TestDescription testid =
                 new TestDescription(
@@ -73,32 +78,35 @@
     }
 
     @Override
-    public void testFinished(Description description) {
+    public void testFinished(Description description) throws Exception {
         TestDescription testid =
                 new TestDescription(
                         description.getClassName(),
                         description.getMethodName(),
                         description.getAnnotations());
-        handleFailures(testid);
-        // Explore the Description to see if we find any Annotation metrics carrier
-        HashMap<String, Metric> metrics = new HashMap<>();
-        for (Description child : description.getChildren()) {
-            for (Annotation a : child.getAnnotations()) {
-                if (a instanceof MetricAnnotation) {
-                    metrics.putAll(((MetricAnnotation) a).mMetrics);
-                }
-                if (a instanceof LogAnnotation) {
-                    // Log all the logs found.
-                    for (LogHolder log : ((LogAnnotation) a).mLogs) {
-                        mListener.testLog(log.mDataName, log.mDataType, log.mDataStream);
-                        StreamUtil.cancel(log.mDataStream);
+        try {
+            handleFailures(testid);
+        } finally {
+            // Explore the Description to see if we find any Annotation metrics carrier
+            HashMap<String, Metric> metrics = new HashMap<>();
+            for (Description child : description.getChildren()) {
+                for (Annotation a : child.getAnnotations()) {
+                    if (a instanceof MetricAnnotation) {
+                        metrics.putAll(((MetricAnnotation) a).mMetrics);
                     }
-                    ((LogAnnotation) a).mLogs.clear();
+                    if (a instanceof LogAnnotation) {
+                        // Log all the logs found.
+                        for (LogHolder log : ((LogAnnotation) a).mLogs) {
+                            mListener.testLog(log.mDataName, log.mDataType, log.mDataStream);
+                            StreamUtil.cancel(log.mDataStream);
+                        }
+                        ((LogAnnotation) a).mLogs.clear();
+                    }
                 }
             }
+            //description.
+            mListener.testEnded(testid, metrics);
         }
-        //description.
-        mListener.testEnded(testid, metrics);
     }
 
     @Override
diff --git a/src/com/android/tradefed/targetprep/RunCommandTargetPreparer.java b/src/com/android/tradefed/targetprep/RunCommandTargetPreparer.java
index 6478077..4b50c3d 100644
--- a/src/com/android/tradefed/targetprep/RunCommandTargetPreparer.java
+++ b/src/com/android/tradefed/targetprep/RunCommandTargetPreparer.java
@@ -60,13 +60,8 @@
             isTimeVal = true)
     private long mRunCmdTimeout = 0;
 
-    @Option(
-        name = "use-shell-v2",
-        description =
-                "Whether or not to use the shell v2 execution which provides status and output "
-                        + "for the shell command."
-    )
-    private boolean mUseShellV2 = false;
+    @Option(name = "throw-if-cmd-fail", description = "Whether or not to throw if a command fails")
+    private boolean mThrowIfFailed = false;
 
     private Map<BackgroundDeviceAction, CollectingOutputReceiver> mBgDeviceActionsMap =
             new HashMap<>();
@@ -88,42 +83,34 @@
         }
 
         for (String cmd : mCommands) {
-            CLog.d("About to run setup command on device %s: %s", device.getSerialNumber(), cmd);
             CommandResult result;
-            if (!mUseShellV2) {
-                // Shell v1 without command status.
-                if (mRunCmdTimeout > 0) {
-                    CollectingOutputReceiver receiver = new CollectingOutputReceiver();
-                    device.executeShellCommand(
-                            cmd, receiver, mRunCmdTimeout, TimeUnit.MILLISECONDS, 0);
-                    CLog.v("cmd: '%s', returned:\n%s", cmd, receiver.getOutput());
-                } else {
-                    String output = device.executeShellCommand(cmd);
-                    CLog.v("cmd: '%s', returned:\n%s", cmd, output);
-                }
+            // Shell v2 with command status checks
+            if (mRunCmdTimeout > 0) {
+                result =
+                        device.executeShellV2Command(cmd, mRunCmdTimeout, TimeUnit.MILLISECONDS, 0);
             } else {
-                // Shell v2 with command status checks
-                if (mRunCmdTimeout > 0) {
-                    result =
-                            device.executeShellV2Command(
-                                    cmd, mRunCmdTimeout, TimeUnit.MILLISECONDS, 0);
-                } else {
-                    result = device.executeShellV2Command(cmd);
-                }
-                // Ensure the command ran successfully.
-                if (!CommandStatus.SUCCESS.equals(result.getStatus())) {
+                result = device.executeShellV2Command(cmd);
+            }
+            // Ensure the command ran successfully.
+            if (!CommandStatus.SUCCESS.equals(result.getStatus())) {
+                if (mThrowIfFailed) {
                     throw new TargetSetupError(
                             String.format(
                                     "Failed to run '%s' without error. stdout: '%s'\nstderr: '%s'",
                                     cmd, result.getStdout(), result.getStderr()),
                             device.getDeviceDescriptor());
+                } else {
+                    CLog.d(
+                            "cmd: '%s' failed, returned:\nstdout:%s\nstderr:%s",
+                            cmd, result.getStdout(), result.getStderr());
                 }
-                CLog.v("cmd: '%s', returned:\n%s", cmd, result.getStdout());
             }
         }
 
-        CLog.d("Sleeping %d msecs on device %s", mDelayMsecs, device.getSerialNumber());
-        RunUtil.getDefault().sleep(mDelayMsecs);
+        if (mDelayMsecs > 0) {
+            CLog.d("Sleeping %d msecs on device %s", mDelayMsecs, device.getSerialNumber());
+            RunUtil.getDefault().sleep(mDelayMsecs);
+        }
     }
 
     /**
@@ -146,12 +133,13 @@
             return;
         }
         for (String cmd : mTeardownCommands) {
-            CLog.d("About to run tearDown command on device %s: %s", device.getSerialNumber(),
-                    cmd);
-            String output = device.executeShellCommand(cmd);
-            CLog.v("tearDown cmd: '%s', returned:\n%s", cmd, output);
+            CommandResult result = device.executeShellV2Command(cmd);
+            if (!CommandStatus.SUCCESS.equals(result.getStatus())) {
+                CLog.d(
+                        "tearDown cmd: '%s' failed, returned:\nstdout:%s\nstderr:%s",
+                        cmd, result.getStdout(), result.getStderr());
+            }
         }
-
     }
 }
 
diff --git a/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java b/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
index 778f36f..b69cf75 100644
--- a/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
+++ b/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
@@ -25,6 +25,8 @@
 import com.android.tradefed.result.InputStreamSource;
 import com.android.tradefed.result.LogDataType;
 import com.android.tradefed.testtype.MetricTestCase.LogHolder;
+import com.android.tradefed.testtype.junit4.CarryDnaeError;
+import com.android.tradefed.testtype.junit4.RunNotifierWrapper;
 import com.android.tradefed.util.FileUtil;
 import com.android.tradefed.util.proto.TfMetricProtoUtil;
 
@@ -110,13 +112,27 @@
 
     @Override
     protected void runChild(FrameworkMethod method, RunNotifier notifier) {
+        RunNotifierWrapper wrapper = new RunNotifierWrapper(notifier);
         try {
-            super.runChild(method, notifier);
+            super.runChild(method, wrapper);
         } finally {
             for (File f : mDownloadedFiles) {
                 FileUtil.recursiveDelete(f);
             }
         }
+        if (wrapper.getDeviceNotAvailableException() != null) {
+            throw new CarryDnaeError(wrapper.getDeviceNotAvailableException());
+        }
+    }
+
+    @Override
+    public void run(RunNotifier notifier) {
+        RunNotifierWrapper wrapper = new RunNotifierWrapper(notifier);
+        super.run(wrapper);
+
+        if (wrapper.getDeviceNotAvailableException() != null) {
+            throw new CarryDnaeError(wrapper.getDeviceNotAvailableException());
+        }
     }
 
     @Override
diff --git a/src/com/android/tradefed/testtype/HostTest.java b/src/com/android/tradefed/testtype/HostTest.java
index 0d3a55a..f6c9160 100644
--- a/src/com/android/tradefed/testtype/HostTest.java
+++ b/src/com/android/tradefed/testtype/HostTest.java
@@ -33,6 +33,7 @@
 import com.android.tradefed.result.ResultForwarder;
 import com.android.tradefed.result.TestDescription;
 import com.android.tradefed.testtype.host.PrettyTestEventLogger;
+import com.android.tradefed.testtype.junit4.CarryDnaeError;
 import com.android.tradefed.util.FileUtil;
 import com.android.tradefed.util.JUnit4TestFilter;
 import com.android.tradefed.util.StreamUtil;
@@ -627,7 +628,8 @@
     }
 
     private void runJUnit4Tests(
-            ITestInvocationListener listener, Runner checkRunner, String className) {
+            ITestInvocationListener listener, Runner checkRunner, String className)
+            throws DeviceNotAvailableException {
         JUnitCore runnerCore = new JUnitCore();
         JUnit4ResultForwarder list = new JUnit4ResultForwarder(listener);
         runnerCore.addListener(list);
@@ -636,14 +638,19 @@
         if (!(checkRunner instanceof ErrorReportingRunner)) {
             long startTime = System.currentTimeMillis();
             listener.testRunStarted(className, checkRunner.testCount());
-            if (mCollectTestsOnly) {
-                fakeDescriptionExecution(checkRunner.getDescription(), list);
-            } else {
-                setTestObjectInformation(checkRunner);
-                runnerCore.run(checkRunner);
+            try {
+                if (mCollectTestsOnly) {
+                    fakeDescriptionExecution(checkRunner.getDescription(), list);
+                } else {
+                    setTestObjectInformation(checkRunner);
+                    runnerCore.run(checkRunner);
+                }
+            } catch (CarryDnaeError e) {
+                throw e.getDeviceNotAvailableException();
+            } finally {
+                listener.testRunEnded(
+                        System.currentTimeMillis() - startTime, new HashMap<String, Metric>());
             }
-            listener.testRunEnded(
-                    System.currentTimeMillis() - startTime, new HashMap<String, Metric>());
         } else {
             // Special case where filtering leaves no tests to run, we report no failure
             // in this case.
@@ -671,8 +678,13 @@
                 fakeDescriptionExecution(child, listener);
             }
         } else {
-            listener.testStarted(desc);
-            listener.testFinished(desc);
+            try {
+                listener.testStarted(desc);
+                listener.testFinished(desc);
+            } catch (Exception e) {
+                // Should never happen
+                CLog.e(e);
+            }
         }
     }
 
diff --git a/src/com/android/tradefed/testtype/junit4/CarryDnaeError.java b/src/com/android/tradefed/testtype/junit4/CarryDnaeError.java
new file mode 100644
index 0000000..9b714fa
--- /dev/null
+++ b/src/com/android/tradefed/testtype/junit4/CarryDnaeError.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.testtype.junit4;
+
+import com.android.tradefed.device.DeviceNotAvailableException;
+
+/**
+ * Internal {@link RuntimeException} to carry {@link DeviceNotAvailableException} through the JUnit4
+ * framework.
+ */
+public class CarryDnaeError extends RuntimeException {
+    private static final long serialVersionUID = 4980196508277280342L;
+
+    private final DeviceNotAvailableException mException;
+
+    public CarryDnaeError(DeviceNotAvailableException e) {
+        mException = e;
+    }
+
+    /** Returns the {@link DeviceNotAvailableException} carried by this wrapper. */
+    public DeviceNotAvailableException getDeviceNotAvailableException() {
+        return mException;
+    }
+}
diff --git a/src/com/android/tradefed/testtype/junit4/RunNotifierWrapper.java b/src/com/android/tradefed/testtype/junit4/RunNotifierWrapper.java
new file mode 100644
index 0000000..1099e28
--- /dev/null
+++ b/src/com/android/tradefed/testtype/junit4/RunNotifierWrapper.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.testtype.junit4;
+
+import com.android.tradefed.device.DeviceNotAvailableException;
+
+import org.junit.runner.Description;
+import org.junit.runner.notification.Failure;
+import org.junit.runner.notification.RunNotifier;
+
+/** Wrapper of {@link RunNotifier} so we can carry the {@link DeviceNotAvailableException}. */
+public class RunNotifierWrapper extends RunNotifier {
+
+    private DeviceNotAvailableException mDnae;
+    private final RunNotifier mNotifier;
+
+    public RunNotifierWrapper(RunNotifier notifier) {
+        mNotifier = notifier;
+    }
+
+    @Override
+    public void fireTestFailure(Failure failure) {
+        mNotifier.fireTestFailure(failure);
+        if (failure.getException() instanceof DeviceNotAvailableException) {
+            mDnae = (DeviceNotAvailableException) failure.getException();
+        }
+    }
+
+    @Override
+    public void fireTestAssumptionFailed(Failure failure) {
+        mNotifier.fireTestAssumptionFailed(failure);
+    }
+
+    @Override
+    public void fireTestFinished(Description description) {
+        mNotifier.fireTestFinished(description);
+    }
+
+    @Override
+    public void fireTestStarted(Description description) {
+        mNotifier.fireTestStarted(description);
+    }
+
+    @Override
+    public void fireTestIgnored(Description description) {
+        mNotifier.fireTestIgnored(description);
+    }
+
+    /** Returns the {@link DeviceNotAvailableException} if any was thrown. */
+    public DeviceNotAvailableException getDeviceNotAvailableException() {
+        return mDnae;
+    }
+}
diff --git a/src/com/android/tradefed/testtype/suite/ModuleDefinition.java b/src/com/android/tradefed/testtype/suite/ModuleDefinition.java
index c01adde..117e120 100644
--- a/src/com/android/tradefed/testtype/suite/ModuleDefinition.java
+++ b/src/com/android/tradefed/testtype/suite/ModuleDefinition.java
@@ -733,7 +733,7 @@
             // If disabled skip completely.
             return null;
         }
-        CLog.d("Preparer: %s", preparer.getClass().getSimpleName());
+        CLog.d("Running setup preparer: %s", preparer.getClass().getSimpleName());
         try {
             // set the logger in case they need it.
             if (preparer instanceof ITestLoggerReceiver) {
@@ -765,7 +765,7 @@
             // If disabled skip completely.
             return null;
         }
-        CLog.d("Multi preparer: %s", preparer.getClass().getSimpleName());
+        CLog.d("Running setup multi preparer: %s", preparer.getClass().getSimpleName());
         try {
             // set the logger in case they need it.
             if (preparer instanceof ITestLoggerReceiver) {
@@ -801,7 +801,7 @@
                 // If disabled skip completely.
                 continue;
             }
-            CLog.d("Multi cleaner: %s", multiCleaner.getClass().getSimpleName());
+            CLog.d("Running teardown multi cleaner: %s", multiCleaner.getClass().getSimpleName());
             multiCleaner.tearDown(mModuleInvocationContext, exception);
         }
 
diff --git a/src/com/android/tradefed/util/TestFilterHelper.java b/src/com/android/tradefed/util/TestFilterHelper.java
index b58b0e5..0c34dc6 100644
--- a/src/com/android/tradefed/util/TestFilterHelper.java
+++ b/src/com/android/tradefed/util/TestFilterHelper.java
@@ -15,8 +15,6 @@
  */
 package com.android.tradefed.util;
 
-import com.android.tradefed.log.LogUtil.CLog;
-
 import org.junit.runner.Description;
 
 import java.lang.annotation.Annotation;
@@ -178,7 +176,6 @@
             for (Annotation a : annotationsList) {
                 if (mExcludeAnnotations.contains(a.annotationType().getName())) {
                     // If any of the method annotation match an ExcludeAnnotation, don't run it
-                    CLog.i("Skipping %s, ExcludeAnnotation exclude it", a);
                     return true;
                 }
             }
@@ -197,7 +194,6 @@
             }
             if (neededAnnotation.size() != 0) {
                 // The test needs to have all the include annotation to pass.
-                CLog.i("Skipping, IncludeAnnotation filtered it");
                 return false;
             }
         }
@@ -279,17 +275,14 @@
     private boolean shouldRunFilter(String packageName, String className, String methodName) {
         if (mExcludeFilters.contains(packageName)) {
             // Skip package because it was excluded
-            CLog.i("Skip package %s because it was excluded", packageName);
             return false;
         }
         if (mExcludeFilters.contains(className)) {
             // Skip class because it was excluded
-            CLog.i("Skip class %s because it was excluded", className);
             return false;
         }
         if (mExcludeFilters.contains(methodName)) {
             // Skip method because it was excluded
-            CLog.i("Skip method %s in class %s because it was excluded", methodName, className);
             return false;
         }
         return true;
diff --git a/tests/src/com/android/tradefed/targetprep/RunCommandTargetPreparerTest.java b/tests/src/com/android/tradefed/targetprep/RunCommandTargetPreparerTest.java
index fbbb1e0..da059a3 100644
--- a/tests/src/com/android/tradefed/targetprep/RunCommandTargetPreparerTest.java
+++ b/tests/src/com/android/tradefed/targetprep/RunCommandTargetPreparerTest.java
@@ -34,12 +34,15 @@
 import org.easymock.EasyMock;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 import java.io.IOException;
 import java.util.concurrent.TimeUnit;
 
 
 /** Unit Tests for {@link RunCommandTargetPreparer} */
+@RunWith(JUnit4.class)
 public class RunCommandTargetPreparerTest {
 
     private static final int LONG_WAIT_TIME_MS = 200;
@@ -52,8 +55,6 @@
     @Before
     public void setUp() throws Exception {
         mPreparer = new RunCommandTargetPreparer();
-        OptionSetter setter = new OptionSetter(mPreparer);
-        setter.setOptionValue("use-shell-v2", "true");
         mMockDevice = EasyMock.createMock(ITestDevice.class);
         mMockBuildInfo = EasyMock.createMock(IBuildInfo.class);
     }
@@ -67,7 +68,6 @@
         final String command = "mkdir test";
         OptionSetter setter = new OptionSetter(mPreparer);
         setter.setOptionValue("run-command", command);
-        EasyMock.expect(mMockDevice.getSerialNumber()).andReturn("SERIAL").times(2);
         CommandResult res = new CommandResult();
         res.setStatus(CommandStatus.SUCCESS);
         res.setStdout("");
@@ -87,7 +87,6 @@
         OptionSetter setter = new OptionSetter(mPreparer);
         setter.setOptionValue("run-command", command);
         setter.setOptionValue("run-command-timeout", "100");
-        EasyMock.expect(mMockDevice.getSerialNumber()).andReturn("SERIAL").times(2);
         CommandResult res = new CommandResult();
         res.setStatus(CommandStatus.SUCCESS);
         res.setStdout("");
@@ -129,8 +128,8 @@
         final String command = "mkdir test";
         OptionSetter setter = new OptionSetter(mPreparer);
         setter.setOptionValue("teardown-command", command);
-        EasyMock.expect(mMockDevice.getSerialNumber()).andReturn("SERIAL").times(1);
-        EasyMock.expect(mMockDevice.executeShellCommand(EasyMock.eq(command))).andReturn("");
+        CommandResult result = new CommandResult(CommandStatus.SUCCESS);
+        EasyMock.expect(mMockDevice.executeShellV2Command(EasyMock.eq(command))).andReturn(result);
         EasyMock.replay(mMockDevice, mMockBuildInfo);
         mPreparer.tearDown(mMockDevice, mMockBuildInfo, null);
         EasyMock.verify(mMockDevice, mMockBuildInfo);
diff --git a/tests/src/com/android/tradefed/testtype/HostTestTest.java b/tests/src/com/android/tradefed/testtype/HostTestTest.java
index d1c39d7..a5d8387 100644
--- a/tests/src/com/android/tradefed/testtype/HostTestTest.java
+++ b/tests/src/com/android/tradefed/testtype/HostTestTest.java
@@ -300,6 +300,20 @@
     }
 
     @RunWith(DeviceJUnit4ClassRunner.class)
+    public static class JUnit4TestClassMultiExceptionDnae {
+
+        @org.junit.Test
+        public void testPass5() {
+            Assume.assumeTrue(false);
+        }
+
+        @After
+        public void tearDown() throws Exception {
+            throw new DeviceNotAvailableException("dnae", "serial");
+        }
+    }
+
+    @RunWith(DeviceJUnit4ClassRunner.class)
     public static class Junit4TestClassMulti implements IMultiDeviceTest {
         private Map<ITestDevice, IBuildInfo> mDeviceMap;
 
@@ -1191,6 +1205,29 @@
         EasyMock.verify(mListener);
     }
 
+    public void testRun_junit4style_multiException_dnae() throws Exception {
+        mListener = EasyMock.createStrictMock(ITestInvocationListener.class);
+        mHostTest.setClassName(JUnit4TestClassMultiExceptionDnae.class.getName());
+        TestDescription test1 =
+                new TestDescription(JUnit4TestClassMultiExceptionDnae.class.getName(), "testPass5");
+        mListener.testRunStarted((String) EasyMock.anyObject(), EasyMock.eq(1));
+        mListener.testStarted(EasyMock.eq(test1));
+        mListener.testFailed(
+                EasyMock.eq(test1),
+                EasyMock.contains("MultipleFailureException: There were 2 errors:"));
+        mListener.testEnded(EasyMock.eq(test1), (HashMap<String, Metric>) EasyMock.anyObject());
+        mListener.testRunFailed(EasyMock.anyObject());
+        mListener.testRunEnded(EasyMock.anyLong(), (HashMap<String, Metric>) EasyMock.anyObject());
+        EasyMock.replay(mListener);
+        try {
+            mHostTest.run(mListener);
+            fail("Should have thrown an exception.");
+        } catch (DeviceNotAvailableException expected) {
+            // expected
+        }
+        EasyMock.verify(mListener);
+    }
+
     /**
      * Test for {@link HostTest#run(ITestInvocationListener)}, for test with Junit4 style properly
      * pass to the test the {@link IMultiDeviceTest} information.
@@ -1878,6 +1915,7 @@
         mListener.testRunStarted(
                 EasyMock.eq("com.android.tradefed.testtype.HostTestTest$Junit4RegularClass"),
                 EasyMock.eq(1));
+        mListener.testRunEnded(EasyMock.anyLong(), EasyMock.<HashMap<String, Metric>>anyObject());
         EasyMock.replay(mListener);
         try {
             mHostTest.run(mListener);