The Android Open Source Project | 9066cfe | 2009-03-03 19:31:44 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2007 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | package android.test; |
| 18 | |
| 19 | import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE; |
| 20 | import android.app.Activity; |
| 21 | import android.app.Instrumentation; |
| 22 | import android.os.Bundle; |
| 23 | import android.os.Debug; |
| 24 | import android.os.Looper; |
| 25 | import android.test.suitebuilder.TestMethod; |
| 26 | import android.test.suitebuilder.TestPredicates; |
| 27 | import android.test.suitebuilder.TestSuiteBuilder; |
| 28 | import android.util.Log; |
| 29 | |
| 30 | import com.android.internal.util.Predicate; |
| 31 | |
| 32 | import junit.framework.AssertionFailedError; |
| 33 | import junit.framework.Test; |
| 34 | import junit.framework.TestCase; |
| 35 | import junit.framework.TestListener; |
| 36 | import junit.framework.TestResult; |
| 37 | import junit.framework.TestSuite; |
| 38 | import junit.runner.BaseTestRunner; |
| 39 | import junit.textui.ResultPrinter; |
| 40 | |
| 41 | import java.io.ByteArrayOutputStream; |
| 42 | import java.io.File; |
| 43 | import java.io.PrintStream; |
| 44 | import java.lang.reflect.InvocationTargetException; |
| 45 | import java.lang.reflect.Method; |
| 46 | |
| 47 | |
| 48 | /** |
| 49 | * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against |
| 50 | * an Android package (application). Typical usage: |
| 51 | * <ol> |
| 52 | * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests |
| 53 | * against the classes in your package. Typically these are subclassed from: |
| 54 | * <ul><li>{@link android.test.ActivityInstrumentationTestCase}</li> |
| 55 | * <li>{@link android.test.ActivityUnitTestCase}</li> |
| 56 | * <li>{@link android.test.AndroidTestCase}</li> |
| 57 | * <li>{@link android.test.ApplicationTestCase}</li> |
| 58 | * <li>{@link android.test.InstrumentationTestCase}</li> |
| 59 | * <li>{@link android.test.ProviderTestCase}</li> |
| 60 | * <li>{@link android.test.ServiceTestCase}</li> |
| 61 | * <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul> |
 * <li>In an appropriate AndroidManifest.xml, define this instrumentation with
| 63 | * the appropriate android:targetPackage set. |
| 64 | * <li>Run the instrumentation using "adb shell am instrument -w", |
| 65 | * with no optional arguments, to run all tests (except performance tests). |
| 66 | * <li>Run the instrumentation using "adb shell am instrument -w", |
| 67 | * with the argument '-e func true' to run all functional tests. These are tests that derive from |
| 68 | * {@link android.test.InstrumentationTestCase}. |
| 69 | * <li>Run the instrumentation using "adb shell am instrument -w", |
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
| 71 | * from {@link android.test.InstrumentationTestCase} (and are not performance tests). |
| 72 | * <li>Run the instrumentation using "adb shell am instrument -w", |
| 73 | * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}. |
| 74 | * </ol> |
| 75 | * <p/> |
| 76 | * <b>Running all tests:</b> adb shell am instrument -w |
| 77 | * com.android.foo/android.test.InstrumentationTestRunner |
| 78 | * <p/> |
| 79 | * <b>Running all small tests:</b> adb shell am instrument -w |
| 80 | * -e size small |
| 81 | * com.android.foo/android.test.InstrumentationTestRunner |
| 82 | * <p/> |
| 83 | * <b>Running all medium tests:</b> adb shell am instrument -w |
| 84 | * -e size medium |
| 85 | * com.android.foo/android.test.InstrumentationTestRunner |
| 86 | * <p/> |
| 87 | * <b>Running all large tests:</b> adb shell am instrument -w |
| 88 | * -e size large |
| 89 | * com.android.foo/android.test.InstrumentationTestRunner |
| 90 | * <p/> |
| 91 | * <b>Running a single testcase:</b> adb shell am instrument -w |
| 92 | * -e class com.android.foo.FooTest |
| 93 | * com.android.foo/android.test.InstrumentationTestRunner |
| 94 | * <p/> |
| 95 | * <b>Running a single test:</b> adb shell am instrument -w |
| 96 | * -e class com.android.foo.FooTest#testFoo |
| 97 | * com.android.foo/android.test.InstrumentationTestRunner |
| 98 | * <p/> |
| 99 | * <b>Running multiple tests:</b> adb shell am instrument -w |
| 100 | * -e class com.android.foo.FooTest,com.android.foo.TooTest |
| 101 | * com.android.foo/android.test.InstrumentationTestRunner |
| 102 | * <p/> |
| 103 | * <b>Including performance tests:</b> adb shell am instrument -w |
| 104 | * -e perf true |
| 105 | * com.android.foo/android.test.InstrumentationTestRunner |
| 106 | * <p/> |
| 107 | * <b>To debug your tests, set a break point in your code and pass:</b> |
| 108 | * -e debug true |
| 109 | * <p/> |
| 110 | * <b>To run in 'log only' mode</b> |
| 111 | * -e log true |
| 112 | * This option will load and iterate through all test classes and methods, but will bypass actual |
| 113 | * test execution. Useful for quickly obtaining info on the tests to be executed by an |
| 114 | * instrumentation command. |
| 115 | * <p/> |
| 116 | * <b>To generate EMMA code coverage:</b> |
| 117 | * -e coverage true |
| 118 | * Note: this requires an emma instrumented build. By default, the code coverage results file |
| 119 | * will be saved as /sdcard/coverage.ec, unless overridden by coverageFile flag (see below) |
| 120 | * <p/> |
| 121 | * <b> To specify EMMA code coverage results file path:</b> |
| 122 | * -e coverageFile /sdcard/myFile.ec |
| 123 | * <br/> |
| 124 | * in addition to the other arguments. |
| 125 | */ |
| 126 | |
| 127 | /* (not JavaDoc) |
| 128 | * Although not necessary in most case, another way to use this class is to extend it and have the |
| 129 | * derived class return |
| 130 | * the desired test suite from the {@link #getTestSuite()} method. The test suite returned from this |
| 131 | * method will be used if no target class is defined in the meta-data or command line argument |
| 132 | * parameters. If a derived class is used it needs to be added as an instrumentation to the |
| 133 | * AndroidManifest.xml and the command to run it would look like: |
| 134 | * <p/> |
| 135 | * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i> |
| 136 | * <p/> |
| 137 | * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class. |
| 138 | * |
| 139 | * This model is used by many existing app tests, but can probably be deprecated. |
| 140 | */ |
| 141 | public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider { |
| 142 | |
| 143 | /** @hide */ |
| 144 | public static final String ARGUMENT_TEST_CLASS = "class"; |
| 145 | /** @hide */ |
| 146 | public static final String ARGUMENT_TEST_PACKAGE = "package"; |
| 147 | /** @hide */ |
| 148 | public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size"; |
| 149 | /** @hide */ |
| 150 | public static final String ARGUMENT_INCLUDE_PERF = "perf"; |
| 151 | /** @hide */ |
| 152 | public static final String ARGUMENT_DELAY_MSEC = "delay_msec"; |
| 153 | |
| 154 | private static final String SMALL_SUITE = "small"; |
| 155 | private static final String MEDIUM_SUITE = "medium"; |
| 156 | private static final String LARGE_SUITE = "large"; |
| 157 | |
| 158 | private static final String ARGUMENT_LOG_ONLY = "log"; |
| 159 | |
| 160 | |
| 161 | /** |
| 162 | * This constant defines the maximum allowed runtime (in ms) for a test included in the "small" suite. |
| 163 | * It is used to make an educated guess at what suite an unlabeled test belongs. |
| 164 | */ |
| 165 | private static final float SMALL_SUITE_MAX_RUNTIME = 100; |
| 166 | |
| 167 | /** |
| 168 | * This constant defines the maximum allowed runtime (in ms) for a test included in the "medium" suite. |
| 169 | * It is used to make an educated guess at what suite an unlabeled test belongs. |
| 170 | */ |
| 171 | private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000; |
| 172 | |
| 173 | /** |
| 174 | * The following keys are used in the status bundle to provide structured reports to |
| 175 | * an IInstrumentationWatcher. |
| 176 | */ |
| 177 | |
| 178 | /** |
| 179 | * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER}, |
| 180 | * identifies InstrumentationTestRunner as the source of the report. This is sent with all |
| 181 | * status messages. |
| 182 | */ |
| 183 | public static final String REPORT_VALUE_ID = "InstrumentationTestRunner"; |
| 184 | /** |
| 185 | * If included in the status or final bundle sent to an IInstrumentationWatcher, this key |
| 186 | * identifies the total number of tests that are being run. This is sent with all status |
| 187 | * messages. |
| 188 | */ |
| 189 | public static final String REPORT_KEY_NUM_TOTAL = "numtests"; |
| 190 | /** |
| 191 | * If included in the status or final bundle sent to an IInstrumentationWatcher, this key |
| 192 | * identifies the sequence number of the current test. This is sent with any status message |
| 193 | * describing a specific test being started or completed. |
| 194 | */ |
| 195 | public static final String REPORT_KEY_NUM_CURRENT = "current"; |
| 196 | /** |
| 197 | * If included in the status or final bundle sent to an IInstrumentationWatcher, this key |
| 198 | * identifies the name of the current test class. This is sent with any status message |
| 199 | * describing a specific test being started or completed. |
| 200 | */ |
| 201 | public static final String REPORT_KEY_NAME_CLASS = "class"; |
| 202 | /** |
| 203 | * If included in the status or final bundle sent to an IInstrumentationWatcher, this key |
| 204 | * identifies the name of the current test. This is sent with any status message |
| 205 | * describing a specific test being started or completed. |
| 206 | */ |
| 207 | public static final String REPORT_KEY_NAME_TEST = "test"; |
| 208 | /** |
| 209 | * If included in the status or final bundle sent to an IInstrumentationWatcher, this key |
| 210 | * reports the run time in seconds of the current test. |
| 211 | */ |
| 212 | private static final String REPORT_KEY_RUN_TIME = "runtime"; |
| 213 | /** |
| 214 | * If included in the status or final bundle sent to an IInstrumentationWatcher, this key |
| 215 | * reports the guessed suite assignment for the current test. |
| 216 | */ |
| 217 | private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment"; |
| 218 | /** |
| 219 | * The test is starting. |
| 220 | */ |
| 221 | public static final int REPORT_VALUE_RESULT_START = 1; |
| 222 | /** |
| 223 | * The test completed successfully. |
| 224 | */ |
| 225 | public static final int REPORT_VALUE_RESULT_OK = 0; |
| 226 | /** |
| 227 | * The test completed with an error. |
| 228 | */ |
| 229 | public static final int REPORT_VALUE_RESULT_ERROR = -1; |
| 230 | /** |
| 231 | * The test completed with a failure. |
| 232 | */ |
| 233 | public static final int REPORT_VALUE_RESULT_FAILURE = -2; |
| 234 | /** |
| 235 | * If included in the status bundle sent to an IInstrumentationWatcher, this key |
| 236 | * identifies a stack trace describing an error or failure. This is sent with any status |
| 237 | * message describing a specific test being completed. |
| 238 | */ |
| 239 | public static final String REPORT_KEY_STACK = "stack"; |
| 240 | |
| 241 | private static final String DEFAULT_COVERAGE_FILE_PATH = "/sdcard/coverage.ec"; |
| 242 | |
| 243 | private static final String LOG_TAG = "InstrumentationTestRunner"; |
| 244 | |
| 245 | private final Bundle mResults = new Bundle(); |
| 246 | private AndroidTestRunner mTestRunner; |
| 247 | private boolean mDebug; |
| 248 | private boolean mJustCount; |
| 249 | private boolean mSuiteAssignmentMode; |
| 250 | private int mTestCount; |
| 251 | private String mPackageOfTests; |
| 252 | private boolean mCoverage; |
| 253 | private String mCoverageFilePath; |
| 254 | private int mDelayMsec; |
| 255 | |
| 256 | @Override |
| 257 | public void onCreate(Bundle arguments) { |
| 258 | super.onCreate(arguments); |
| 259 | |
| 260 | // Apk paths used to search for test classes when using TestSuiteBuilders. |
| 261 | String[] apkPaths = |
| 262 | {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()}; |
| 263 | ClassPathPackageInfoSource.setApkPaths(apkPaths); |
| 264 | |
| 265 | Predicate<TestMethod> testSizePredicate = null; |
| 266 | boolean includePerformance = false; |
| 267 | String testClassesArg = null; |
| 268 | boolean logOnly = false; |
| 269 | |
| 270 | if (arguments != null) { |
| 271 | // Test class name passed as an argument should override any meta-data declaration. |
| 272 | testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS); |
| 273 | mDebug = getBooleanArgument(arguments, "debug"); |
| 274 | mJustCount = getBooleanArgument(arguments, "count"); |
| 275 | mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment"); |
| 276 | mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE); |
| 277 | testSizePredicate = getSizePredicateFromArg( |
| 278 | arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE)); |
| 279 | includePerformance = getBooleanArgument(arguments, ARGUMENT_INCLUDE_PERF); |
| 280 | logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY); |
| 281 | mCoverage = getBooleanArgument(arguments, "coverage"); |
| 282 | mCoverageFilePath = arguments.getString("coverageFile"); |
| 283 | |
| 284 | try { |
| 285 | Object delay = arguments.get(ARGUMENT_DELAY_MSEC); // Accept either string or int |
| 286 | if (delay != null) mDelayMsec = Integer.parseInt(delay.toString()); |
| 287 | } catch (NumberFormatException e) { |
| 288 | Log.e(LOG_TAG, "Invalid delay_msec parameter", e); |
| 289 | } |
| 290 | } |
| 291 | |
| 292 | TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(), |
| 293 | getTargetContext().getClassLoader()); |
| 294 | |
| 295 | if (testSizePredicate != null) { |
| 296 | testSuiteBuilder.addRequirements(testSizePredicate); |
| 297 | } |
| 298 | if (!includePerformance) { |
| 299 | testSuiteBuilder.addRequirements(REJECT_PERFORMANCE); |
| 300 | } |
| 301 | |
| 302 | if (testClassesArg == null) { |
| 303 | TestSuite testSuite = null; |
| 304 | if (mPackageOfTests != null) { |
| 305 | testSuiteBuilder.includePackages(mPackageOfTests); |
| 306 | } else { |
| 307 | testSuite = getTestSuite(); |
| 308 | testSuiteBuilder.addTestSuite(testSuite); |
| 309 | } |
| 310 | |
| 311 | if (testSuite == null) { |
| 312 | testSuiteBuilder.includePackages(getTargetContext().getPackageName()); |
| 313 | } |
| 314 | } else { |
| 315 | parseTestClasses(testClassesArg, testSuiteBuilder); |
| 316 | } |
| 317 | |
| 318 | mTestRunner = getAndroidTestRunner(); |
| 319 | mTestRunner.setContext(getTargetContext()); |
| 320 | mTestRunner.setInstrumentaiton(this); |
| 321 | mTestRunner.setSkipExecution(logOnly); |
| 322 | mTestRunner.setTest(testSuiteBuilder.build()); |
| 323 | mTestCount = mTestRunner.getTestCases().size(); |
| 324 | if (mSuiteAssignmentMode) { |
| 325 | mTestRunner.addTestListener(new SuiteAssignmentPrinter()); |
| 326 | } else { |
| 327 | mTestRunner.addTestListener(new TestPrinter("TestRunner", false)); |
| 328 | mTestRunner.addTestListener(new WatcherResultPrinter(mTestCount)); |
| 329 | } |
| 330 | start(); |
| 331 | } |
| 332 | |
| 333 | /** |
| 334 | * Parses and loads the specified set of test classes |
| 335 | * @param testClassArg - comma-separated list of test classes and methods |
| 336 | * @param testSuiteBuilder - builder to add tests to |
| 337 | */ |
| 338 | private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) { |
| 339 | String[] testClasses = testClassArg.split(","); |
| 340 | for (String testClass : testClasses) { |
| 341 | parseTestClass(testClass, testSuiteBuilder); |
| 342 | } |
| 343 | } |
| 344 | |
| 345 | /** |
| 346 | * Parse and load the given test class and, optionally, method |
| 347 | * @param testClassName - full package name of test class and optionally method to add. Expected |
| 348 | * format: com.android.TestClass#testMethod |
| 349 | * @param testSuiteBuilder - builder to add tests to |
| 350 | */ |
| 351 | private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) { |
| 352 | int methodSeparatorIndex = testClassName.indexOf('#'); |
| 353 | String testMethodName = null; |
| 354 | |
| 355 | if (methodSeparatorIndex > 0) { |
| 356 | testMethodName = testClassName.substring(methodSeparatorIndex + 1); |
| 357 | testClassName = testClassName.substring(0, methodSeparatorIndex); |
| 358 | } |
| 359 | testSuiteBuilder.addTestClassByName(testClassName, testMethodName, |
| 360 | getTargetContext()); |
| 361 | } |
| 362 | |
| 363 | protected AndroidTestRunner getAndroidTestRunner() { |
| 364 | return new AndroidTestRunner(); |
| 365 | } |
| 366 | |
| 367 | private boolean getBooleanArgument(Bundle arguments, String tag) { |
| 368 | String tagString = arguments.getString(tag); |
| 369 | return tagString != null && Boolean.parseBoolean(tagString); |
| 370 | } |
| 371 | |
| 372 | /* |
| 373 | * Returns the size predicate object, corresponding to the "size" argument value. |
| 374 | */ |
| 375 | private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) { |
| 376 | |
| 377 | if (SMALL_SUITE.equals(sizeArg)) { |
| 378 | return TestPredicates.SELECT_SMALL; |
| 379 | } else if (MEDIUM_SUITE.equals(sizeArg)) { |
| 380 | return TestPredicates.SELECT_MEDIUM; |
| 381 | } else if (LARGE_SUITE.equals(sizeArg)) { |
| 382 | return TestPredicates.SELECT_LARGE; |
| 383 | } else { |
| 384 | return null; |
| 385 | } |
| 386 | } |
| 387 | |
| 388 | @Override |
| 389 | public void onStart() { |
| 390 | Looper.prepare(); |
| 391 | |
| 392 | if (mJustCount) { |
| 393 | mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID); |
| 394 | mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount); |
| 395 | finish(Activity.RESULT_OK, mResults); |
| 396 | } else { |
| 397 | if (mDebug) { |
| 398 | Debug.waitForDebugger(); |
| 399 | } |
| 400 | |
| 401 | ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); |
| 402 | PrintStream writer = new PrintStream(byteArrayOutputStream); |
| 403 | try { |
| 404 | StringResultPrinter resultPrinter = new StringResultPrinter(writer); |
| 405 | |
| 406 | mTestRunner.addTestListener(resultPrinter); |
| 407 | |
| 408 | long startTime = System.currentTimeMillis(); |
| 409 | mTestRunner.runTest(); |
| 410 | long runTime = System.currentTimeMillis() - startTime; |
| 411 | |
| 412 | resultPrinter.print(mTestRunner.getTestResult(), runTime); |
| 413 | } finally { |
| 414 | mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, |
| 415 | String.format("\nTest results for %s=%s", |
| 416 | mTestRunner.getTestClassName(), |
| 417 | byteArrayOutputStream.toString())); |
| 418 | |
| 419 | if (mCoverage) { |
| 420 | generateCoverageReport(); |
| 421 | } |
| 422 | writer.close(); |
| 423 | |
| 424 | finish(Activity.RESULT_OK, mResults); |
| 425 | } |
| 426 | } |
| 427 | } |
| 428 | |
| 429 | public TestSuite getTestSuite() { |
| 430 | return getAllTests(); |
| 431 | } |
| 432 | |
| 433 | /** |
| 434 | * Override this to define all of the tests to run in your package. |
| 435 | */ |
| 436 | public TestSuite getAllTests() { |
| 437 | return null; |
| 438 | } |
| 439 | |
| 440 | /** |
| 441 | * Override this to provide access to the class loader of your package. |
| 442 | */ |
| 443 | public ClassLoader getLoader() { |
| 444 | return null; |
| 445 | } |
| 446 | |
| 447 | private void generateCoverageReport() { |
| 448 | // use reflection to call emma dump coverage method, to avoid |
| 449 | // always statically compiling against emma jar |
| 450 | java.io.File coverageFile = new java.io.File(getCoverageFilePath()); |
| 451 | try { |
| 452 | Class emmaRTClass = Class.forName("com.vladium.emma.rt.RT"); |
| 453 | Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData", |
| 454 | coverageFile.getClass(), boolean.class, boolean.class); |
| 455 | |
| 456 | dumpCoverageMethod.invoke(null, coverageFile, false, false); |
| 457 | |
| 458 | } catch (ClassNotFoundException e) { |
| 459 | reportEmmaError("Is emma jar on classpath?", e); |
| 460 | } catch (SecurityException e) { |
| 461 | reportEmmaError(e); |
| 462 | } catch (NoSuchMethodException e) { |
| 463 | reportEmmaError(e); |
| 464 | } catch (IllegalArgumentException e) { |
| 465 | reportEmmaError(e); |
| 466 | } catch (IllegalAccessException e) { |
| 467 | reportEmmaError(e); |
| 468 | } catch (InvocationTargetException e) { |
| 469 | reportEmmaError(e); |
| 470 | } |
| 471 | } |
| 472 | |
| 473 | private String getCoverageFilePath() { |
| 474 | if (mCoverageFilePath == null) { |
| 475 | return DEFAULT_COVERAGE_FILE_PATH; |
| 476 | } |
| 477 | else { |
| 478 | return mCoverageFilePath; |
| 479 | } |
| 480 | } |
| 481 | |
| 482 | private void reportEmmaError(Exception e) { |
| 483 | reportEmmaError("", e); |
| 484 | } |
| 485 | |
| 486 | private void reportEmmaError(String hint, Exception e) { |
| 487 | String msg = "Failed to generate emma coverage. " + hint; |
| 488 | Log.e(LOG_TAG, msg, e); |
| 489 | mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg); |
| 490 | } |
| 491 | |
| 492 | // TODO kill this, use status() and prettyprint model for better output |
| 493 | private class StringResultPrinter extends ResultPrinter { |
| 494 | |
| 495 | public StringResultPrinter(PrintStream writer) { |
| 496 | super(writer); |
| 497 | } |
| 498 | |
| 499 | synchronized void print(TestResult result, long runTime) { |
| 500 | printHeader(runTime); |
| 501 | printFooter(result); |
| 502 | } |
| 503 | } |
| 504 | |
| 505 | /** |
| 506 | * This class sends status reports back to the IInstrumentationWatcher about |
| 507 | * which suite each test belongs. |
| 508 | */ |
| 509 | private class SuiteAssignmentPrinter implements TestListener |
| 510 | { |
| 511 | |
| 512 | private Bundle mTestResult; |
| 513 | private long mStartTime; |
| 514 | private long mEndTime; |
| 515 | private boolean mTimingValid; |
| 516 | |
| 517 | public SuiteAssignmentPrinter() { |
| 518 | } |
| 519 | |
| 520 | /** |
| 521 | * send a status for the start of a each test, so long tests can be seen as "running" |
| 522 | */ |
| 523 | public void startTest(Test test) { |
| 524 | mTimingValid = true; |
| 525 | mStartTime = System.currentTimeMillis(); |
| 526 | } |
| 527 | |
| 528 | /** |
| 529 | * @see junit.framework.TestListener#addError(Test, Throwable) |
| 530 | */ |
| 531 | public void addError(Test test, Throwable t) { |
| 532 | mTimingValid = false; |
| 533 | } |
| 534 | |
| 535 | /** |
| 536 | * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError) |
| 537 | */ |
| 538 | public void addFailure(Test test, AssertionFailedError t) { |
| 539 | mTimingValid = false; |
| 540 | } |
| 541 | |
| 542 | /** |
| 543 | * @see junit.framework.TestListener#endTest(Test) |
| 544 | */ |
| 545 | public void endTest(Test test) { |
| 546 | float runTime; |
| 547 | String assignmentSuite; |
| 548 | mEndTime = System.currentTimeMillis(); |
| 549 | mTestResult = new Bundle(); |
| 550 | |
| 551 | if (!mTimingValid || mStartTime < 0) { |
| 552 | assignmentSuite = "NA"; |
| 553 | runTime = -1; |
| 554 | } else { |
| 555 | runTime = mEndTime - mStartTime; |
| 556 | if (runTime < SMALL_SUITE_MAX_RUNTIME |
| 557 | && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) { |
| 558 | assignmentSuite = SMALL_SUITE; |
| 559 | } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) { |
| 560 | assignmentSuite = MEDIUM_SUITE; |
| 561 | } else { |
| 562 | assignmentSuite = LARGE_SUITE; |
| 563 | } |
| 564 | } |
| 565 | // Clear mStartTime so that we can verify that it gets set next time. |
| 566 | mStartTime = -1; |
| 567 | |
| 568 | mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, |
| 569 | test.getClass().getName() + "#" + ((TestCase) test).getName() |
| 570 | + "\nin " + assignmentSuite + " suite\nrunTime: " |
| 571 | + String.valueOf(runTime) + "\n"); |
| 572 | mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime); |
| 573 | mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite); |
| 574 | |
| 575 | sendStatus(0, mTestResult); |
| 576 | } |
| 577 | } |
| 578 | |
| 579 | /** |
| 580 | * This class sends status reports back to the IInstrumentationWatcher |
| 581 | */ |
| 582 | private class WatcherResultPrinter implements TestListener |
| 583 | { |
| 584 | private final Bundle mResultTemplate; |
| 585 | Bundle mTestResult; |
| 586 | int mTestNum = 0; |
| 587 | int mTestResultCode = 0; |
| 588 | String mTestClass = null; |
| 589 | |
| 590 | public WatcherResultPrinter(int numTests) { |
| 591 | mResultTemplate = new Bundle(); |
| 592 | mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID); |
| 593 | mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests); |
| 594 | } |
| 595 | |
| 596 | /** |
| 597 | * send a status for the start of a each test, so long tests can be seen as "running" |
| 598 | */ |
| 599 | public void startTest(Test test) { |
| 600 | String testClass = test.getClass().getName(); |
| 601 | mTestResult = new Bundle(mResultTemplate); |
| 602 | mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass); |
| 603 | mTestResult.putString(REPORT_KEY_NAME_TEST, ((TestCase) test).getName()); |
| 604 | mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum); |
| 605 | // pretty printing |
| 606 | if (testClass != null && !testClass.equals(mTestClass)) { |
| 607 | mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, |
| 608 | String.format("\n%s:", testClass)); |
| 609 | mTestClass = testClass; |
| 610 | } else { |
| 611 | mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ""); |
| 612 | } |
| 613 | |
| 614 | // The delay_msec parameter is normally used to provide buffers of idle time |
| 615 | // for power measurement purposes. To make sure there is a delay before and after |
| 616 | // every test in a suite, we delay *after* every test (see endTest below) and also |
| 617 | // delay *before* the first test. So, delay test1 delay test2 delay. |
| 618 | |
| 619 | try { |
| 620 | if (mTestNum == 1) Thread.sleep(mDelayMsec); |
| 621 | } catch (InterruptedException e) { |
| 622 | throw new IllegalStateException(e); |
| 623 | } |
| 624 | |
| 625 | sendStatus(REPORT_VALUE_RESULT_START, mTestResult); |
| 626 | mTestResultCode = 0; |
| 627 | } |
| 628 | |
| 629 | /** |
| 630 | * @see junit.framework.TestListener#addError(Test, Throwable) |
| 631 | */ |
| 632 | public void addError(Test test, Throwable t) { |
| 633 | mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t)); |
| 634 | mTestResultCode = REPORT_VALUE_RESULT_ERROR; |
| 635 | // pretty printing |
| 636 | mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, |
| 637 | String.format("\nError in %s:\n%s", |
| 638 | ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t))); |
| 639 | } |
| 640 | |
| 641 | /** |
| 642 | * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError) |
| 643 | */ |
| 644 | public void addFailure(Test test, AssertionFailedError t) { |
| 645 | mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t)); |
| 646 | mTestResultCode = REPORT_VALUE_RESULT_FAILURE; |
| 647 | // pretty printing |
| 648 | mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, |
| 649 | String.format("\nFailure in %s:\n%s", |
| 650 | ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t))); |
| 651 | } |
| 652 | |
| 653 | /** |
| 654 | * @see junit.framework.TestListener#endTest(Test) |
| 655 | */ |
| 656 | public void endTest(Test test) { |
| 657 | if (mTestResultCode == 0) { |
| 658 | mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "."); |
| 659 | } |
| 660 | sendStatus(mTestResultCode, mTestResult); |
| 661 | |
| 662 | try { // Sleep after every test, if specified |
| 663 | Thread.sleep(mDelayMsec); |
| 664 | } catch (InterruptedException e) { |
| 665 | throw new IllegalStateException(e); |
| 666 | } |
| 667 | } |
| 668 | |
| 669 | // TODO report the end of the cycle |
| 670 | // TODO report runtime for each test |
| 671 | } |
| 672 | } |