blob: 044f55581a3d9097049820b5cab28e8d127be544 [file] [log] [blame]
/*
2 * Copyright (C) 2007 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.test;
18
19import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE;
20import android.app.Activity;
21import android.app.Instrumentation;
22import android.os.Bundle;
23import android.os.Debug;
24import android.os.Looper;
25import android.test.suitebuilder.TestMethod;
26import android.test.suitebuilder.TestPredicates;
27import android.test.suitebuilder.TestSuiteBuilder;
28import android.util.Log;
29
30import com.android.internal.util.Predicate;
31
32import junit.framework.AssertionFailedError;
33import junit.framework.Test;
34import junit.framework.TestCase;
35import junit.framework.TestListener;
36import junit.framework.TestResult;
37import junit.framework.TestSuite;
38import junit.runner.BaseTestRunner;
39import junit.textui.ResultPrinter;
40
41import java.io.ByteArrayOutputStream;
42import java.io.File;
43import java.io.PrintStream;
44import java.lang.reflect.InvocationTargetException;
45import java.lang.reflect.Method;
46
47
48/**
49 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
50 * an Android package (application). Typical usage:
51 * <ol>
52 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
53 * against the classes in your package. Typically these are subclassed from:
54 * <ul><li>{@link android.test.ActivityInstrumentationTestCase}</li>
55 * <li>{@link android.test.ActivityUnitTestCase}</li>
56 * <li>{@link android.test.AndroidTestCase}</li>
57 * <li>{@link android.test.ApplicationTestCase}</li>
58 * <li>{@link android.test.InstrumentationTestCase}</li>
59 * <li>{@link android.test.ProviderTestCase}</li>
60 * <li>{@link android.test.ServiceTestCase}</li>
61 * <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 * <li>In an appropriate AndroidManifest.xml, define this instrumentation with
63 * the appropriate android:targetPackage set.
64 * <li>Run the instrumentation using "adb shell am instrument -w",
65 * with no optional arguments, to run all tests (except performance tests).
66 * <li>Run the instrumentation using "adb shell am instrument -w",
67 * with the argument '-e func true' to run all functional tests. These are tests that derive from
68 * {@link android.test.InstrumentationTestCase}.
69 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
71 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
72 * <li>Run the instrumentation using "adb shell am instrument -w",
73 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
74 * </ol>
75 * <p/>
76 * <b>Running all tests:</b> adb shell am instrument -w
77 * com.android.foo/android.test.InstrumentationTestRunner
78 * <p/>
79 * <b>Running all small tests:</b> adb shell am instrument -w
80 * -e size small
81 * com.android.foo/android.test.InstrumentationTestRunner
82 * <p/>
83 * <b>Running all medium tests:</b> adb shell am instrument -w
84 * -e size medium
85 * com.android.foo/android.test.InstrumentationTestRunner
86 * <p/>
87 * <b>Running all large tests:</b> adb shell am instrument -w
88 * -e size large
89 * com.android.foo/android.test.InstrumentationTestRunner
90 * <p/>
91 * <b>Running a single testcase:</b> adb shell am instrument -w
92 * -e class com.android.foo.FooTest
93 * com.android.foo/android.test.InstrumentationTestRunner
94 * <p/>
95 * <b>Running a single test:</b> adb shell am instrument -w
96 * -e class com.android.foo.FooTest#testFoo
97 * com.android.foo/android.test.InstrumentationTestRunner
98 * <p/>
99 * <b>Running multiple tests:</b> adb shell am instrument -w
100 * -e class com.android.foo.FooTest,com.android.foo.TooTest
101 * com.android.foo/android.test.InstrumentationTestRunner
102 * <p/>
103 * <b>Including performance tests:</b> adb shell am instrument -w
104 * -e perf true
105 * com.android.foo/android.test.InstrumentationTestRunner
106 * <p/>
107 * <b>To debug your tests, set a break point in your code and pass:</b>
108 * -e debug true
109 * <p/>
110 * <b>To run in 'log only' mode</b>
111 * -e log true
112 * This option will load and iterate through all test classes and methods, but will bypass actual
113 * test execution. Useful for quickly obtaining info on the tests to be executed by an
114 * instrumentation command.
115 * <p/>
116 * <b>To generate EMMA code coverage:</b>
117 * -e coverage true
118 * Note: this requires an emma instrumented build. By default, the code coverage results file
119 * will be saved as /sdcard/coverage.ec, unless overridden by coverageFile flag (see below)
120 * <p/>
121 * <b> To specify EMMA code coverage results file path:</b>
122 * -e coverageFile /sdcard/myFile.ec
123 * <br/>
124 * in addition to the other arguments.
125 */
126
127/* (not JavaDoc)
128 * Although not necessary in most case, another way to use this class is to extend it and have the
129 * derived class return
130 * the desired test suite from the {@link #getTestSuite()} method. The test suite returned from this
131 * method will be used if no target class is defined in the meta-data or command line argument
132 * parameters. If a derived class is used it needs to be added as an instrumentation to the
133 * AndroidManifest.xml and the command to run it would look like:
134 * <p/>
135 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
136 * <p/>
137 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class.
138 *
139 * This model is used by many existing app tests, but can probably be deprecated.
140 */
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    // Instrumentation argument keys ("-e <key> <value>" on the am instrument command line).

    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_INCLUDE_PERF = "perf";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    // Recognized values for the "size" argument, also reused as suite-assignment labels.
    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";


    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "small" suite. It is used to make an educated guess at what suite an unlabeled test
     * belongs.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at what suite an unlabeled test
     * belongs.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report. This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run. This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in seconds of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure. This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Coverage output location used when no "coverageFile" argument is supplied.
    private static final String DEFAULT_COVERAGE_FILE_PATH = "/sdcard/coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    // Accumulates the final result bundle handed back via finish() in onStart().
    private final Bundle mResults = new Bundle();
    private AndroidTestRunner mTestRunner;
    // "-e debug true": wait for a debugger to attach before running tests.
    private boolean mDebug;
    // "-e count true": report the number of tests instead of running them.
    private boolean mJustCount;
    // "-e suiteAssignment true": time each test and suggest a suite for it.
    private boolean mSuiteAssignmentMode;
    private int mTestCount;
    // "-e package <pkg>": restrict test discovery to this package.
    private String mPackageOfTests;
    // "-e coverage true": dump emma coverage data after the run completes.
    private boolean mCoverage;
    // "-e coverageFile <path>": overrides DEFAULT_COVERAGE_FILE_PATH.
    private String mCoverageFilePath;
    // "-e delay_msec <n>": idle delay inserted around each test (see WatcherResultPrinter).
    private int mDelayMsec;
255
    /**
     * Parses the instrumentation arguments, builds the test suite to run, configures
     * the {@link AndroidTestRunner}, and kicks off the run on the instrumentation
     * thread (which invokes {@link #onStart()}).
     *
     * @param arguments the "-e" key/value arguments passed to "am instrument"; may be null
     */
    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        boolean includePerformance = false;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            includePerformance = getBooleanArgument(arguments, ARGUMENT_INCLUDE_PERF);
            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                // Best-effort: an unparseable delay leaves mDelayMsec at 0 (no delay).
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (!includePerformance) {
            // Performance tests are excluded unless "-e perf true" was passed.
            testSuiteBuilder.addRequirements(REJECT_PERFORMANCE);
        }

        // Suite selection precedence: explicit classes > package > getTestSuite() > everything.
        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // no package or class bundle arguments were supplied, and no test suite
                    // provided so add all tests in application
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        // NOTE: "setInstrumentaiton" is a long-standing typo in the AndroidTestRunner API.
        mTestRunner.setInstrumentaiton(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(new WatcherResultPrinter(mTestCount));
        }
        start();
    }
333
334 /**
335 * Parses and loads the specified set of test classes
336 * @param testClassArg - comma-separated list of test classes and methods
337 * @param testSuiteBuilder - builder to add tests to
338 */
339 private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
340 String[] testClasses = testClassArg.split(",");
341 for (String testClass : testClasses) {
342 parseTestClass(testClass, testSuiteBuilder);
343 }
344 }
345
346 /**
347 * Parse and load the given test class and, optionally, method
348 * @param testClassName - full package name of test class and optionally method to add. Expected
349 * format: com.android.TestClass#testMethod
350 * @param testSuiteBuilder - builder to add tests to
351 */
352 private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
353 int methodSeparatorIndex = testClassName.indexOf('#');
354 String testMethodName = null;
355
356 if (methodSeparatorIndex > 0) {
357 testMethodName = testClassName.substring(methodSeparatorIndex + 1);
358 testClassName = testClassName.substring(0, methodSeparatorIndex);
359 }
360 testSuiteBuilder.addTestClassByName(testClassName, testMethodName,
361 getTargetContext());
362 }
363
    /**
     * Creates the {@link AndroidTestRunner} that will execute the suite.
     * Subclasses may override this to supply a customized runner.
     */
    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }
367
368 private boolean getBooleanArgument(Bundle arguments, String tag) {
369 String tagString = arguments.getString(tag);
370 return tagString != null && Boolean.parseBoolean(tagString);
371 }
372
373 /*
374 * Returns the size predicate object, corresponding to the "size" argument value.
375 */
376 private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {
377
378 if (SMALL_SUITE.equals(sizeArg)) {
379 return TestPredicates.SELECT_SMALL;
380 } else if (MEDIUM_SUITE.equals(sizeArg)) {
381 return TestPredicates.SELECT_MEDIUM;
382 } else if (LARGE_SUITE.equals(sizeArg)) {
383 return TestPredicates.SELECT_LARGE;
384 } else {
385 return null;
386 }
387 }
388
    /**
     * Runs on the instrumentation thread after {@link #onCreate} calls start().
     * In count mode only the test total is reported; otherwise the suite is run,
     * the textual junit report is captured, and the run finishes with a result bundle.
     */
    @Override
    public void onStart() {
        Looper.prepare();

        if (mJustCount) {
            // "-e count true": report the number of tests and exit without running them.
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                // "-e debug true": block here until a debugger attaches.
                Debug.waitForDebugger();
            }

            // Capture the junit text report in memory so it can be returned to the caller.
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.print(mTestRunner.getTestResult(), runTime);
            } finally {
                // Always report whatever output was produced and finish, even if
                // the run itself threw.
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }
429
    /**
     * Returns the suite to run when no class/package argument was supplied.
     * Delegates to {@link #getAllTests()}, which returns null by default.
     */
    public TestSuite getTestSuite() {
        return getAllTests();
    }
433
    /**
     * Override this to define all of the tests to run in your package.
     *
     * @return the suite to run, or {@code null} (the default) to let the runner
     *         discover tests on its own
     */
    public TestSuite getAllTests() {
        return null;
    }
440
    /**
     * Override this to provide access to the class loader of your package.
     *
     * @return the package's class loader, or {@code null} (the default)
     */
    public ClassLoader getLoader() {
        return null;
    }
447
448 private void generateCoverageReport() {
449 // use reflection to call emma dump coverage method, to avoid
450 // always statically compiling against emma jar
451 java.io.File coverageFile = new java.io.File(getCoverageFilePath());
452 try {
453 Class emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
454 Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
455 coverageFile.getClass(), boolean.class, boolean.class);
456
457 dumpCoverageMethod.invoke(null, coverageFile, false, false);
458
459 } catch (ClassNotFoundException e) {
460 reportEmmaError("Is emma jar on classpath?", e);
461 } catch (SecurityException e) {
462 reportEmmaError(e);
463 } catch (NoSuchMethodException e) {
464 reportEmmaError(e);
465 } catch (IllegalArgumentException e) {
466 reportEmmaError(e);
467 } catch (IllegalAccessException e) {
468 reportEmmaError(e);
469 } catch (InvocationTargetException e) {
470 reportEmmaError(e);
471 }
472 }
473
474 private String getCoverageFilePath() {
475 if (mCoverageFilePath == null) {
476 return DEFAULT_COVERAGE_FILE_PATH;
477 }
478 else {
479 return mCoverageFilePath;
480 }
481 }
482
    /** Reports an emma coverage failure with no additional hint text. */
    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }
486
    /**
     * Logs an emma coverage failure and records it in the run's result bundle.
     *
     * @param hint extra context appended to the error message (may be empty)
     * @param e the failure that prevented coverage generation
     */
    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }
492
    // TODO kill this, use status() and prettyprint model for better output
    /**
     * A {@link ResultPrinter} that emits only the header (elapsed time) and footer
     * (result summary) of the standard junit text report to the supplied stream.
     */
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        // Prints just the summary portions of the report; per-test output comes
        // from the listener callbacks inherited from ResultPrinter.
        synchronized void print(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }
505
    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs. Each test is timed and assigned to the
     * "small"/"medium"/"large" suite based on SMALL_SUITE_MAX_RUNTIME and
     * MEDIUM_SUITE_MAX_RUNTIME, or "NA" when timing is unusable.
     */
    private class SuiteAssignmentPrinter implements TestListener
    {

        private Bundle mTestResult;
        // Wall-clock bounds of the current test; mStartTime is reset to -1 in endTest.
        private long mStartTime;
        private long mEndTime;
        // Set false when the test errors or fails, making its timing meaningless.
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * send a status for the start of a each test, so long tests can be seen as "running"
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            // A failed test's runtime is not representative; suppress the assignment.
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                // Timing unusable (test failed, or startTest never ran): no assignment.
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                // InstrumentationTestCases are never assigned to the small suite,
                // regardless of how quickly they ran.
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }
579
580 /**
581 * This class sends status reports back to the IInstrumentationWatcher
582 */
583 private class WatcherResultPrinter implements TestListener
584 {
585 private final Bundle mResultTemplate;
586 Bundle mTestResult;
587 int mTestNum = 0;
588 int mTestResultCode = 0;
589 String mTestClass = null;
590
591 public WatcherResultPrinter(int numTests) {
592 mResultTemplate = new Bundle();
593 mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
594 mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
595 }
596
597 /**
598 * send a status for the start of a each test, so long tests can be seen as "running"
599 */
600 public void startTest(Test test) {
601 String testClass = test.getClass().getName();
602 mTestResult = new Bundle(mResultTemplate);
603 mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
604 mTestResult.putString(REPORT_KEY_NAME_TEST, ((TestCase) test).getName());
605 mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
606 // pretty printing
607 if (testClass != null && !testClass.equals(mTestClass)) {
608 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
609 String.format("\n%s:", testClass));
610 mTestClass = testClass;
611 } else {
612 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
613 }
614
615 // The delay_msec parameter is normally used to provide buffers of idle time
616 // for power measurement purposes. To make sure there is a delay before and after
617 // every test in a suite, we delay *after* every test (see endTest below) and also
618 // delay *before* the first test. So, delay test1 delay test2 delay.
619
620 try {
621 if (mTestNum == 1) Thread.sleep(mDelayMsec);
622 } catch (InterruptedException e) {
623 throw new IllegalStateException(e);
624 }
625
626 sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
627 mTestResultCode = 0;
628 }
629
630 /**
631 * @see junit.framework.TestListener#addError(Test, Throwable)
632 */
633 public void addError(Test test, Throwable t) {
634 mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
635 mTestResultCode = REPORT_VALUE_RESULT_ERROR;
636 // pretty printing
637 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
638 String.format("\nError in %s:\n%s",
639 ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
640 }
641
642 /**
643 * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
644 */
645 public void addFailure(Test test, AssertionFailedError t) {
646 mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
647 mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
648 // pretty printing
649 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
650 String.format("\nFailure in %s:\n%s",
651 ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
652 }
653
654 /**
655 * @see junit.framework.TestListener#endTest(Test)
656 */
657 public void endTest(Test test) {
658 if (mTestResultCode == 0) {
659 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
660 }
661 sendStatus(mTestResultCode, mTestResult);
662
663 try { // Sleep after every test, if specified
664 Thread.sleep(mDelayMsec);
665 } catch (InterruptedException e) {
666 throw new IllegalStateException(e);
667 }
668 }
669
670 // TODO report the end of the cycle
671 // TODO report runtime for each test
672 }
673}