blob: d5e64596c7319b4deab3a58b9d28fa784f0cb6e9 [file] [log] [blame]
The Android Open Source Project9066cfe2009-03-03 19:31:44 -08001/*
2 * Copyright (C) 2007 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.test;
18
19import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE;
20import android.app.Activity;
21import android.app.Instrumentation;
22import android.os.Bundle;
23import android.os.Debug;
24import android.os.Looper;
25import android.test.suitebuilder.TestMethod;
26import android.test.suitebuilder.TestPredicates;
27import android.test.suitebuilder.TestSuiteBuilder;
28import android.util.Log;
29
30import com.android.internal.util.Predicate;
31
32import junit.framework.AssertionFailedError;
33import junit.framework.Test;
34import junit.framework.TestCase;
35import junit.framework.TestListener;
36import junit.framework.TestResult;
37import junit.framework.TestSuite;
38import junit.runner.BaseTestRunner;
39import junit.textui.ResultPrinter;
40
41import java.io.ByteArrayOutputStream;
42import java.io.File;
43import java.io.PrintStream;
44import java.lang.reflect.InvocationTargetException;
45import java.lang.reflect.Method;
Urs Grob7d41b8c2009-04-18 22:47:14 -070046import java.util.ArrayList;
47import java.util.List;
The Android Open Source Project9066cfe2009-03-03 19:31:44 -080048
49
50/**
51 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
52 * an Android package (application). Typical usage:
53 * <ol>
54 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
55 * against the classes in your package. Typically these are subclassed from:
56 * <ul><li>{@link android.test.ActivityInstrumentationTestCase}</li>
57 * <li>{@link android.test.ActivityUnitTestCase}</li>
58 * <li>{@link android.test.AndroidTestCase}</li>
59 * <li>{@link android.test.ApplicationTestCase}</li>
60 * <li>{@link android.test.InstrumentationTestCase}</li>
61 * <li>{@link android.test.ProviderTestCase}</li>
62 * <li>{@link android.test.ServiceTestCase}</li>
63 * <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 * <li>In an appropriate AndroidManifest.xml, define this instrumentation with
65 * the appropriate android:targetPackage set.
66 * <li>Run the instrumentation using "adb shell am instrument -w",
67 * with no optional arguments, to run all tests (except performance tests).
68 * <li>Run the instrumentation using "adb shell am instrument -w",
69 * with the argument '-e func true' to run all functional tests. These are tests that derive from
70 * {@link android.test.InstrumentationTestCase}.
71 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
73 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
74 * <li>Run the instrumentation using "adb shell am instrument -w",
75 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
76 * </ol>
77 * <p/>
78 * <b>Running all tests:</b> adb shell am instrument -w
79 * com.android.foo/android.test.InstrumentationTestRunner
80 * <p/>
81 * <b>Running all small tests:</b> adb shell am instrument -w
82 * -e size small
83 * com.android.foo/android.test.InstrumentationTestRunner
84 * <p/>
85 * <b>Running all medium tests:</b> adb shell am instrument -w
86 * -e size medium
87 * com.android.foo/android.test.InstrumentationTestRunner
88 * <p/>
89 * <b>Running all large tests:</b> adb shell am instrument -w
90 * -e size large
91 * com.android.foo/android.test.InstrumentationTestRunner
92 * <p/>
93 * <b>Running a single testcase:</b> adb shell am instrument -w
94 * -e class com.android.foo.FooTest
95 * com.android.foo/android.test.InstrumentationTestRunner
96 * <p/>
97 * <b>Running a single test:</b> adb shell am instrument -w
98 * -e class com.android.foo.FooTest#testFoo
99 * com.android.foo/android.test.InstrumentationTestRunner
100 * <p/>
101 * <b>Running multiple tests:</b> adb shell am instrument -w
102 * -e class com.android.foo.FooTest,com.android.foo.TooTest
103 * com.android.foo/android.test.InstrumentationTestRunner
104 * <p/>
105 * <b>Including performance tests:</b> adb shell am instrument -w
106 * -e perf true
107 * com.android.foo/android.test.InstrumentationTestRunner
108 * <p/>
109 * <b>To debug your tests, set a break point in your code and pass:</b>
110 * -e debug true
111 * <p/>
112 * <b>To run in 'log only' mode</b>
113 * -e log true
114 * This option will load and iterate through all test classes and methods, but will bypass actual
115 * test execution. Useful for quickly obtaining info on the tests to be executed by an
116 * instrumentation command.
117 * <p/>
118 * <b>To generate EMMA code coverage:</b>
119 * -e coverage true
120 * Note: this requires an emma instrumented build. By default, the code coverage results file
121 * will be saved as /sdcard/coverage.ec, unless overridden by coverageFile flag (see below)
122 * <p/>
123 * <b> To specify EMMA code coverage results file path:</b>
124 * -e coverageFile /sdcard/myFile.ec
125 * <br/>
126 * in addition to the other arguments.
127 */
128
129/* (not JavaDoc)
130 * Although not necessary in most case, another way to use this class is to extend it and have the
131 * derived class return
132 * the desired test suite from the {@link #getTestSuite()} method. The test suite returned from this
133 * method will be used if no target class is defined in the meta-data or command line argument
134 * parameters. If a derived class is used it needs to be added as an instrumentation to the
135 * AndroidManifest.xml and the command to run it would look like:
136 * <p/>
137 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
138 * <p/>
139 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class.
140 *
141 * This model is used by many existing app tests, but can probably be deprecated.
142 */
143public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {
144
145 /** @hide */
146 public static final String ARGUMENT_TEST_CLASS = "class";
147 /** @hide */
148 public static final String ARGUMENT_TEST_PACKAGE = "package";
149 /** @hide */
150 public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
151 /** @hide */
152 public static final String ARGUMENT_INCLUDE_PERF = "perf";
153 /** @hide */
154 public static final String ARGUMENT_DELAY_MSEC = "delay_msec";
155
156 private static final String SMALL_SUITE = "small";
157 private static final String MEDIUM_SUITE = "medium";
158 private static final String LARGE_SUITE = "large";
159
160 private static final String ARGUMENT_LOG_ONLY = "log";
161
162
163 /**
164 * This constant defines the maximum allowed runtime (in ms) for a test included in the "small" suite.
165 * It is used to make an educated guess at what suite an unlabeled test belongs.
166 */
167 private static final float SMALL_SUITE_MAX_RUNTIME = 100;
168
169 /**
170 * This constant defines the maximum allowed runtime (in ms) for a test included in the "medium" suite.
171 * It is used to make an educated guess at what suite an unlabeled test belongs.
172 */
173 private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;
174
175 /**
176 * The following keys are used in the status bundle to provide structured reports to
177 * an IInstrumentationWatcher.
178 */
179
180 /**
181 * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
182 * identifies InstrumentationTestRunner as the source of the report. This is sent with all
183 * status messages.
184 */
185 public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
186 /**
187 * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
188 * identifies the total number of tests that are being run. This is sent with all status
189 * messages.
190 */
191 public static final String REPORT_KEY_NUM_TOTAL = "numtests";
192 /**
193 * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
194 * identifies the sequence number of the current test. This is sent with any status message
195 * describing a specific test being started or completed.
196 */
197 public static final String REPORT_KEY_NUM_CURRENT = "current";
198 /**
199 * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
200 * identifies the name of the current test class. This is sent with any status message
201 * describing a specific test being started or completed.
202 */
203 public static final String REPORT_KEY_NAME_CLASS = "class";
204 /**
205 * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
206 * identifies the name of the current test. This is sent with any status message
207 * describing a specific test being started or completed.
208 */
209 public static final String REPORT_KEY_NAME_TEST = "test";
210 /**
211 * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
212 * reports the run time in seconds of the current test.
213 */
214 private static final String REPORT_KEY_RUN_TIME = "runtime";
215 /**
216 * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
217 * reports the guessed suite assignment for the current test.
218 */
219 private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
220 /**
221 * The test is starting.
222 */
223 public static final int REPORT_VALUE_RESULT_START = 1;
224 /**
225 * The test completed successfully.
226 */
227 public static final int REPORT_VALUE_RESULT_OK = 0;
228 /**
229 * The test completed with an error.
230 */
231 public static final int REPORT_VALUE_RESULT_ERROR = -1;
232 /**
233 * The test completed with a failure.
234 */
235 public static final int REPORT_VALUE_RESULT_FAILURE = -2;
236 /**
237 * If included in the status bundle sent to an IInstrumentationWatcher, this key
238 * identifies a stack trace describing an error or failure. This is sent with any status
239 * message describing a specific test being completed.
240 */
241 public static final String REPORT_KEY_STACK = "stack";
242
243 private static final String DEFAULT_COVERAGE_FILE_PATH = "/sdcard/coverage.ec";
244
245 private static final String LOG_TAG = "InstrumentationTestRunner";
246
247 private final Bundle mResults = new Bundle();
248 private AndroidTestRunner mTestRunner;
249 private boolean mDebug;
250 private boolean mJustCount;
251 private boolean mSuiteAssignmentMode;
252 private int mTestCount;
253 private String mPackageOfTests;
254 private boolean mCoverage;
255 private String mCoverageFilePath;
256 private int mDelayMsec;
257
258 @Override
259 public void onCreate(Bundle arguments) {
260 super.onCreate(arguments);
261
262 // Apk paths used to search for test classes when using TestSuiteBuilders.
263 String[] apkPaths =
264 {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
265 ClassPathPackageInfoSource.setApkPaths(apkPaths);
266
267 Predicate<TestMethod> testSizePredicate = null;
268 boolean includePerformance = false;
269 String testClassesArg = null;
270 boolean logOnly = false;
271
272 if (arguments != null) {
273 // Test class name passed as an argument should override any meta-data declaration.
274 testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
275 mDebug = getBooleanArgument(arguments, "debug");
276 mJustCount = getBooleanArgument(arguments, "count");
277 mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
278 mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
279 testSizePredicate = getSizePredicateFromArg(
280 arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
281 includePerformance = getBooleanArgument(arguments, ARGUMENT_INCLUDE_PERF);
282 logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
283 mCoverage = getBooleanArgument(arguments, "coverage");
284 mCoverageFilePath = arguments.getString("coverageFile");
285
286 try {
287 Object delay = arguments.get(ARGUMENT_DELAY_MSEC); // Accept either string or int
288 if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
289 } catch (NumberFormatException e) {
290 Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
291 }
292 }
293
294 TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
295 getTargetContext().getClassLoader());
296
297 if (testSizePredicate != null) {
298 testSuiteBuilder.addRequirements(testSizePredicate);
299 }
300 if (!includePerformance) {
301 testSuiteBuilder.addRequirements(REJECT_PERFORMANCE);
302 }
303
304 if (testClassesArg == null) {
The Android Open Source Project9066cfe2009-03-03 19:31:44 -0800305 if (mPackageOfTests != null) {
306 testSuiteBuilder.includePackages(mPackageOfTests);
307 } else {
Brett Chabot2c62f842009-03-31 17:07:19 -0700308 TestSuite testSuite = getTestSuite();
309 if (testSuite != null) {
310 testSuiteBuilder.addTestSuite(testSuite);
311 } else {
312 // no package or class bundle arguments were supplied, and no test suite
313 // provided so add all tests in application
314 testSuiteBuilder.includePackages("");
315 }
The Android Open Source Project9066cfe2009-03-03 19:31:44 -0800316 }
317 } else {
318 parseTestClasses(testClassesArg, testSuiteBuilder);
319 }
Urs Grob7d41b8c2009-04-18 22:47:14 -0700320
321 testSuiteBuilder.addRequirements(getBuilderRequirements());
The Android Open Source Project9066cfe2009-03-03 19:31:44 -0800322
323 mTestRunner = getAndroidTestRunner();
324 mTestRunner.setContext(getTargetContext());
325 mTestRunner.setInstrumentaiton(this);
326 mTestRunner.setSkipExecution(logOnly);
327 mTestRunner.setTest(testSuiteBuilder.build());
328 mTestCount = mTestRunner.getTestCases().size();
329 if (mSuiteAssignmentMode) {
330 mTestRunner.addTestListener(new SuiteAssignmentPrinter());
331 } else {
332 mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
333 mTestRunner.addTestListener(new WatcherResultPrinter(mTestCount));
334 }
335 start();
336 }
337
Urs Grob7d41b8c2009-04-18 22:47:14 -0700338 List<Predicate<TestMethod>> getBuilderRequirements() {
339 return new ArrayList<Predicate<TestMethod>>();
340 }
341
The Android Open Source Project9066cfe2009-03-03 19:31:44 -0800342 /**
343 * Parses and loads the specified set of test classes
344 * @param testClassArg - comma-separated list of test classes and methods
345 * @param testSuiteBuilder - builder to add tests to
346 */
347 private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
348 String[] testClasses = testClassArg.split(",");
349 for (String testClass : testClasses) {
350 parseTestClass(testClass, testSuiteBuilder);
351 }
352 }
353
354 /**
355 * Parse and load the given test class and, optionally, method
356 * @param testClassName - full package name of test class and optionally method to add. Expected
357 * format: com.android.TestClass#testMethod
358 * @param testSuiteBuilder - builder to add tests to
359 */
360 private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
361 int methodSeparatorIndex = testClassName.indexOf('#');
362 String testMethodName = null;
363
364 if (methodSeparatorIndex > 0) {
365 testMethodName = testClassName.substring(methodSeparatorIndex + 1);
366 testClassName = testClassName.substring(0, methodSeparatorIndex);
367 }
368 testSuiteBuilder.addTestClassByName(testClassName, testMethodName,
369 getTargetContext());
370 }
371
372 protected AndroidTestRunner getAndroidTestRunner() {
373 return new AndroidTestRunner();
374 }
375
376 private boolean getBooleanArgument(Bundle arguments, String tag) {
377 String tagString = arguments.getString(tag);
378 return tagString != null && Boolean.parseBoolean(tagString);
379 }
380
381 /*
382 * Returns the size predicate object, corresponding to the "size" argument value.
383 */
384 private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {
385
386 if (SMALL_SUITE.equals(sizeArg)) {
387 return TestPredicates.SELECT_SMALL;
388 } else if (MEDIUM_SUITE.equals(sizeArg)) {
389 return TestPredicates.SELECT_MEDIUM;
390 } else if (LARGE_SUITE.equals(sizeArg)) {
391 return TestPredicates.SELECT_LARGE;
392 } else {
393 return null;
394 }
395 }
396
397 @Override
398 public void onStart() {
399 Looper.prepare();
400
401 if (mJustCount) {
402 mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
403 mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
404 finish(Activity.RESULT_OK, mResults);
405 } else {
406 if (mDebug) {
407 Debug.waitForDebugger();
408 }
409
410 ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
411 PrintStream writer = new PrintStream(byteArrayOutputStream);
412 try {
413 StringResultPrinter resultPrinter = new StringResultPrinter(writer);
414
415 mTestRunner.addTestListener(resultPrinter);
416
417 long startTime = System.currentTimeMillis();
418 mTestRunner.runTest();
419 long runTime = System.currentTimeMillis() - startTime;
420
421 resultPrinter.print(mTestRunner.getTestResult(), runTime);
422 } finally {
423 mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
424 String.format("\nTest results for %s=%s",
425 mTestRunner.getTestClassName(),
426 byteArrayOutputStream.toString()));
427
428 if (mCoverage) {
429 generateCoverageReport();
430 }
431 writer.close();
432
433 finish(Activity.RESULT_OK, mResults);
434 }
435 }
436 }
437
438 public TestSuite getTestSuite() {
439 return getAllTests();
440 }
441
442 /**
443 * Override this to define all of the tests to run in your package.
444 */
445 public TestSuite getAllTests() {
446 return null;
447 }
448
449 /**
450 * Override this to provide access to the class loader of your package.
451 */
452 public ClassLoader getLoader() {
453 return null;
454 }
455
456 private void generateCoverageReport() {
457 // use reflection to call emma dump coverage method, to avoid
458 // always statically compiling against emma jar
459 java.io.File coverageFile = new java.io.File(getCoverageFilePath());
460 try {
461 Class emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
462 Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
463 coverageFile.getClass(), boolean.class, boolean.class);
464
465 dumpCoverageMethod.invoke(null, coverageFile, false, false);
466
467 } catch (ClassNotFoundException e) {
468 reportEmmaError("Is emma jar on classpath?", e);
469 } catch (SecurityException e) {
470 reportEmmaError(e);
471 } catch (NoSuchMethodException e) {
472 reportEmmaError(e);
473 } catch (IllegalArgumentException e) {
474 reportEmmaError(e);
475 } catch (IllegalAccessException e) {
476 reportEmmaError(e);
477 } catch (InvocationTargetException e) {
478 reportEmmaError(e);
479 }
480 }
481
482 private String getCoverageFilePath() {
483 if (mCoverageFilePath == null) {
484 return DEFAULT_COVERAGE_FILE_PATH;
485 }
486 else {
487 return mCoverageFilePath;
488 }
489 }
490
491 private void reportEmmaError(Exception e) {
492 reportEmmaError("", e);
493 }
494
495 private void reportEmmaError(String hint, Exception e) {
496 String msg = "Failed to generate emma coverage. " + hint;
497 Log.e(LOG_TAG, msg, e);
498 mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
499 }
500
501 // TODO kill this, use status() and prettyprint model for better output
502 private class StringResultPrinter extends ResultPrinter {
503
504 public StringResultPrinter(PrintStream writer) {
505 super(writer);
506 }
507
508 synchronized void print(TestResult result, long runTime) {
509 printHeader(runTime);
510 printFooter(result);
511 }
512 }
513
514 /**
515 * This class sends status reports back to the IInstrumentationWatcher about
516 * which suite each test belongs.
517 */
518 private class SuiteAssignmentPrinter implements TestListener
519 {
520
521 private Bundle mTestResult;
522 private long mStartTime;
523 private long mEndTime;
524 private boolean mTimingValid;
525
526 public SuiteAssignmentPrinter() {
527 }
528
529 /**
530 * send a status for the start of a each test, so long tests can be seen as "running"
531 */
532 public void startTest(Test test) {
533 mTimingValid = true;
534 mStartTime = System.currentTimeMillis();
535 }
536
537 /**
538 * @see junit.framework.TestListener#addError(Test, Throwable)
539 */
540 public void addError(Test test, Throwable t) {
541 mTimingValid = false;
542 }
543
544 /**
545 * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
546 */
547 public void addFailure(Test test, AssertionFailedError t) {
548 mTimingValid = false;
549 }
550
551 /**
552 * @see junit.framework.TestListener#endTest(Test)
553 */
554 public void endTest(Test test) {
555 float runTime;
556 String assignmentSuite;
557 mEndTime = System.currentTimeMillis();
558 mTestResult = new Bundle();
559
560 if (!mTimingValid || mStartTime < 0) {
561 assignmentSuite = "NA";
562 runTime = -1;
563 } else {
564 runTime = mEndTime - mStartTime;
565 if (runTime < SMALL_SUITE_MAX_RUNTIME
566 && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
567 assignmentSuite = SMALL_SUITE;
568 } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
569 assignmentSuite = MEDIUM_SUITE;
570 } else {
571 assignmentSuite = LARGE_SUITE;
572 }
573 }
574 // Clear mStartTime so that we can verify that it gets set next time.
575 mStartTime = -1;
576
577 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
578 test.getClass().getName() + "#" + ((TestCase) test).getName()
579 + "\nin " + assignmentSuite + " suite\nrunTime: "
580 + String.valueOf(runTime) + "\n");
581 mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
582 mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);
583
584 sendStatus(0, mTestResult);
585 }
586 }
587
588 /**
589 * This class sends status reports back to the IInstrumentationWatcher
590 */
591 private class WatcherResultPrinter implements TestListener
592 {
593 private final Bundle mResultTemplate;
594 Bundle mTestResult;
595 int mTestNum = 0;
596 int mTestResultCode = 0;
597 String mTestClass = null;
598
599 public WatcherResultPrinter(int numTests) {
600 mResultTemplate = new Bundle();
601 mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
602 mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
603 }
604
605 /**
606 * send a status for the start of a each test, so long tests can be seen as "running"
607 */
608 public void startTest(Test test) {
609 String testClass = test.getClass().getName();
610 mTestResult = new Bundle(mResultTemplate);
611 mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
612 mTestResult.putString(REPORT_KEY_NAME_TEST, ((TestCase) test).getName());
613 mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
614 // pretty printing
615 if (testClass != null && !testClass.equals(mTestClass)) {
616 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
617 String.format("\n%s:", testClass));
618 mTestClass = testClass;
619 } else {
620 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
621 }
622
623 // The delay_msec parameter is normally used to provide buffers of idle time
624 // for power measurement purposes. To make sure there is a delay before and after
625 // every test in a suite, we delay *after* every test (see endTest below) and also
626 // delay *before* the first test. So, delay test1 delay test2 delay.
627
628 try {
629 if (mTestNum == 1) Thread.sleep(mDelayMsec);
630 } catch (InterruptedException e) {
631 throw new IllegalStateException(e);
632 }
633
634 sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
635 mTestResultCode = 0;
636 }
637
638 /**
639 * @see junit.framework.TestListener#addError(Test, Throwable)
640 */
641 public void addError(Test test, Throwable t) {
642 mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
643 mTestResultCode = REPORT_VALUE_RESULT_ERROR;
644 // pretty printing
645 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
646 String.format("\nError in %s:\n%s",
647 ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
648 }
649
650 /**
651 * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
652 */
653 public void addFailure(Test test, AssertionFailedError t) {
654 mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
655 mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
656 // pretty printing
657 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
658 String.format("\nFailure in %s:\n%s",
659 ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
660 }
661
662 /**
663 * @see junit.framework.TestListener#endTest(Test)
664 */
665 public void endTest(Test test) {
666 if (mTestResultCode == 0) {
667 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
668 }
669 sendStatus(mTestResultCode, mTestResult);
670
671 try { // Sleep after every test, if specified
672 Thread.sleep(mDelayMsec);
673 } catch (InterruptedException e) {
674 throw new IllegalStateException(e);
675 }
676 }
677
678 // TODO report the end of the cycle
679 // TODO report runtime for each test
680 }
681}