Revert r8535

This restores the single kImageMismatch_ErrorType (replacing the
kRenderModeMismatch_ErrorType / kExpectationsMismatch_ErrorType split),
restores the default value of compare_to_expectations()'s
addToJsonSummary parameter, and removes the --simulatePipePlaybackFailure
flag along with its pipe-playback-failure self-test.

git-svn-id: http://skia.googlecode.com/svn/trunk@8536 2bbb7eff-a529-9590-31e7-b0007b416f81
diff --git a/gm/gm_error.h b/gm/gm_error.h
index aab0ec0..ced3391 100644
--- a/gm/gm_error.h
+++ b/gm/gm_error.h
@@ -26,8 +26,7 @@
         // or off (as long as the number of these errors is 0).
         kNoGpuContext_ErrorType,
 
-        kRenderModeMismatch_ErrorType,
-        kExpectationsMismatch_ErrorType,
+        kImageMismatch_ErrorType,
         kMissingExpectations_ErrorType,
         kWritingReferenceImage_ErrorType,
         kLast_ErrorType = kWritingReferenceImage_ErrorType
@@ -40,10 +39,8 @@
         switch(type) {
         case kNoGpuContext_ErrorType:
             return "NoGpuContext";
-        case kRenderModeMismatch_ErrorType:
-            return "RenderModeMismatch";
-        case kExpectationsMismatch_ErrorType:
-            return "ExpectationsMismatch";
+        case kImageMismatch_ErrorType:
+            return "ImageMismatch";
         case kMissingExpectations_ErrorType:
             return "MissingExpectations";
         case kWritingReferenceImage_ErrorType:
diff --git a/gm/gmmain.cpp b/gm/gmmain.cpp
index 8501064..6cb47aa 100644
--- a/gm/gmmain.cpp
+++ b/gm/gmmain.cpp
@@ -272,8 +272,7 @@
         }
 
         // Things to do only if there is some error condition.
-        SkString fullName = name;
-        fullName.append(renderModeDescriptor);
+        SkString fullName = make_name(name.c_str(), renderModeDescriptor);
         for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
             ErrorType type = static_cast<ErrorType>(typeInt);
             if (errorCombination.includes(type)) {
@@ -684,17 +683,18 @@
      * @param baseNameString name of test without renderModeDescriptor added
      * @param renderModeDescriptor e.g., "-rtree", "-deferred"
      * @param addToJsonSummary whether to add these results (both actual and
-     *        expected) to the JSON summary. Regardless of this setting, if
-     *        we find an image mismatch in this test, we will write these
-     *        results to the JSON summary.  (This is so that we will always
-     *        report errors across rendering modes, such as pipe vs tiled.
-     *        See https://codereview.chromium.org/13650002/ )
+     *        expected) to the JSON summary
+     *
+     * TODO: For now, addToJsonSummary is only set to true within
+     * compare_test_results_to_stored_expectations(), so results of our
+     * in-memory comparisons (Rtree vs regular, etc.) are not written to the
+     * JSON summary.  We may wish to change that.
      */
     ErrorCombination compare_to_expectations(Expectations expectations,
                                              const SkBitmap& actualBitmap,
                                              const SkString& baseNameString,
                                              const char renderModeDescriptor[],
-                                             bool addToJsonSummary) {
+                                             bool addToJsonSummary=false) {
         ErrorCombination errors;
         Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap);
         SkString completeNameString = baseNameString;
@@ -704,14 +704,7 @@
         if (expectations.empty()) {
             errors.add(kMissingExpectations_ErrorType);
         } else if (!expectations.match(actualChecksum)) {
-            addToJsonSummary = true;
-            // The error mode we record depends on whether this was running
-            // in a non-standard renderMode.
-            if ('\0' == *renderModeDescriptor) {
-                errors.add(kExpectationsMismatch_ErrorType);
-            } else {
-                errors.add(kRenderModeMismatch_ErrorType);
-            }
+            errors.add(kImageMismatch_ErrorType);
 
             // Write out the "actuals" for any mismatches, if we have
             // been directed to do so.
@@ -760,7 +753,7 @@
                 // (where we can set ignore-failure to either true or
                 // false), add test cases that exercise ignored
                 // failures (both for kMissingExpectations_ErrorType
-                // and kExpectationsMismatch_ErrorType).
+                // and kImageMismatch_ErrorType).
                 this->fJsonActualResults_FailureIgnored[testName] =
                     actualResults;
             } else {
@@ -779,8 +772,7 @@
                     this->fJsonActualResults_NoComparison[testName] =
                         actualResults;
                 }
-                if (result.includes(kExpectationsMismatch_ErrorType) ||
-                    result.includes(kRenderModeMismatch_ErrorType)) {
+                if (result.includes(kImageMismatch_ErrorType)) {
                     this->fJsonActualResults_Failed[testName] = actualResults;
                 }
             }
@@ -874,11 +866,18 @@
         GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
         SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
 
+        // TODO(epoger): This method is run to compare results across
+        // different rendering modes (as opposed to
+        // compare_test_results_to_stored_expectations(), which
+        // compares results against expectations stored on disk).  If
+        // we would like the GenerateGMs step to distinguish between
+        // those two types of mismatches, we should report image
+        // mismatches in here with a different ErrorType.
         SkASSERT(referenceBitmap);
         SkString name = make_name(gm->shortName(), gRec.fName);
         Expectations expectations(*referenceBitmap);
         return compare_to_expectations(expectations, actualBitmap,
-                                       name, renderModeDescriptor, false);
+                                       name, renderModeDescriptor);
     }
 
     static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
@@ -997,8 +996,9 @@
         return kEmpty_ErrorCombination;
     }
 
-    ErrorCombination test_pipe_playback(GM* gm, const ConfigData& gRec,
-                                        const SkBitmap& referenceBitmap, bool simulateFailure) {
+    ErrorCombination test_pipe_playback(GM* gm,
+                                        const ConfigData& gRec,
+                                        const SkBitmap& referenceBitmap) {
         ErrorCombination errors;
         for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
             SkBitmap bitmap;
@@ -1010,9 +1010,7 @@
             SkGPipeWriter writer;
             SkCanvas* pipeCanvas = writer.startRecording(
               &pipeController, gPipeWritingFlagCombos[i].flags);
-            if (!simulateFailure) {
-                invokeGM(gm, pipeCanvas, false, false);
-            }
+            invokeGM(gm, pipeCanvas, false, false);
             complete_bitmap(&bitmap);
             writer.endRecording();
             SkString string("-pipe");
@@ -1179,7 +1177,6 @@
 DEFINE_string2(resourcePath, i, "", "Directory that stores image resources.");
 DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass.");
 DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass.");
-DEFINE_bool(simulatePipePlaybackFailure, false, "Simulate a rendering failure in pipe mode only.");
 DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay.");
 DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture.");
 DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale "
@@ -1414,6 +1411,10 @@
         errorsForAllModes.add(pictErrors);
     }
 
+    // TODO: add a test in which the RTree rendering results in a
+    // different bitmap than the standard rendering.  It should
+    // show up as failed in the JSON summary, and should be listed
+    // in the stdout also.
     if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_rtree) {
         SkPicture* pict = gmmain.generate_new_picture(
             gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
@@ -1458,8 +1459,7 @@
         ErrorCombination pipeErrors;
 
         if (FLAGS_pipe) {
-            pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
-                                                     FLAGS_simulatePipePlaybackFailure));
+            pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap));
         }
 
         if ((pipeErrors.isEmpty()) &&
diff --git a/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout b/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
index 8b8e61d..3067409 100644
--- a/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
@@ -4,10 +4,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
+GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout b/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
index 2fd0269..84b2737 100644
--- a/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
@@ -4,10 +4,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
+GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout b/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
index 829c05e..a57916e 100644
--- a/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
@@ -4,10 +4,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout b/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
index 5788d69..c67e5bc 100644
--- a/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
@@ -4,10 +4,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout b/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
index 303a23f..25f51b5 100644
--- a/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
@@ -4,10 +4,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout b/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
index 4c4edba..d23ed86 100644
--- a/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
@@ -4,10 +4,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout b/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
index 9a9d91b..147c383 100644
--- a/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
@@ -4,10 +4,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 0 MissingExpectations:
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/no-readpath/output-expected/stdout b/gm/tests/outputs/no-readpath/output-expected/stdout
index 1bdc86d..e3b101e 100644
--- a/gm/tests/outputs/no-readpath/output-expected/stdout
+++ b/gm/tests/outputs/no-readpath/output-expected/stdout
@@ -3,10 +3,9 @@
 GM: ... over  2 configs ["8888", "565"]
 GM: ...  and  7 modes   ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
 GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
 GM: [*] 0 NoGpuContext:
-GM: [*] 0 RenderModeMismatch:
-GM: [*] 0 ExpectationsMismatch:
+GM: [*] 0 ImageMismatch:
 GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
 GM: [*] 0 WritingReferenceImage:
 GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/command_line b/gm/tests/outputs/pipe-playback-failure/output-expected/command_line
deleted file mode 100644
index 950339d..0000000
--- a/gm/tests/outputs/pipe-playback-failure/output-expected/command_line
+++ /dev/null
@@ -1 +0,0 @@
-out/Debug/gm --simulatePipePlaybackFailure --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-pixels.json --writeJsonSummaryPath gm/tests/outputs/pipe-playback-failure/output-actual/json-summary.txt
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/json-summary.txt b/gm/tests/outputs/pipe-playback-failure/output-expected/json-summary.txt
deleted file mode 100644
index 249fe09..0000000
--- a/gm/tests/outputs/pipe-playback-failure/output-expected/json-summary.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-{
-   "actual-results" : {
-      "failed" : {
-         "comparison/selftest1-pipe" : {
-            "checksum" : 4259036727585789440
-         }
-      },
-      "failure-ignored" : null,
-      "no-comparison" : null,
-      "succeeded" : {
-         "565/selftest1" : {
-            "checksum" : 9512553915271796906
-         },
-         "8888/selftest1" : {
-            "checksum" : 14022967492765711532
-         }
-      }
-   },
-   "expected-results" : {
-      "565/selftest1" : {
-         "checksums" : [ 9512553915271796906 ],
-         "ignore-failure" : false
-      },
-      "8888/selftest1" : {
-         "checksums" : [ 14022967492765711532 ],
-         "ignore-failure" : false
-      },
-      "comparison/selftest1-pipe" : {
-         "checksums" : [ 14022967492765711532 ],
-         "ignore-failure" : false
-      }
-   }
-}
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/return_value b/gm/tests/outputs/pipe-playback-failure/output-expected/return_value
deleted file mode 100644
index ace9d03..0000000
--- a/gm/tests/outputs/pipe-playback-failure/output-expected/return_value
+++ /dev/null
@@ -1 +0,0 @@
-255
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/stderr b/gm/tests/outputs/pipe-playback-failure/output-expected/stderr
deleted file mode 100644
index e65d374..0000000
--- a/gm/tests/outputs/pipe-playback-failure/output-expected/stderr
+++ /dev/null
@@ -1 +0,0 @@
-GM: ---- comparison/selftest1-pipe: 60000 (of 60000) differing pixels, max per-channel mismatch R=135 G=246 B=135 A=0
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/stdout b/gm/tests/outputs/pipe-playback-failure/output-expected/stdout
deleted file mode 100644
index 52fc54f..0000000
--- a/gm/tests/outputs/pipe-playback-failure/output-expected/stdout
+++ /dev/null
@@ -1,13 +0,0 @@
-GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-pixels.json
-GM: drawing... selftest1 [300 200]
-GM: Ran 1 GMs
-GM: ... over  2 configs ["8888", "565"]
-GM: ...  and  5 modes   ["pipe", "replay", "rtree", "serialize", "tilegrid"]
-GM: ... so there should be a total of 7 tests.
-GM: Ran 7 tests: NoGpuContext=0 RenderModeMismatch=1 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
-GM: [*] 0 NoGpuContext:
-GM: [*] 1 RenderModeMismatch: comparison/selftest1-pipe
-GM: [*] 0 ExpectationsMismatch:
-GM: [ ] 0 MissingExpectations:
-GM: [*] 0 WritingReferenceImage:
-GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/run.sh b/gm/tests/run.sh
index 093f7d7..7ec08be 100755
--- a/gm/tests/run.sh
+++ b/gm/tests/run.sh
@@ -156,7 +156,4 @@
 # section should be empty.
 gm_test "--hierarchy --match selftest1 $CONFIGS" "$GM_OUTPUTS/no-readpath"
 
-# Test what happens if a subset of the renderModes fail (e.g. pipe)
-gm_test "--simulatePipePlaybackFailure --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/pipe-playback-failure"
-
 echo "All tests passed."