#!/usr/bin/python

'''
Copyright 2012 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''

'''
Rebaselines the given GM tests, on all bots and all configurations.
Must be run from the gm-expected directory.  If run from a git or SVN
checkout, the files will be added to the staging area for commit.
'''
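
# Example invocation (the test/config/subdir values below are illustrative
# only; run from the gm-expected directory):
#   python rebaseline.py --tests aaclip bigmatrix --configs 8888 565 \
#       --subdirs base-shuttle-win7-intel-float --dry-run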

# System-level imports
import argparse
import os
import re
import subprocess
import sys
import urllib2

# Imports from local directory
import rebaseline_imagefiles

# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory.  That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
GM_DIRECTORY = os.path.realpath(
    os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
    sys.path.append(GM_DIRECTORY)
import gm_json

JSON_EXPECTATIONS_FILENAME = 'expected-results.json'

# Mapping of gm-expectations subdir (under
# https://skia.googlecode.com/svn/gm-expected/ )
# to builder name (see list at http://108.170.217.252:10117/builders )
SUBDIR_MAPPING = {
    'base-shuttle-win7-intel-float':
        'Test-Win7-ShuttleA-HD2000-x86-Release',
    'base-shuttle-win7-intel-angle':
        'Test-Win7-ShuttleA-HD2000-x86-Release-ANGLE',
    'base-shuttle-win7-intel-directwrite':
        'Test-Win7-ShuttleA-HD2000-x86-Release-DirectWrite',
    'base-shuttle_ubuntu12_ati5770':
        'Test-Ubuntu12-ShuttleA-ATI5770-x86_64-Release',
    'base-macmini':
        'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Release',
    'base-macmini-lion-float':
        'Test-Mac10.7-MacMini4.1-GeForce320M-x86-Release',
    'base-android-galaxy-nexus':
        'Test-Android-GalaxyNexus-SGX540-Arm7-Debug',
    'base-android-nexus-7':
        'Test-Android-Nexus7-Tegra3-Arm7-Release',
    'base-android-nexus-s':
        'Test-Android-NexusS-SGX540-Arm7-Release',
    'base-android-xoom':
        'Test-Android-Xoom-Tegra2-Arm7-Release',
    'base-android-nexus-10':
        'Test-Android-Nexus10-MaliT604-Arm7-Release',
    'base-android-nexus-4':
        'Test-Android-Nexus4-Adreno320-Arm7-Release',
}


class CommandFailedException(Exception):
    pass


# Object that rebaselines a JSON expectations file (not individual image files).
#
# TODO(epoger): Most of this is just the code from the old ImageRebaseliner...
# some of it will need to be updated in order to properly rebaseline JSON files.
# There is a lot of code duplicated between here and ImageRebaseliner, but
# that's fine because we will delete ImageRebaseliner soon.
class JsonRebaseliner(object):

    # params:
    #  expectations_root: root directory of all expectations
    #  json_base_url: base URL from which to read json_filename
    #  json_filename: filename (under json_base_url) from which to read a
    #                 summary of results; typically "actual-results.json"
    #  tests: list of tests to rebaseline, or None if we should rebaseline
    #         whatever files the JSON results summary file tells us to
    #  configs: which configs to run for each test; this should only be
    #           specified if the list of tests was also specified (otherwise,
    #           the JSON file will give us test names and configs)
    #  dry_run: if True, instead of actually downloading files or adding
    #           files to checkout, display a list of operations that
    #           we would normally perform
    #  add_new: if True, add expectations for tests which don't have any yet
    #  missing_json_is_fatal: whether to halt execution if we cannot read a
    #                         JSON actual result summary file
    def __init__(self, expectations_root, json_base_url, json_filename,
                 tests=None, configs=None, dry_run=False,
                 add_new=False, missing_json_is_fatal=False):
        raise ValueError('JsonRebaseliner not yet implemented')  # TODO(epoger)
        if configs and not tests:
            raise ValueError('configs should only be specified if tests ' +
                             'were specified also')
        self._expectations_root = expectations_root
        self._tests = tests
        self._configs = configs
        self._json_base_url = json_base_url
        self._json_filename = json_filename
        self._dry_run = dry_run
        self._add_new = add_new
        self._missing_json_is_fatal = missing_json_is_fatal
        self._googlestorage_gm_actuals_root = (
            'http://chromium-skia-gm.commondatastorage.googleapis.com/gm')
        self._testname_pattern = re.compile(r'(\S+)_(\S+)\.png')
        self._is_svn_checkout = (
            os.path.exists('.svn') or
            os.path.exists(os.path.join(os.pardir, '.svn')))
        self._is_git_checkout = (
            os.path.exists('.git') or
            os.path.exists(os.path.join(os.pardir, '.git')))

    # If dry_run is False, execute subprocess.call(cmd).
    # If dry_run is True, print the command we would have otherwise run.
    # Raises a CommandFailedException if the command fails.
    def _Call(self, cmd):
        if self._dry_run:
            print '%s' % ' '.join(cmd)
            return
        if subprocess.call(cmd) != 0:
            raise CommandFailedException('error running command: ' +
                                         ' '.join(cmd))

    # Download a single actual result from GoogleStorage, returning True if it
    # succeeded.
    def _DownloadFromGoogleStorage(self, infilename, outfilename, all_results):
        match = self._testname_pattern.match(infilename)
        if not match:
            print '# unable to find test_name for infilename %s' % infilename
            return False
        test_name = match.group(1)
        try:
            hash_type, hash_value = all_results[infilename]
        except KeyError:
            print ('# unable to find filename %s in all_results dict' %
                   infilename)
            return False
        except ValueError as e:
            print ('# ValueError reading filename %s from all_results dict: %s'
                   % (infilename, e))
            return False
        url = '%s/%s/%s/%s.png' % (self._googlestorage_gm_actuals_root,
                                   hash_type, test_name, hash_value)
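        # Illustration (values hypothetical, matching the example dict in
        # _GetActualResults): infilename 'imageblur_8888.png' with entry
        # [u'bitmap-64bitMD5', 4217923806027861152] yields a URL ending in
        # /bitmap-64bitMD5/imageblur/4217923806027861152.png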
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
            return True
        except CommandFailedException:
            print '# Couldn\'t fetch gs_url %s' % url
            return False

    # Download a single actual result from skia-autogen, returning True if it
    # succeeded.
    def _DownloadFromAutogen(self, infilename, outfilename,
                             expectations_subdir, builder_name):
        url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
               expectations_subdir + '/' + builder_name + '/' +
               expectations_subdir + '/' + infilename)
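        # Illustration (file name hypothetical): for subdir
        # 'base-shuttle-win7-intel-float' and builder
        # 'Test-Win7-ShuttleA-HD2000-x86-Release', file 'imageblur_8888.png'
        # yields .../gm-actual/base-shuttle-win7-intel-float/
        #     Test-Win7-ShuttleA-HD2000-x86-Release/
        #     base-shuttle-win7-intel-float/imageblur_8888.png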
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
            return True
        except CommandFailedException:
            print '# Couldn\'t fetch autogen_url %s' % url
            return False

    # Download a single file, raising a CommandFailedException if it fails.
    def _DownloadFile(self, source_url, dest_filename):
        # Download into a temporary file and then rename it afterwards,
        # so that we don't corrupt the existing file if it fails midway thru.
        temp_filename = os.path.join(os.path.dirname(dest_filename),
                                     '.temp-' + os.path.basename(dest_filename))

        # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on
        # Unix) with a Python HTTP library (which should work cross-platform)
        self._Call([ 'curl', '--fail', '--silent', source_url,
                     '--output', temp_filename ])
        self._Call([ 'mv', temp_filename, dest_filename ])

    # Returns the full contents of a URL, as a single string.
    #
    # Unlike standard URL handling, we allow relative "file:" URLs;
    # for example, "file:one/two" resolves to the file ./one/two
    # (relative to current working dir)
    def _GetContentsOfUrl(self, url):
        file_prefix = 'file:'
        if url.startswith(file_prefix):
            filename = url[len(file_prefix):]
            return open(filename, 'r').read()
        else:
            return urllib2.urlopen(url).read()

    # Returns a dictionary of actual results from actual-results.json file.
    #
    # The dictionary returned has this format:
    # {
    #  u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
    #  u'imageblur_8888.png': [u'bitmap-64bitMD5', 4217923806027861152],
    #  u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
    # }
    #
    # If the JSON actual result summary file cannot be loaded, the behavior
    # depends on self._missing_json_is_fatal:
    # - if true: execution will halt with an exception
    # - if false: we will log an error message but return an empty dictionary
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  sections: a list of section names to include in the results, e.g.
    #            [gm_json.JSONKEY_ACTUALRESULTS_FAILED,
    #             gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON] ;
    #            if None, then include ALL sections.
    def _GetActualResults(self, json_url, sections=None):
        try:
            json_contents = self._GetContentsOfUrl(json_url)
        except (urllib2.HTTPError, IOError):
            message = 'unable to load JSON summary URL %s' % json_url
            if self._missing_json_is_fatal:
                raise ValueError(message)
            else:
                print '# %s' % message
                return {}

        json_dict = gm_json.LoadFromString(json_contents)
        results_to_return = {}
        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        if not sections:
            sections = actual_results.keys()
        for section in sections:
            section_results = actual_results[section]
            if section_results:
                results_to_return.update(section_results)
        return results_to_return

    # Returns a list of files that require rebaselining.
    #
    # Note that this returns a list of FILES, like this:
    #  ['imageblur_565.png', 'xfermodes_pdf.png']
    # rather than a list of TESTS, like this:
    #  ['imageblur', 'xfermodes']
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  add_new: if True, then return files listed in any of these sections:
    #            - JSONKEY_ACTUALRESULTS_FAILED
    #            - JSONKEY_ACTUALRESULTS_NOCOMPARISON
    #           if False, then return files listed in these sections:
    #            - JSONKEY_ACTUALRESULTS_FAILED
    #
    def _GetFilesToRebaseline(self, json_url, add_new):
        if self._dry_run:
            print ''
            print '#'
        print ('# Getting files to rebaseline from JSON summary URL %s ...'
               % json_url)
        sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
        if add_new:
            sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
        results_to_rebaseline = self._GetActualResults(json_url=json_url,
                                                       sections=sections)
        files_to_rebaseline = results_to_rebaseline.keys()
        files_to_rebaseline.sort()
        print '# ... found files_to_rebaseline %s' % files_to_rebaseline
        if self._dry_run:
            print '#'
        return files_to_rebaseline

    # Rebaseline a single file.
    def _RebaselineOneFile(self, expectations_subdir, builder_name,
                           infilename, outfilename, all_results):
        if self._dry_run:
            print ''
        print '# ' + infilename

        # First try to download this result image from Google Storage.
        # If that fails, try skia-autogen.
        # If that fails too, just go on to the next file.
        #
        # This is not treated as a fatal failure because not all
        # platforms generate all configs (e.g., Android does not
        # generate PDF).
        #
        # TODO(epoger): Once we are downloading only files that the
        # actual-results.json file told us to, this should become a
        # fatal error.  (If the actual-results.json file told us that
        # the test failed with XXX results, we should be able to download
        # those results every time.)
        if not self._DownloadFromGoogleStorage(infilename=infilename,
                                               outfilename=outfilename,
                                               all_results=all_results):
            if not self._DownloadFromAutogen(infilename=infilename,
                                             outfilename=outfilename,
                                             expectations_subdir=expectations_subdir,
                                             builder_name=builder_name):
                print '# Couldn\'t fetch infilename ' + infilename
                return

        # Add this file to version control (if appropriate).
        if self._add_new:
            if self._is_svn_checkout:
                cmd = [ 'svn', 'add', '--quiet', outfilename ]
                self._Call(cmd)
                cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type',
                        'image/png', outfilename ]
                self._Call(cmd)
            elif self._is_git_checkout:
                cmd = [ 'git', 'add', outfilename ]
                self._Call(cmd)

    # Rebaseline the given configs for a single test.
    #
    # params:
    #  expectations_subdir
    #  builder_name
    #  test: a single test to rebaseline
    #  all_results: a dictionary of all actual results
    def _RebaselineOneTest(self, expectations_subdir, builder_name, test,
                           all_results):
        if self._configs:
            configs = self._configs
        else:
            if (expectations_subdir == 'base-shuttle-win7-intel-angle'):
                configs = [ 'angle', 'anglemsaa16' ]
            else:
                configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
                            'msaa4' ]
        if self._dry_run:
            print ''
        print '# ' + expectations_subdir + ':'
        for config in configs:
            infilename = test + '_' + config + '.png'
            outfilename = os.path.join(expectations_subdir, infilename)
            self._RebaselineOneFile(expectations_subdir=expectations_subdir,
                                    builder_name=builder_name,
                                    infilename=infilename,
                                    outfilename=outfilename,
                                    all_results=all_results)

    # Rebaseline all tests/types we specified in the constructor,
    # within this gm-expectations subdir.
    #
    # params:
    #  subdir : e.g. 'base-shuttle-win7-intel-float'
    #  builder : e.g. 'Test-Win7-ShuttleA-HD2000-x86-Release'
    def RebaselineSubdir(self, subdir, builder):
        json_url = '/'.join([self._json_base_url,
                             subdir, builder, subdir,
                             self._json_filename])
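        # For illustration, with the default --json-base-url and
        # --json-filename, json_url has the form
        # <json-base-url>/<subdir>/<builder>/<subdir>/actual-results.json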
        all_results = self._GetActualResults(json_url=json_url)

        if self._tests:
            for test in self._tests:
                self._RebaselineOneTest(expectations_subdir=subdir,
                                        builder_name=builder,
                                        test=test, all_results=all_results)
        else:  # get the raw list of files that need rebaselining from JSON
            filenames = self._GetFilesToRebaseline(json_url=json_url,
                                                   add_new=self._add_new)
            for filename in filenames:
                outfilename = os.path.join(subdir, filename)
                self._RebaselineOneFile(expectations_subdir=subdir,
                                        builder_name=builder,
                                        infilename=filename,
                                        outfilename=outfilename,
                                        all_results=all_results)

# main...

parser = argparse.ArgumentParser()
parser.add_argument('--add-new', action='store_true',
                    help='in addition to the standard behavior of ' +
                    'updating expectations for failing tests, add ' +
                    'expectations for tests which don\'t have expectations ' +
                    'yet.')
parser.add_argument('--configs', metavar='CONFIG', nargs='+',
                    help='which configurations to rebaseline, e.g. ' +
                    '"--configs 565 8888"; if unspecified, run a default ' +
                    'set of configs. This should ONLY be specified if ' +
                    '--tests has also been specified.')
parser.add_argument('--dry-run', action='store_true',
                    help='instead of actually downloading files or adding ' +
                    'files to checkout, display a list of operations that ' +
                    'we would normally perform')
parser.add_argument('--expectations-root',
                    help='root of expectations directory to update -- should ' +
                    'contain one or more base-* subdirectories. Defaults to ' +
                    '%(default)s',
                    default='.')
parser.add_argument('--json-base-url',
                    help='base URL from which to read JSON_FILENAME ' +
                    'files; defaults to %(default)s',
                    default='http://skia-autogen.googlecode.com/svn/gm-actual')
parser.add_argument('--json-filename',
                    help='filename (under JSON_BASE_URL) to read a summary ' +
                    'of results from; defaults to %(default)s',
                    default='actual-results.json')
parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
                    help='which platform subdirectories to rebaseline; ' +
                    'if unspecified, rebaseline all subdirs, same as ' +
                    '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys())))
parser.add_argument('--tests', metavar='TEST', nargs='+',
                    help='which tests to rebaseline, e.g. ' +
                    '"--tests aaclip bigmatrix"; if unspecified, then all ' +
                    'failing tests (according to the actual-results.json ' +
                    'file) will be rebaselined.')
args = parser.parse_args()
if args.subdirs:
    subdirs = args.subdirs
    missing_json_is_fatal = True
else:
    subdirs = sorted(SUBDIR_MAPPING.keys())
    missing_json_is_fatal = False
for subdir in subdirs:
    if subdir not in SUBDIR_MAPPING:
        raise Exception(('unrecognized platform subdir "%s"; ' +
                         'should be one of %s') % (
                             subdir, SUBDIR_MAPPING.keys()))
    builder = SUBDIR_MAPPING[subdir]

    # We instantiate different Rebaseliner objects depending
    # on whether we are rebaselining an expected-results.json file, or
    # individual image files.  Different gm-expected subdirectories may move
    # from individual image files to JSON-format expectations at different
    # times, so we need to make this determination per subdirectory.
    #
    # See https://goto.google.com/ChecksumTransitionDetail
    expectations_json_file = os.path.join(args.expectations_root, subdir,
                                          JSON_EXPECTATIONS_FILENAME)
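    # For illustration, with the default --expectations-root of '.', this
    # checks a path like ./base-shuttle-win7-intel-float/expected-results.json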
    if os.path.isfile(expectations_json_file):
        sys.stderr.write('ERROR: JsonRebaseliner is not implemented yet.\n')
        sys.exit(1)
        rebaseliner = JsonRebaseliner(
            expectations_root=args.expectations_root,
            tests=args.tests, configs=args.configs,
            dry_run=args.dry_run,
            json_base_url=args.json_base_url,
            json_filename=args.json_filename,
            add_new=args.add_new,
            missing_json_is_fatal=missing_json_is_fatal)
    else:
        rebaseliner = rebaseline_imagefiles.ImageRebaseliner(
            expectations_root=args.expectations_root,
            tests=args.tests, configs=args.configs,
            dry_run=args.dry_run,
            json_base_url=args.json_base_url,
            json_filename=args.json_filename,
            add_new=args.add_new,
            missing_json_is_fatal=missing_json_is_fatal)
    rebaseliner.RebaselineSubdir(subdir=subdir, builder=builder)