#!/usr/bin/python

'''
Copyright 2012 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''

'''
Rebaselines the given GM tests, on all bots and all configurations.
Must be run from the gm-expected directory. If run from a git or SVN
checkout, the files will be added to the staging area for commit.
'''
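
# Example invocations (a sketch; the flags are the ones defined by the argument
# parser at the bottom of this file, and the path to this script is
# illustrative, since the script must be run from the gm-expected directory as
# noted above):
#
#   python ../trunk/tools/rebaseline.py --dry-run
#   python ../trunk/tools/rebaseline.py --tests aaclip bigmatrix --configs 565 8888
#   python ../trunk/tools/rebaseline.py --subdirs base-shuttle-win7-intel-float --add-new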

# System-level imports
import argparse
import os
import re
import subprocess
import sys
import urllib2

# Imports from local directory
import rebaseline_imagefiles

# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
GM_DIRECTORY = os.path.realpath(
    os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
    sys.path.append(GM_DIRECTORY)
import gm_json

JSON_EXPECTATIONS_FILENAME = 'expected-results.json'

# Mapping of gm-expectations subdir (under
# https://skia.googlecode.com/svn/gm-expected/ )
# to builder name (see list at http://108.170.217.252:10117/builders )
SUBDIR_MAPPING = {
    'base-shuttle-win7-intel-float':
        'Test-Win7-ShuttleA-HD2000-x86-Release',
    'base-shuttle-win7-intel-angle':
        'Test-Win7-ShuttleA-HD2000-x86-Release-ANGLE',
    'base-shuttle-win7-intel-directwrite':
        'Test-Win7-ShuttleA-HD2000-x86-Release-DirectWrite',
    'base-shuttle_ubuntu12_ati5770':
        'Test-Ubuntu12-ShuttleA-ATI5770-x86_64-Release',
    'base-macmini':
        'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Release',
    'base-macmini-lion-float':
        'Test-Mac10.7-MacMini4.1-GeForce320M-x86-Release',
    'base-android-galaxy-nexus':
        'Test-Android-GalaxyNexus-SGX540-Arm7-Debug',
    'base-android-nexus-7':
        'Test-Android-Nexus7-Tegra3-Arm7-Release',
    'base-android-nexus-s':
        'Test-Android-NexusS-SGX540-Arm7-Release',
    'base-android-xoom':
        'Test-Android-Xoom-Tegra2-Arm7-Release',
    'base-android-nexus-10':
        'Test-Android-Nexus10-MaliT604-Arm7-Release',
}


class CommandFailedException(Exception):
    pass

# Object that rebaselines a JSON expectations file (not individual image files).
#
# TODO(epoger): Most of this is just the code from the old ImageRebaseliner...
# some of it will need to be updated in order to properly rebaseline JSON files.
# There is a lot of code duplicated between here and ImageRebaseliner, but
# that's fine because we will delete ImageRebaseliner soon.
class JsonRebaseliner(object):

    # params:
    #  expectations_root: root directory of all expectations
    #  json_base_url: base URL from which to read json_filename
    #  json_filename: filename (under json_base_url) from which to read a
    #                 summary of results; typically "actual-results.json"
    #  tests: list of tests to rebaseline, or None if we should rebaseline
    #         whatever files the JSON results summary file tells us to
    #  configs: which configs to run for each test; this should only be
    #           specified if the list of tests was also specified (otherwise,
    #           the JSON file will give us test names and configs)
    #  dry_run: if True, instead of actually downloading files or adding
    #           files to checkout, display a list of operations that
    #           we would normally perform
    #  add_new: if True, add expectations for tests which don't have any yet
    #  missing_json_is_fatal: whether to halt execution if we cannot read a
    #                         JSON actual result summary file
    def __init__(self, expectations_root, json_base_url, json_filename,
                 tests=None, configs=None, dry_run=False,
                 add_new=False, missing_json_is_fatal=False):
        raise ValueError('JsonRebaseliner not yet implemented')  # TODO(epoger)
        if configs and not tests:
            raise ValueError('configs should only be specified if tests ' +
                             'were also specified')
        self._expectations_root = expectations_root
        self._tests = tests
        self._configs = configs
        self._json_base_url = json_base_url
        self._json_filename = json_filename
        self._dry_run = dry_run
        self._add_new = add_new
        self._missing_json_is_fatal = missing_json_is_fatal
        self._googlestorage_gm_actuals_root = (
            'http://chromium-skia-gm.commondatastorage.googleapis.com/gm')
        self._testname_pattern = re.compile(r'(\S+)_(\S+)\.png')
        self._is_svn_checkout = (
            os.path.exists('.svn') or
            os.path.exists(os.path.join(os.pardir, '.svn')))
        self._is_git_checkout = (
            os.path.exists('.git') or
            os.path.exists(os.path.join(os.pardir, '.git')))

    # If dry_run is False, execute subprocess.call(cmd).
    # If dry_run is True, print the command we would have otherwise run.
    # Raises a CommandFailedException if the command fails.
    def _Call(self, cmd):
        if self._dry_run:
            print '%s' % ' '.join(cmd)
            return
        if subprocess.call(cmd) != 0:
            raise CommandFailedException('error running command: ' +
                                         ' '.join(cmd))

    # Download a single actual result from GoogleStorage, returning True if it
    # succeeded.
    def _DownloadFromGoogleStorage(self, infilename, outfilename, all_results):
        match = self._testname_pattern.match(infilename)
        if not match:
            print '# unable to find test_name for infilename %s' % infilename
            return False
        test_name = match.group(1)
        try:
            hash_type, hash_value = all_results[infilename]
        except KeyError:
            print ('# unable to find filename %s in all_results dict' %
                   infilename)
            return False
        except ValueError as e:
            print ('# ValueError reading filename %s from all_results dict: %s'
                   % (infilename, e))
            return False
        url = '%s/%s/%s/%s.png' % (self._googlestorage_gm_actuals_root,
                                   hash_type, test_name, hash_value)
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
            return True
        except CommandFailedException:
            print '# Couldn\'t fetch gs_url %s' % url
            return False

    # Download a single actual result from skia-autogen, returning True if it
    # succeeded.
    def _DownloadFromAutogen(self, infilename, outfilename,
                             expectations_subdir, builder_name):
        url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
               expectations_subdir + '/' + builder_name + '/' +
               expectations_subdir + '/' + infilename)
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
            return True
        except CommandFailedException:
            print '# Couldn\'t fetch autogen_url %s' % url
            return False

    # Download a single file, raising a CommandFailedException if it fails.
    def _DownloadFile(self, source_url, dest_filename):
        # Download into a temporary file and then rename it afterwards,
        # so that we don't corrupt the existing file if it fails midway through.
        temp_filename = os.path.join(os.path.dirname(dest_filename),
                                     '.temp-' + os.path.basename(dest_filename))

        # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on
        # Unix) with a Python HTTP library (which should work cross-platform)
        self._Call([ 'curl', '--fail', '--silent', source_url,
                     '--output', temp_filename ])
        self._Call([ 'mv', temp_filename, dest_filename ])
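        # One possible cross-platform shape for that TODO (just a sketch):
        # read source_url with urllib2.urlopen(), write the bytes to
        # temp_filename, then call os.rename(temp_filename, dest_filename)
        # rather than shelling out to 'curl' and 'mv'.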

    # Returns the full contents of a URL, as a single string.
    #
    # Unlike standard URL handling, we allow relative "file:" URLs;
    # for example, "file:one/two" resolves to the file ./one/two
    # (relative to current working dir)
    def _GetContentsOfUrl(self, url):
        file_prefix = 'file:'
        if url.startswith(file_prefix):
            filename = url[len(file_prefix):]
            return open(filename, 'r').read()
        else:
            return urllib2.urlopen(url).read()

    # Returns a dictionary of actual results from an actual-results.json file.
    #
    # The dictionary returned has this format:
    # {
    #  u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
    #  u'imageblur_8888.png': [u'bitmap-64bitMD5', 4217923806027861152],
    #  u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
    # }
    #
    # If the JSON actual result summary file cannot be loaded, the behavior
    # depends on self._missing_json_is_fatal:
    # - if true: execution will halt with an exception
    # - if false: we will log an error message but return an empty dictionary
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  sections: a list of section names to include in the results, e.g.
    #            [gm_json.JSONKEY_ACTUALRESULTS_FAILED,
    #             gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON] ;
    #            if None, then include ALL sections.
    def _GetActualResults(self, json_url, sections=None):
        try:
            json_contents = self._GetContentsOfUrl(json_url)
        except (urllib2.HTTPError, IOError):
            message = 'unable to load JSON summary URL %s' % json_url
            if self._missing_json_is_fatal:
                raise ValueError(message)
            else:
                print '# %s' % message
                return {}

        json_dict = gm_json.LoadFromString(json_contents)
        results_to_return = {}
        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        if not sections:
            sections = actual_results.keys()
        for section in sections:
            section_results = actual_results[section]
            if section_results:
                results_to_return.update(section_results)
        return results_to_return

    # Returns a list of files that require rebaselining.
    #
    # Note that this returns a list of FILES, like this:
    #  ['imageblur_565.png', 'xfermodes_pdf.png']
    # rather than a list of TESTS, like this:
    #  ['imageblur', 'xfermodes']
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  add_new: if True, then return files listed in any of these sections:
    #            - JSONKEY_ACTUALRESULTS_FAILED
    #            - JSONKEY_ACTUALRESULTS_NOCOMPARISON
    #           if False, then return files listed in these sections:
    #            - JSONKEY_ACTUALRESULTS_FAILED
    #
    def _GetFilesToRebaseline(self, json_url, add_new):
        if self._dry_run:
            print ''
            print '#'
        print ('# Getting files to rebaseline from JSON summary URL %s ...'
               % json_url)
        sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
        if add_new:
            sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
        results_to_rebaseline = self._GetActualResults(json_url=json_url,
                                                       sections=sections)
        files_to_rebaseline = results_to_rebaseline.keys()
        files_to_rebaseline.sort()
        print '# ... found files_to_rebaseline %s' % files_to_rebaseline
        if self._dry_run:
            print '#'
        return files_to_rebaseline

    # Rebaseline a single file.
    def _RebaselineOneFile(self, expectations_subdir, builder_name,
                           infilename, outfilename, all_results):
        if self._dry_run:
            print ''
        print '# ' + infilename

        # First try to download this result image from Google Storage.
        # If that fails, try skia-autogen.
        # If that fails too, just go on to the next file.
        #
        # This is not treated as a fatal failure because not all
        # platforms generate all configs (e.g., Android does not
        # generate PDF).
        #
        # TODO(epoger): Once we are downloading only files that the
        # actual-results.json file told us to, this should become a
        # fatal error. (If the actual-results.json file told us that
        # the test failed with XXX results, we should be able to download
        # those results every time.)
        if not self._DownloadFromGoogleStorage(infilename=infilename,
                                               outfilename=outfilename,
                                               all_results=all_results):
            if not self._DownloadFromAutogen(infilename=infilename,
                                             outfilename=outfilename,
                                             expectations_subdir=expectations_subdir,
                                             builder_name=builder_name):
                print '# Couldn\'t fetch infilename ' + infilename
                return

        # Add this file to version control (if appropriate).
        if self._add_new:
            if self._is_svn_checkout:
                cmd = [ 'svn', 'add', '--quiet', outfilename ]
                self._Call(cmd)
                cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type',
                        'image/png', outfilename ]
                self._Call(cmd)
            elif self._is_git_checkout:
                cmd = [ 'git', 'add', outfilename ]
                self._Call(cmd)

    # Rebaseline the given configs for a single test.
    #
    # params:
    #  expectations_subdir
    #  builder_name
    #  test: a single test to rebaseline
    #  all_results: a dictionary of all actual results
    def _RebaselineOneTest(self, expectations_subdir, builder_name, test,
                           all_results):
        if self._configs:
            configs = self._configs
        else:
            if (expectations_subdir == 'base-shuttle-win7-intel-angle'):
                configs = [ 'angle', 'anglemsaa16' ]
            else:
                configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
                            'msaa4' ]
        if self._dry_run:
            print ''
        print '# ' + expectations_subdir + ':'
        for config in configs:
            infilename = test + '_' + config + '.png'
            outfilename = os.path.join(expectations_subdir, infilename)
            self._RebaselineOneFile(expectations_subdir=expectations_subdir,
                                    builder_name=builder_name,
                                    infilename=infilename,
                                    outfilename=outfilename,
                                    all_results=all_results)

    # Rebaseline all tests/types we specified in the constructor,
    # within this gm-expectations subdir.
    #
    # params:
    #  subdir : e.g. 'base-shuttle-win7-intel-float'
    #  builder : e.g. 'Test-Win7-ShuttleA-HD2000-x86-Release'
    def RebaselineSubdir(self, subdir, builder):
        json_url = '/'.join([self._json_base_url,
                             subdir, builder, subdir,
                             self._json_filename])
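        # With the default --json-base-url and --json-filename values, and a
        # subdir/builder pair from SUBDIR_MAPPING, json_url comes out looking
        # something like this (wrapped here for readability; it is one URL):
        #   http://skia-autogen.googlecode.com/svn/gm-actual/
        #     base-shuttle-win7-intel-float/
        #     Test-Win7-ShuttleA-HD2000-x86-Release/
        #     base-shuttle-win7-intel-float/actual-results.json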
        all_results = self._GetActualResults(json_url=json_url)

        if self._tests:
            for test in self._tests:
                self._RebaselineOneTest(expectations_subdir=subdir,
                                        builder_name=builder,
                                        test=test, all_results=all_results)
        else:  # get the raw list of files that need rebaselining from JSON
            filenames = self._GetFilesToRebaseline(json_url=json_url,
                                                   add_new=self._add_new)
            for filename in filenames:
                outfilename = os.path.join(subdir, filename)
                self._RebaselineOneFile(expectations_subdir=subdir,
                                        builder_name=builder,
                                        infilename=filename,
                                        outfilename=outfilename,
                                        all_results=all_results)

# main...

parser = argparse.ArgumentParser()
parser.add_argument('--add-new', action='store_true',
                    help='in addition to the standard behavior of ' +
                    'updating expectations for failing tests, add ' +
                    'expectations for tests which don\'t have expectations ' +
                    'yet.')
parser.add_argument('--configs', metavar='CONFIG', nargs='+',
                    help='which configurations to rebaseline, e.g. ' +
                    '"--configs 565 8888"; if unspecified, run a default ' +
                    'set of configs. This should ONLY be specified if ' +
                    '--tests has also been specified.')
parser.add_argument('--dry-run', action='store_true',
                    help='instead of actually downloading files or adding ' +
                    'files to checkout, display a list of operations that ' +
                    'we would normally perform')
parser.add_argument('--expectations-root',
                    help='root of expectations directory to update; should ' +
                    'contain one or more base-* subdirectories. Defaults to ' +
                    '%(default)s',
                    default='.')
parser.add_argument('--json-base-url',
                    help='base URL from which to read JSON_FILENAME ' +
                    'files; defaults to %(default)s',
                    default='http://skia-autogen.googlecode.com/svn/gm-actual')
parser.add_argument('--json-filename',
                    help='filename (under JSON_BASE_URL) to read a summary ' +
                    'of results from; defaults to %(default)s',
                    default='actual-results.json')
parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
                    help='which platform subdirectories to rebaseline; ' +
                    'if unspecified, rebaseline all subdirs, same as ' +
                    '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys())))
parser.add_argument('--tests', metavar='TEST', nargs='+',
                    help='which tests to rebaseline, e.g. ' +
                    '"--tests aaclip bigmatrix"; if unspecified, then all ' +
                    'failing tests (according to the actual-results.json ' +
                    'file) will be rebaselined.')
args = parser.parse_args()
if args.subdirs:
    subdirs = args.subdirs
    missing_json_is_fatal = True
else:
    subdirs = sorted(SUBDIR_MAPPING.keys())
    missing_json_is_fatal = False
for subdir in subdirs:
    if subdir not in SUBDIR_MAPPING:
        raise Exception(('unrecognized platform subdir "%s"; ' +
                         'should be one of %s') % (
                             subdir, SUBDIR_MAPPING.keys()))
    builder = SUBDIR_MAPPING[subdir]

    # We instantiate different Rebaseliner objects depending
    # on whether we are rebaselining an expected-results.json file, or
    # individual image files. Different gm-expected subdirectories may move
    # from individual image files to JSON-format expectations at different
    # times, so we need to make this determination per subdirectory.
    #
    # See https://goto.google.com/ChecksumTransitionDetail
    expectations_json_file = os.path.join(args.expectations_root, subdir,
                                          JSON_EXPECTATIONS_FILENAME)
    if os.path.isfile(expectations_json_file):
        sys.stderr.write('ERROR: JsonRebaseliner is not implemented yet.\n')
        sys.exit(1)
        rebaseliner = JsonRebaseliner(
            expectations_root=args.expectations_root,
            tests=args.tests, configs=args.configs,
            dry_run=args.dry_run,
            json_base_url=args.json_base_url,
            json_filename=args.json_filename,
            add_new=args.add_new,
            missing_json_is_fatal=missing_json_is_fatal)
    else:
        rebaseliner = rebaseline_imagefiles.ImageRebaseliner(
            expectations_root=args.expectations_root,
            tests=args.tests, configs=args.configs,
            dry_run=args.dry_run,
            json_base_url=args.json_base_url,
            json_filename=args.json_filename,
            add_new=args.add_new,
            missing_json_is_fatal=missing_json_is_fatal)
    rebaseliner.RebaselineSubdir(subdir=subdir, builder=builder)