#!/usr/bin/python

'''
Copyright 2012 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''

'''
Rebaselines the given GM tests, on all bots and all configurations.
Must be run from the gm-expected directory. If run from a git or SVN
checkout, the files will be added to the staging area for commit.
'''
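# Example invocation (hypothetical paths and test names, shown only to
# illustrate the flags defined by the argparse setup at the bottom of this
# file), run from within the gm-expected directory:
#
#   python path/to/trunk/tools/rebaseline.py --dry-run \
#       --subdirs base-android-nexus-10 --tests imageblur xfermodes \
#       --configs 565 8888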

# System-level imports
import argparse
import os
import subprocess
import sys
import urllib2

# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
GM_DIRECTORY = os.path.realpath(
    os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
    sys.path.append(GM_DIRECTORY)
import gm_json


# Mapping of gm-expectations subdir (under
# https://skia.googlecode.com/svn/gm-expected/ )
# to builder name (see list at http://108.170.217.252:10117/builders )
SUBDIR_MAPPING = {
    'base-shuttle-win7-intel-float':
        'Test-Win7-ShuttleA-HD2000-x86-Release',
    'base-shuttle-win7-intel-angle':
        'Test-Win7-ShuttleA-HD2000-x86-Release-ANGLE',
    'base-shuttle-win7-intel-directwrite':
        'Test-Win7-ShuttleA-HD2000-x86-Release-DirectWrite',
    'base-shuttle_ubuntu12_ati5770':
        'Test-Ubuntu12-ShuttleA-ATI5770-x86_64-Release',
    'base-macmini':
        'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Release',
    'base-macmini-lion-float':
        'Test-Mac10.7-MacMini4.1-GeForce320M-x86-Release',
    'base-android-galaxy-nexus':
        'Test-Android-GalaxyNexus-SGX540-Arm7-Debug',
    'base-android-nexus-7':
        'Test-Android-Nexus7-Tegra3-Arm7-Release',
    'base-android-nexus-s':
        'Test-Android-NexusS-SGX540-Arm7-Release',
    'base-android-xoom':
        'Test-Android-Xoom-Tegra2-Arm7-Release',
    'base-android-nexus-10':
        'Test-Android-Nexus10-MaliT604-Arm7-Release',
}


class CommandFailedException(Exception):
    pass

class Rebaseliner(object):

    # params:
    #  json_base_url: base URL from which to read json_filename
    #  json_filename: filename (under json_base_url) from which to read a
    #                 summary of results; typically "actual-results.json"
    #  subdirs: which platform subdirectories to rebaseline; if not specified,
    #           rebaseline all platform subdirectories
    #  tests: list of tests to rebaseline, or None if we should rebaseline
    #         whatever files the JSON results summary file tells us to
    #  configs: which configs to run for each test; this should only be
    #           specified if the list of tests was also specified (otherwise,
    #           the JSON file will give us test names and configs)
    #  dry_run: if True, instead of actually downloading files or adding
    #           files to checkout, display a list of operations that
    #           we would normally perform
    #  add_new: if True, add expectations for tests which don't have any yet
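    #
    # Example construction (hypothetical values, mirroring the command-line
    # defaults and SUBDIR_MAPPING entries defined elsewhere in this file):
    #
    #   rebaseliner = Rebaseliner(
    #       json_base_url='http://skia-autogen.googlecode.com/svn/gm-actual',
    #       json_filename='actual-results.json',
    #       subdirs=['base-android-nexus-10'],
    #       tests=['imageblur'],
    #       dry_run=True)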
    def __init__(self, json_base_url, json_filename,
                 subdirs=None, tests=None, configs=None, dry_run=False,
                 add_new=False):
        if configs and not tests:
            raise ValueError('configs should only be specified if tests ' +
                             'were specified also')
        self._tests = tests
        self._configs = configs
        if not subdirs:
            self._subdirs = sorted(SUBDIR_MAPPING.keys())
            self._missing_json_is_fatal = False
        else:
            self._subdirs = subdirs
            self._missing_json_is_fatal = True
        self._json_base_url = json_base_url
        self._json_filename = json_filename
        self._dry_run = dry_run
        self._add_new = add_new
        self._is_svn_checkout = (
            os.path.exists('.svn') or
            os.path.exists(os.path.join(os.pardir, '.svn')))
        self._is_git_checkout = (
            os.path.exists('.git') or
            os.path.exists(os.path.join(os.pardir, '.git')))

    # If dry_run is False, execute subprocess.call(cmd).
    # If dry_run is True, print the command we would have otherwise run.
    # Raises a CommandFailedException if the command fails.
    def _Call(self, cmd):
        if self._dry_run:
            print '%s' % ' '.join(cmd)
            return
        if subprocess.call(cmd) != 0:
            raise CommandFailedException('error running command: ' +
                                         ' '.join(cmd))

    # Download a single file, raising a CommandFailedException if it fails.
    def _DownloadFile(self, source_url, dest_filename):
        # Download into a temporary file and then rename it afterwards,
        # so that we don't corrupt the existing file if it fails midway thru.
        temp_filename = os.path.join(os.path.dirname(dest_filename),
                                     '.temp-' + os.path.basename(dest_filename))

        # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on
        # Unix) with a Python HTTP library (which should work cross-platform)
        self._Call([ 'curl', '--fail', '--silent', source_url,
                     '--output', temp_filename ])
        self._Call([ 'mv', temp_filename, dest_filename ])
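
    # A rough sketch of what the cross-platform replacement mentioned in the
    # TODO above might look like, using urllib2 (already imported) plus
    # os.rename instead of shelling out to curl/mv. This is an untested,
    # hypothetical alternative, so it is left commented out rather than wired
    # in; note that it also skips the --dry-run handling that _Call provides.
    #
    #   def _DownloadFileViaUrllib2(self, source_url, dest_filename):
    #       temp_filename = os.path.join(
    #           os.path.dirname(dest_filename),
    #           '.temp-' + os.path.basename(dest_filename))
    #       with open(temp_filename, 'wb') as temp_file:
    #           temp_file.write(urllib2.urlopen(source_url).read())
    #       os.rename(temp_filename, dest_filename)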

    # Returns the full contents of a URL, as a single string.
    #
    # Unlike standard URL handling, we allow relative "file:" URLs;
    # for example, "file:one/two" resolves to the file ./one/two
    # (relative to current working dir)
    def _GetContentsOfUrl(self, url):
        file_prefix = 'file:'
        if url.startswith(file_prefix):
            filename = url[len(file_prefix):]
            return open(filename, 'r').read()
        else:
            return urllib2.urlopen(url).read()

    # Returns a list of files that require rebaselining.
    #
    # Note that this returns a list of FILES, like this:
    #  ['imageblur_565.png', 'xfermodes_pdf.png']
    # rather than a list of TESTS, like this:
    #  ['imageblur', 'xfermodes']
    #
    # If the JSON actual result summary file cannot be loaded, the behavior
    # depends on self._missing_json_is_fatal:
    # - if true: execution will halt with an exception
    # - if false: we will log an error message but return an empty list so we
    #   go on to the next platform
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  add_new: if True, then return files listed in any of these sections:
    #           - JSONKEY_ACTUALRESULTS_FAILED
    #           - JSONKEY_ACTUALRESULTS_NOCOMPARISON
    #           if False, then return files listed in these sections:
    #           - JSONKEY_ACTUALRESULTS_FAILED
    #
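    # Illustrative shape of the JSON summary this method consumes, inferred
    # only from the key lookups in the code below (the authoritative schema
    # is whatever gm_json.py and the GM tool actually produce):
    #
    #   { JSONKEY_ACTUALRESULTS: {
    #         JSONKEY_ACTUALRESULTS_FAILED: { 'imageblur_565.png': ... },
    #         JSONKEY_ACTUALRESULTS_NOCOMPARISON: { 'xfermodes_pdf.png': ... } } }
    #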
    def _GetFilesToRebaseline(self, json_url, add_new):
        if self._dry_run:
            print ''
            print '#'
        print ('# Getting files to rebaseline from JSON summary URL %s ...'
               % json_url)
        try:
            json_contents = self._GetContentsOfUrl(json_url)
        except urllib2.HTTPError:
            message = 'unable to load JSON summary URL %s' % json_url
            if self._missing_json_is_fatal:
                raise ValueError(message)
            else:
                print '# %s' % message
                return []

        json_dict = gm_json.LoadFromString(json_contents)
        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
        if add_new:
            sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)

        files_to_rebaseline = []
        for section in sections:
            section_results = actual_results[section]
            if section_results:
                files_to_rebaseline.extend(section_results.keys())

        print '# ... found files_to_rebaseline %s' % files_to_rebaseline
        if self._dry_run:
            print '#'
        return files_to_rebaseline

    # Rebaseline a single file.
    def _RebaselineOneFile(self, expectations_subdir, builder_name,
                           infilename, outfilename):
        if self._dry_run:
            print ''
        print '# ' + infilename
        url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
               expectations_subdir + '/' + builder_name + '/' +
               expectations_subdir + '/' + infilename)
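        # For example, with expectations_subdir='base-android-nexus-10' and
        # infilename='imageblur_565.png', the URL above works out to (names
        # taken from SUBDIR_MAPPING and the comments in this file):
        #   http://skia-autogen.googlecode.com/svn/gm-actual/base-android-nexus-10/Test-Android-Nexus10-MaliT604-Arm7-Release/base-android-nexus-10/imageblur_565.png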

        # Try to download this file, but if that fails, keep going...
        #
        # This is not treated as a fatal failure because not all
        # platforms generate all configs (e.g., Android does not
        # generate PDF).
        #
        # We could tweak the list of configs within this tool to
        # reflect which combinations the bots actually generate, and
        # then fail if any of those expected combinations are
        # missing... but then this tool would become useless every
        # time someone tweaked the configs on the bots without
        # updating this script.
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
        except CommandFailedException:
            print '# Couldn\'t fetch ' + url
            return

        # Add this file to version control (if appropriate).
        if self._add_new:
            if self._is_svn_checkout:
                cmd = [ 'svn', 'add', '--quiet', outfilename ]
                self._Call(cmd)
                cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type',
                        'image/png', outfilename ]
                self._Call(cmd)
            elif self._is_git_checkout:
                cmd = [ 'git', 'add', outfilename ]
                self._Call(cmd)

    # Rebaseline the given configs for a single test.
    #
    # params:
    #  expectations_subdir
    #  builder_name
    #  test: a single test to rebaseline
    def _RebaselineOneTest(self, expectations_subdir, builder_name, test):
        if self._configs:
            configs = self._configs
        else:
            if (expectations_subdir == 'base-shuttle-win7-intel-angle'):
                configs = [ 'angle', 'anglemsaa16' ]
            else:
                configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
                            'msaa4' ]
        if self._dry_run:
            print ''
        print '# ' + expectations_subdir + ':'
        for config in configs:
            infilename = test + '_' + config + '.png'
            outfilename = os.path.join(expectations_subdir, infilename)
            self._RebaselineOneFile(expectations_subdir=expectations_subdir,
                                    builder_name=builder_name,
                                    infilename=infilename,
                                    outfilename=outfilename)

    # Rebaseline all platforms/tests/types we specified in the constructor.
    def RebaselineAll(self):
        for subdir in self._subdirs:
            if not subdir in SUBDIR_MAPPING.keys():
                raise Exception(('unrecognized platform subdir "%s"; ' +
                                 'should be one of %s') % (
                                     subdir, SUBDIR_MAPPING.keys()))
            builder_name = SUBDIR_MAPPING[subdir]
            if self._tests:
                for test in self._tests:
                    self._RebaselineOneTest(expectations_subdir=subdir,
                                            builder_name=builder_name,
                                            test=test)
            else:  # get the raw list of files that need rebaselining from JSON
                json_url = '/'.join([self._json_base_url,
                                     subdir, builder_name, subdir,
                                     self._json_filename])
                filenames = self._GetFilesToRebaseline(json_url=json_url,
                                                       add_new=self._add_new)
                for filename in filenames:
                    outfilename = os.path.join(subdir, filename)
                    self._RebaselineOneFile(expectations_subdir=subdir,
                                            builder_name=builder_name,
                                            infilename=filename,
                                            outfilename=outfilename)

# main...

parser = argparse.ArgumentParser()
parser.add_argument('--add-new', action='store_true',
                    help='in addition to the standard behavior of ' +
                    'updating expectations for failing tests, add ' +
                    'expectations for tests which don\'t have expectations ' +
                    'yet.')
parser.add_argument('--configs', metavar='CONFIG', nargs='+',
                    help='which configurations to rebaseline, e.g. ' +
                    '"--configs 565 8888"; if unspecified, run a default ' +
                    'set of configs. This should ONLY be specified if ' +
                    '--tests has also been specified.')
parser.add_argument('--dry-run', action='store_true',
                    help='instead of actually downloading files or adding ' +
                    'files to checkout, display a list of operations that ' +
                    'we would normally perform')
parser.add_argument('--json-base-url',
                    help='base URL from which to read JSON_FILENAME ' +
                    'files; defaults to %(default)s',
                    default='http://skia-autogen.googlecode.com/svn/gm-actual')
parser.add_argument('--json-filename',
                    help='filename (under JSON_BASE_URL) to read a summary ' +
                    'of results from; defaults to %(default)s',
                    default='actual-results.json')
parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
                    help='which platform subdirectories to rebaseline; ' +
                    'if unspecified, rebaseline all subdirs, same as ' +
                    '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys())))
parser.add_argument('--tests', metavar='TEST', nargs='+',
                    help='which tests to rebaseline, e.g. ' +
                    '"--tests aaclip bigmatrix"; if unspecified, then all ' +
                    'failing tests (according to the actual-results.json ' +
                    'file) will be rebaselined.')
args = parser.parse_args()
rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs,
                          subdirs=args.subdirs, dry_run=args.dry_run,
                          json_base_url=args.json_base_url,
                          json_filename=args.json_filename,
                          add_new=args.add_new)
rebaseliner.RebaselineAll()