#!/usr/bin/python

'''
Copyright 2012 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''

'''
Rebaselines the given GM tests, on all bots and all configurations.
Must be run from the gm-expected directory. If run from a git or SVN
checkout, the files will be added to the staging area for commit.
'''
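
# Hypothetical example invocation (run from the gm-expected directory; the
# subdir and test names below are illustrative, and the flags are defined by
# the argparse setup at the bottom of this file):
#   python <path-to-skia>/tools/rebaseline.py --dry-run \
#       --subdirs base-shuttle-win7-intel-float --tests imageblur shadertext3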

# System-level imports
import argparse
import os
import re
import subprocess
import sys
import urllib2

# Imports from local directory
import rebaseline_imagefiles

# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
GM_DIRECTORY = os.path.realpath(
    os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
    sys.path.append(GM_DIRECTORY)
import gm_json

JSON_EXPECTATIONS_FILENAME = 'expected-results.json'

# Mapping of gm-expectations subdir (under
# https://skia.googlecode.com/svn/gm-expected/ )
# to builder name (see list at http://108.170.217.252:10117/builders )
SUBDIR_MAPPING = {
    'base-shuttle-win7-intel-float':
        'Test-Win7-ShuttleA-HD2000-x86-Release',
    'base-shuttle-win7-intel-angle':
        'Test-Win7-ShuttleA-HD2000-x86-Release-ANGLE',
    'base-shuttle-win7-intel-directwrite':
        'Test-Win7-ShuttleA-HD2000-x86-Release-DirectWrite',
    'base-shuttle_ubuntu12_ati5770':
        'Test-Ubuntu12-ShuttleA-ATI5770-x86_64-Release',
    'base-macmini':
        'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Release',
    'base-macmini-lion-float':
        'Test-Mac10.7-MacMini4.1-GeForce320M-x86-Release',
    'base-android-galaxy-nexus':
        'Test-Android-GalaxyNexus-SGX540-Arm7-Debug',
    'base-android-nexus-7':
        'Test-Android-Nexus7-Tegra3-Arm7-Release',
    'base-android-nexus-s':
        'Test-Android-NexusS-SGX540-Arm7-Release',
    'base-android-xoom':
        'Test-Android-Xoom-Tegra2-Arm7-Release',
    'base-android-nexus-10':
        'Test-Android-Nexus10-MaliT604-Arm7-Release',
}


class CommandFailedException(Exception):
    pass

# Object that rebaselines a JSON expectations file (not individual image files).
#
# TODO(epoger): Most of this is just the code from the old ImageRebaseliner...
# some of it will need to be updated in order to properly rebaseline JSON files.
# There is a lot of code duplicated between here and ImageRebaseliner, but
# that's fine because we will delete ImageRebaseliner soon.
class JsonRebaseliner(object):

    # params:
    #  expectations_root: root directory of all expectations
    #  json_base_url: base URL from which to read json_filename
    #  json_filename: filename (under json_base_url) from which to read a
    #                 summary of results; typically "actual-results.json"
    #  tests: list of tests to rebaseline, or None if we should rebaseline
    #         whatever files the JSON results summary file tells us to
    #  configs: which configs to run for each test; this should only be
    #           specified if the list of tests was also specified (otherwise,
    #           the JSON file will give us test names and configs)
    #  dry_run: if True, instead of actually downloading files or adding
    #           files to checkout, display a list of operations that
    #           we would normally perform
    #  add_new: if True, add expectations for tests which don't have any yet
    #  missing_json_is_fatal: whether to halt execution if we cannot read a
    #                         JSON actual result summary file
    def __init__(self, expectations_root, json_base_url, json_filename,
                 tests=None, configs=None, dry_run=False,
                 add_new=False, missing_json_is_fatal=False):
        raise ValueError('JsonRebaseliner not yet implemented')  # TODO(epoger)
        if configs and not tests:
            raise ValueError('configs should only be specified if tests ' +
                             'were specified also')
        self._expectations_root = expectations_root
        self._tests = tests
        self._configs = configs
        self._json_base_url = json_base_url
        self._json_filename = json_filename
        self._dry_run = dry_run
        self._add_new = add_new
        self._missing_json_is_fatal = missing_json_is_fatal
        self._googlestorage_gm_actuals_root = (
            'http://chromium-skia-gm.commondatastorage.googleapis.com/gm')
        self._testname_pattern = re.compile(r'(\S+)_(\S+)\.png')
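        # e.g. 'imageblur_565.png' yields test name 'imageblur' and
        # config '565'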
        self._is_svn_checkout = (
            os.path.exists('.svn') or
            os.path.exists(os.path.join(os.pardir, '.svn')))
        self._is_git_checkout = (
            os.path.exists('.git') or
            os.path.exists(os.path.join(os.pardir, '.git')))

    # If dry_run is False, execute subprocess.call(cmd).
    # If dry_run is True, print the command we would have otherwise run.
    # Raises a CommandFailedException if the command fails.
    def _Call(self, cmd):
        if self._dry_run:
            print '%s' % ' '.join(cmd)
            return
        if subprocess.call(cmd) != 0:
            raise CommandFailedException('error running command: ' +
                                         ' '.join(cmd))

    # Download a single actual result from GoogleStorage, returning True if it
    # succeeded.
    def _DownloadFromGoogleStorage(self, infilename, outfilename, all_results):
        match = self._testname_pattern.match(infilename)
        if not match:
            print '# unable to find test_name for infilename %s' % infilename
            return False
        test_name = match.group(1)
        try:
            hash_type, hash_value = all_results[infilename]
        except KeyError:
            print ('# unable to find filename %s in all_results dict' %
                   infilename)
            return False
        except ValueError as e:
            print ('# ValueError reading filename %s from all_results dict: %s'
                   % (infilename, e))
            return False
        url = '%s/%s/%s/%s.png' % (self._googlestorage_gm_actuals_root,
                                   hash_type, test_name, hash_value)
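        # e.g. for the sample all_results entry u'imageblur_565.png':
        # [u'bitmap-64bitMD5', 3359963596899141322], this would produce a URL
        # ending in /bitmap-64bitMD5/imageblur/3359963596899141322.png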
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
            return True
        except CommandFailedException:
            print '# Couldn\'t fetch gs_url %s' % url
            return False

    # Download a single actual result from skia-autogen, returning True if it
    # succeeded.
    def _DownloadFromAutogen(self, infilename, outfilename,
                             expectations_subdir, builder_name):
        url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
               expectations_subdir + '/' + builder_name + '/' +
               expectations_subdir + '/' + infilename)
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
            return True
        except CommandFailedException:
            print '# Couldn\'t fetch autogen_url %s' % url
            return False

    # Download a single file, raising a CommandFailedException if it fails.
    def _DownloadFile(self, source_url, dest_filename):
        # Download into a temporary file and then rename it afterwards,
        # so that we don't corrupt the existing file if it fails midway thru.
        temp_filename = os.path.join(os.path.dirname(dest_filename),
                                     '.temp-' + os.path.basename(dest_filename))

        # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on
        # Unix) with a Python HTTP library (which should work cross-platform)
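        # A rough cross-platform sketch of that idea, using the urllib2 module
        # already imported above (untested here):
        #   with open(temp_filename, 'wb') as f:
        #       f.write(urllib2.urlopen(source_url).read())
        #   os.rename(temp_filename, dest_filename)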
        self._Call([ 'curl', '--fail', '--silent', source_url,
                     '--output', temp_filename ])
        self._Call([ 'mv', temp_filename, dest_filename ])

    # Returns the full contents of a URL, as a single string.
    #
    # Unlike standard URL handling, we allow relative "file:" URLs;
    # for example, "file:one/two" resolves to the file ./one/two
    # (relative to current working dir)
    def _GetContentsOfUrl(self, url):
        file_prefix = 'file:'
        if url.startswith(file_prefix):
            filename = url[len(file_prefix):]
            return open(filename, 'r').read()
        else:
            return urllib2.urlopen(url).read()

    # Returns a dictionary of actual results from the actual-results.json file.
    #
    # The dictionary returned has this format:
    # {
    #  u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
    #  u'imageblur_8888.png': [u'bitmap-64bitMD5', 4217923806027861152],
    #  u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
    # }
    #
    # If the JSON actual result summary file cannot be loaded, the behavior
    # depends on self._missing_json_is_fatal:
    # - if true: execution will halt with an exception
    # - if false: we will log an error message but return an empty dictionary
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  sections: a list of section names to include in the results, e.g.
    #            [gm_json.JSONKEY_ACTUALRESULTS_FAILED,
    #             gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON] ;
    #            if None, then include ALL sections.
    def _GetActualResults(self, json_url, sections=None):
        try:
            json_contents = self._GetContentsOfUrl(json_url)
        except (urllib2.HTTPError, IOError):
            message = 'unable to load JSON summary URL %s' % json_url
            if self._missing_json_is_fatal:
                raise ValueError(message)
            else:
                print '# %s' % message
                return {}

        json_dict = gm_json.LoadFromString(json_contents)
        results_to_return = {}
        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        if not sections:
            sections = actual_results.keys()
        for section in sections:
            section_results = actual_results[section]
            if section_results:
                results_to_return.update(section_results)
        return results_to_return

    # Returns a list of files that require rebaselining.
    #
    # Note that this returns a list of FILES, like this:
    #  ['imageblur_565.png', 'xfermodes_pdf.png']
    # rather than a list of TESTS, like this:
    #  ['imageblur', 'xfermodes']
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  add_new: if True, then return files listed in any of these sections:
    #           - JSONKEY_ACTUALRESULTS_FAILED
    #           - JSONKEY_ACTUALRESULTS_NOCOMPARISON
    #           if False, then return files listed in these sections:
    #           - JSONKEY_ACTUALRESULTS_FAILED
    #
    def _GetFilesToRebaseline(self, json_url, add_new):
        if self._dry_run:
            print ''
            print '#'
        print ('# Getting files to rebaseline from JSON summary URL %s ...'
               % json_url)
        sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
        if add_new:
            sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
        results_to_rebaseline = self._GetActualResults(json_url=json_url,
                                                       sections=sections)
        files_to_rebaseline = results_to_rebaseline.keys()
        files_to_rebaseline.sort()
        print '# ... found files_to_rebaseline %s' % files_to_rebaseline
        if self._dry_run:
            print '#'
        return files_to_rebaseline

    # Rebaseline a single file.
    def _RebaselineOneFile(self, expectations_subdir, builder_name,
                           infilename, outfilename, all_results):
        if self._dry_run:
            print ''
        print '# ' + infilename

        # First try to download this result image from Google Storage.
        # If that fails, try skia-autogen.
        # If that fails too, just go on to the next file.
        #
        # This is not treated as a fatal failure, because not all
        # platforms generate all configs (e.g., Android does not
        # generate PDF).
        #
        # TODO(epoger): Once we are downloading only files that the
        # actual-results.json file told us to, this should become a
        # fatal error. (If the actual-results.json file told us that
        # the test failed with XXX results, we should be able to download
        # those results every time.)
        if not self._DownloadFromGoogleStorage(infilename=infilename,
                                               outfilename=outfilename,
                                               all_results=all_results):
            if not self._DownloadFromAutogen(infilename=infilename,
                                             outfilename=outfilename,
                                             expectations_subdir=expectations_subdir,
                                             builder_name=builder_name):
                print '# Couldn\'t fetch infilename ' + infilename
                return

        # Add this file to version control (if appropriate).
        if self._add_new:
            if self._is_svn_checkout:
                cmd = [ 'svn', 'add', '--quiet', outfilename ]
                self._Call(cmd)
                cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type',
                        'image/png', outfilename ]
                self._Call(cmd)
            elif self._is_git_checkout:
                cmd = [ 'git', 'add', outfilename ]
                self._Call(cmd)

    # Rebaseline the given configs for a single test.
    #
    # params:
    #  expectations_subdir
    #  builder_name
    #  test: a single test to rebaseline
    #  all_results: a dictionary of all actual results
    def _RebaselineOneTest(self, expectations_subdir, builder_name, test,
                           all_results):
        if self._configs:
            configs = self._configs
        else:
            if expectations_subdir == 'base-shuttle-win7-intel-angle':
                configs = [ 'angle', 'anglemsaa16' ]
            else:
                configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
                            'msaa4' ]
        if self._dry_run:
            print ''
        print '# ' + expectations_subdir + ':'
        for config in configs:
            infilename = test + '_' + config + '.png'
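            # e.g. test 'aaclip' with config '8888' yields 'aaclip_8888.png',
            # which is the key used to look this file up in all_results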
            outfilename = os.path.join(expectations_subdir, infilename)
            self._RebaselineOneFile(expectations_subdir=expectations_subdir,
                                    builder_name=builder_name,
                                    infilename=infilename,
                                    outfilename=outfilename,
                                    all_results=all_results)

    # Rebaseline all tests/types we specified in the constructor,
    # within this gm-expectations subdir.
    #
    # params:
    #  subdir : e.g. 'base-shuttle-win7-intel-float'
    #  builder : e.g. 'Test-Win7-ShuttleA-HD2000-x86-Release'
    def RebaselineSubdir(self, subdir, builder):
        json_url = '/'.join([self._json_base_url,
                             subdir, builder, subdir,
                             self._json_filename])
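        # With the default --json-base-url, this builds a URL like (wrapped
        # here for readability; the real URL is a single line):
        #   http://skia-autogen.googlecode.com/svn/gm-actual/
        #     base-shuttle-win7-intel-float/Test-Win7-ShuttleA-HD2000-x86-Release/
        #     base-shuttle-win7-intel-float/actual-results.json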
        all_results = self._GetActualResults(json_url=json_url)

        if self._tests:
            for test in self._tests:
                self._RebaselineOneTest(expectations_subdir=subdir,
                                        builder_name=builder,
                                        test=test, all_results=all_results)
        else:  # get the raw list of files that need rebaselining from JSON
            filenames = self._GetFilesToRebaseline(json_url=json_url,
                                                   add_new=self._add_new)
            for filename in filenames:
                outfilename = os.path.join(subdir, filename)
                self._RebaselineOneFile(expectations_subdir=subdir,
                                        builder_name=builder,
                                        infilename=filename,
                                        outfilename=outfilename,
                                        all_results=all_results)

# main...

parser = argparse.ArgumentParser()
parser.add_argument('--add-new', action='store_true',
                    help='in addition to the standard behavior of ' +
                    'updating expectations for failing tests, add ' +
                    'expectations for tests which don\'t have expectations ' +
                    'yet.')
parser.add_argument('--configs', metavar='CONFIG', nargs='+',
                    help='which configurations to rebaseline, e.g. ' +
                    '"--configs 565 8888"; if unspecified, run a default ' +
                    'set of configs. This should ONLY be specified if ' +
                    '--tests has also been specified.')
parser.add_argument('--dry-run', action='store_true',
                    help='instead of actually downloading files or adding ' +
                    'files to checkout, display a list of operations that ' +
                    'we would normally perform')
parser.add_argument('--expectations-root',
                    help='root of expectations directory to update -- should ' +
                    'contain one or more base-* subdirectories. Defaults to ' +
                    '%(default)s',
                    default='.')
parser.add_argument('--json-base-url',
                    help='base URL from which to read JSON_FILENAME ' +
                    'files; defaults to %(default)s',
                    default='http://skia-autogen.googlecode.com/svn/gm-actual')
parser.add_argument('--json-filename',
                    help='filename (under JSON_BASE_URL) to read a summary ' +
                    'of results from; defaults to %(default)s',
                    default='actual-results.json')
parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
                    help='which platform subdirectories to rebaseline; ' +
                    'if unspecified, rebaseline all subdirs, same as ' +
                    '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys())))
parser.add_argument('--tests', metavar='TEST', nargs='+',
                    help='which tests to rebaseline, e.g. ' +
                    '"--tests aaclip bigmatrix"; if unspecified, then all ' +
                    'failing tests (according to the actual-results.json ' +
                    'file) will be rebaselined.')
args = parser.parse_args()
if args.subdirs:
    subdirs = args.subdirs
    missing_json_is_fatal = True
else:
    subdirs = sorted(SUBDIR_MAPPING.keys())
    missing_json_is_fatal = False
for subdir in subdirs:
    if subdir not in SUBDIR_MAPPING:
        raise Exception(('unrecognized platform subdir "%s"; ' +
                         'should be one of %s') % (
                             subdir, SUBDIR_MAPPING.keys()))
    builder = SUBDIR_MAPPING[subdir]

    # We instantiate different Rebaseliner objects depending
    # on whether we are rebaselining an expected-results.json file, or
    # individual image files. Different gm-expected subdirectories may move
    # from individual image files to JSON-format expectations at different
    # times, so we need to make this determination per subdirectory.
    #
    # See https://goto.google.com/ChecksumTransitionDetail
    expectations_json_file = os.path.join(args.expectations_root, subdir,
                                          JSON_EXPECTATIONS_FILENAME)
    if os.path.isfile(expectations_json_file):
        sys.stderr.write('ERROR: JsonRebaseliner is not implemented yet.\n')
        sys.exit(1)
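        # Note: the construction below is currently unreachable because of the
        # sys.exit(1) above; presumably it is kept so that it can take effect
        # once JsonRebaseliner is actually implemented.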
        rebaseliner = JsonRebaseliner(
            expectations_root=args.expectations_root,
            tests=args.tests, configs=args.configs,
            dry_run=args.dry_run,
            json_base_url=args.json_base_url,
            json_filename=args.json_filename,
            add_new=args.add_new,
            missing_json_is_fatal=missing_json_is_fatal)
    else:
        rebaseliner = rebaseline_imagefiles.ImageRebaseliner(
            expectations_root=args.expectations_root,
            tests=args.tests, configs=args.configs,
            dry_run=args.dry_run,
            json_base_url=args.json_base_url,
            json_filename=args.json_filename,
            add_new=args.add_new,
            missing_json_is_fatal=missing_json_is_fatal)
    rebaseliner.RebaselineSubdir(subdir=subdir, builder=builder)