blob: 37c0f58318e070d6f67f7ef798aea5396e1b26b5 [file] [log] [blame]
mbligh96cf0512008-04-17 15:25:38 +00001#!/usr/bin/python -u
mblighc2514542008-02-19 15:54:26 +00002
Aviv Keshet687d2dc2016-10-20 15:41:16 -07003import collections
Fang Deng49822682014-10-21 16:29:22 -07004import datetime
Aviv Keshet687d2dc2016-10-20 15:41:16 -07005import errno
6import fcntl
Simran Basi1e10e922015-04-16 15:09:56 -07007import json
Aviv Keshet687d2dc2016-10-20 15:41:16 -07008import optparse
9import os
10import socket
Shuqian Zhao31425d52016-12-07 09:35:03 -080011import subprocess
Aviv Keshet687d2dc2016-10-20 15:41:16 -070012import sys
13import traceback
mblighbb7b8912006-10-08 03:59:02 +000014
mbligh96cf0512008-04-17 15:25:38 +000015import common
Benny Peakefeb775c2017-02-08 15:14:14 -080016from autotest_lib.client.common_lib import global_config
jadmanskidb4f9b52008-12-03 22:52:53 +000017from autotest_lib.client.common_lib import mail, pidfile
Fang Deng49822682014-10-21 16:29:22 -070018from autotest_lib.client.common_lib import utils
Simran Basi59ca5ac2016-09-22 16:57:56 -070019from autotest_lib.client.common_lib.cros.graphite import autotest_es
Fang Deng49822682014-10-21 16:29:22 -070020from autotest_lib.frontend import setup_django_environment
Fang Deng9ec66802014-04-28 19:04:33 +000021from autotest_lib.frontend.tko import models as tko_models
Shuqian Zhao19e62fb2017-01-09 10:10:14 -080022from autotest_lib.server import site_utils
Fang Deng49822682014-10-21 16:29:22 -070023from autotest_lib.server.cros.dynamic_suite import constants
24from autotest_lib.site_utils import job_overhead
Dan Shi4c33b6a2016-08-18 16:11:31 -070025from autotest_lib.site_utils import sponge_utils
Dennis Jeffreyf9bef6c2013-08-05 11:01:27 -070026from autotest_lib.tko import db as tko_db, utils as tko_utils
Luigi Semenzatoe7064812017-02-03 14:47:59 -080027from autotest_lib.tko import models, parser_lib
Dennis Jeffreyf9bef6c2013-08-05 11:01:27 -070028from autotest_lib.tko.perf_upload import perf_uploader
mbligh74fc0462007-11-05 20:24:17 +000029
# Bundle of parsing options threaded from the command line (parse_args)
# down into parse_one().  Fields:
#   reparse: bool, re-parse results even if the job already exists in the db.
#   mail_on_failure: bool, email the job owner when tests FAILED.
#   dry_run: bool, parse but do not commit any results to the database.
#   suite_report: bool, attempt to create a suite timeline report when the
#       parsed job is detected to be a suite job.
#   datastore_creds: path to gcloud datastore credentials file, or None.
#   export_to_gcloud_path: path to the export_to_gcloud script, or None.
_ParseOptions = collections.namedtuple(
        'ParseOptions', ['reparse', 'mail_on_failure', 'dry_run', 'suite_report',
                         'datastore_creds', 'export_to_gcloud_path'])
Aviv Keshet687d2dc2016-10-20 15:41:16 -070033
def parse_args():
    """Parse command line arguments for the results parser.

    Builds the optparse option set, validates that at least one results
    directory was supplied, and fills in defaults for the gcloud datastore
    credentials and export_to_gcloud script path when they were not given
    explicitly.

    @return: A tuple (options, args) where options is the optparse Values
             object and args is the list of results directories to parse.
             Exits the process with status 1 if no results directory is given.
    """
    # build up our options parser and parse sys.argv
    parser = optparse.OptionParser()
    parser.add_option("-m", help="Send mail for FAILED tests",
                      dest="mailit", action="store_true")
    parser.add_option("-r", help="Reparse the results of a job",
                      dest="reparse", action="store_true")
    parser.add_option("-o", help="Parse a single results directory",
                      dest="singledir", action="store_true")
    parser.add_option("-l", help=("Levels of subdirectories to include "
                                  "in the job name"),
                      type="int", dest="level", default=1)
    parser.add_option("-n", help="No blocking on an existing parse",
                      dest="noblock", action="store_true")
    parser.add_option("-s", help="Database server hostname",
                      dest="db_host", action="store")
    parser.add_option("-u", help="Database username", dest="db_user",
                      action="store")
    parser.add_option("-p", help="Database password", dest="db_pass",
                      action="store")
    parser.add_option("-d", help="Database name", dest="db_name",
                      action="store")
    parser.add_option("--dry-run", help="Do not actually commit any results.",
                      dest="dry_run", action="store_true", default=False)
    parser.add_option("--write-pidfile",
                      help="write pidfile (.parser_execute)",
                      dest="write_pidfile", action="store_true",
                      default=False)
    parser.add_option("--record-duration",
                      help="Record timing to metadata db",
                      dest="record_duration", action="store_true",
                      default=False)
    parser.add_option("--suite-report",
                      help=("Allows parsing job to attempt to create a suite "
                            "timeline report, if it detects that the job being "
                            "parsed is a suite job."),
                      dest="suite_report", action="store_true",
                      default=False)
    parser.add_option("--datastore-creds",
                      help=("The path to gcloud datastore credentials file, "
                            "which will be used to upload suite timeline "
                            "report to gcloud. If not specified, the one "
                            "defined in shadow_config will be used."),
                      dest="datastore_creds", action="store", default=None)
    parser.add_option("--export-to-gcloud-path",
                      help=("The path to export_to_gcloud script. Please find "
                            "chromite path on your server. The script is under "
                            "chromite/bin/."),
                      dest="export_to_gcloud_path", action="store",
                      default=None)
    options, args = parser.parse_args()

    # we need a results directory
    if len(args) == 0:
        tko_utils.dprint("ERROR: at least one results directory must "
                         "be provided")
        parser.print_help()
        sys.exit(1)

    # Fall back to the shadow_config-defined credentials when --datastore-creds
    # was not passed on the command line.
    if not options.datastore_creds:
        gcloud_creds = global_config.global_config.get_config_value(
            'GCLOUD', 'cidb_datastore_writer_creds', default=None)
        options.datastore_creds = (site_utils.get_creds_abspath(gcloud_creds)
                                   if gcloud_creds else None)

    # Locate the export_to_gcloud script if a path was not given explicitly.
    if not options.export_to_gcloud_path:
        export_script = 'chromiumos/chromite/bin/export_to_gcloud'
        # If it is a lab server, the script is under ~chromeos-test/
        if os.path.exists(os.path.expanduser('~chromeos-test/%s' %
                                             export_script)):
            path = os.path.expanduser('~chromeos-test/%s' % export_script)
        # If it is a local workstation, it is probably under ~/
        elif os.path.exists(os.path.expanduser('~/%s' % export_script)):
            path = os.path.expanduser('~/%s' % export_script)
        # If it is not found anywhere, the default will be set to None.
        else:
            path = None
        options.export_to_gcloud_path = path

    # pass the options back
    return options, args
mbligh74fc0462007-11-05 20:24:17 +0000116
117
def format_failure_message(jobname, kernel, testname, status, reason):
    """Render one failure as a fixed-width, single-line table row.

    Columns are left-justified: job (12), kernel (20), test (12),
    status (10), then the free-form reason.

    @param jobname: String representing the job name.
    @param kernel: String representing the kernel.
    @param testname: String representing the test name.
    @param status: String representing the test status.
    @param reason: String representing the reason.

    @return: Failure message as a string.
    """
    return "%-12s %-20s %-12s %-10s %s" % (
            jobname, kernel, testname, status, reason)
mblighb85e6b02006-10-08 17:20:56 +0000131
mblighbb7b8912006-10-08 03:59:02 +0000132
def mailfailure(jobname, job, message):
    """Send an email about the failure.

    @param jobname: String representing the job name.
    @param job: A job object.
    @param message: The message to mail.
    """
    # Build the header: a link to the results, then a column-aligned table
    # heading produced by format_failure_message.
    header_lines = [
        "",
        "The following tests FAILED for this job",
        "http://%s/results/%s" % (socket.gethostname(), jobname),
        "",
        format_failure_message("Job name", "Kernel", "Test name",
                               "FAIL/WARN", "Failure reason"),
        format_failure_message("=" * 8, "=" * 6, "=" * 8,
                               "=" * 8, "=" * 14),
    ]
    subject = "AUTOTEST: FAILED tests from job %s" % jobname
    mail.send("", job.user, "", subject, "\n".join(header_lines) + message)
mbligh006f2302007-09-13 20:46:46 +0000154
155
def _invalidate_original_tests(orig_job_idx, retry_job_idx):
    """Retry tests invalidates original tests.

    Whenever a retry job is complete, we want to invalidate the original
    job's test results, such that the consumers of the tko database
    (e.g. tko frontend, wmatrix) could figure out which results are the latest.

    When a retry job is parsed, we retrieve the original job's afe_job_id
    from the retry job's keyvals, which is then converted to tko job_idx and
    passed into this method as |orig_job_idx|.

    In this method, we are going to invalidate the rows in tko_tests that are
    associated with the original job by flipping their 'invalid' bit to True.
    In addition, in tko_tests, we also maintain a pointer from the retry results
    to the original results, so that later we can always know which rows in
    tko_tests are retries and which are the corresponding original results.
    This is done by setting the field 'invalidates_test_idx' of the tests
    associated with the retry job.

    For example, assume Job(job_idx=105) are retried by Job(job_idx=108), after
    this method is run, their tko_tests rows will look like:
    __________________________________________________________________________
    test_idx| job_idx | test            | ... | invalid | invalidates_test_idx
    10      | 105     | dummy_Fail.Error| ... | 1       | NULL
    11      | 105     | dummy_Fail.Fail | ... | 1       | NULL
    ...
    20      | 108     | dummy_Fail.Error| ... | 0       | 10
    21      | 108     | dummy_Fail.Fail | ... | 0       | 11
    __________________________________________________________________________
    Note the invalid bits of the rows for Job(job_idx=105) are set to '1'.
    And the 'invalidates_test_idx' fields of the rows for Job(job_idx=108)
    are set to 10 and 11 (the test_idx of the rows for the original job).

    @param orig_job_idx: An integer representing the original job's
                         tko job_idx. Tests associated with this job will
                         be marked as 'invalid'.
    @param retry_job_idx: An integer representing the retry job's
                          tko job_idx. The field 'invalidates_test_idx'
                          of the tests associated with this job will be updated.

    """
    msg = 'orig_job_idx: %s, retry_job_idx: %s' % (orig_job_idx, retry_job_idx)
    if not orig_job_idx or not retry_job_idx:
        tko_utils.dprint('ERROR: Could not invalidate tests: ' + msg)
        # BUG FIX: previously this fell through and ran the queries below
        # with a missing job index; bail out instead.
        return
    # Using django models here makes things easier, but make sure that
    # before this method is called, all other relevant transactions have been
    # committed to avoid race condition. In the long run, we might consider
    # to make the rest of parser use django models.
    orig_tests = tko_models.Test.objects.filter(job__job_idx=orig_job_idx)
    retry_tests = tko_models.Test.objects.filter(job__job_idx=retry_job_idx)

    # Invalidate original tests.
    orig_tests.update(invalid=True)

    # Maintain a dictionary that maps (test, subdir) to original tests.
    # Note that within the scope of a job, (test, subdir) uniquelly
    # identifies a test run, but 'test' does not.
    # In a control file, one could run the same test with different
    # 'subdir_tag', for example,
    #   job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_1')
    #   job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_2')
    # In tko, we will get
    #   (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_1')
    #   (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_2')
    invalidated_tests = {(orig_test.test, orig_test.subdir): orig_test
                         for orig_test in orig_tests}
    for retry in retry_tests:
        # It is possible that (retry.test, retry.subdir) doesn't exist
        # in invalidated_tests. This could happen when the original job
        # didn't run some of its tests. For example, a dut goes offline
        # since the beginning of the job, in which case invalidated_tests
        # will only have one entry for 'SERVER_JOB'.
        orig_test = invalidated_tests.get((retry.test, retry.subdir), None)
        if orig_test:
            retry.invalidates_test = orig_test
            retry.save()
    tko_utils.dprint('DEBUG: Invalidated tests associated to job: ' + msg)
233
234
def parse_one(db, jobname, path, parse_options):
    """Parse a single job. Optionally send email on failure.

    Reads the status log under |path|, builds a job object, writes it to the
    tko database (unless dry_run), uploads perf values, handles retry-job
    invalidation, serializes the job to a protobuf file, optionally dumps a
    suite timeline report, and finally records gs_offloader instructions.

    @param db: database object.
    @param jobname: the tag used to search for existing job in db,
                    e.g. '1234-chromeos-test/host1'
    @param path: The path to the results to be parsed.
    @param parse_options: _ParseOptions instance.
    """
    reparse = parse_options.reparse
    mail_on_failure = parse_options.mail_on_failure
    dry_run = parse_options.dry_run
    suite_report = parse_options.suite_report
    datastore_creds = parse_options.datastore_creds
    export_to_gcloud_path = parse_options.export_to_gcloud_path

    tko_utils.dprint("\nScanning %s (%s)" % (jobname, path))
    old_job_idx = db.find_job(jobname)
    # old tests is a dict from tuple (test_name, subdir) to test_idx
    old_tests = {}
    if old_job_idx is not None:
        # Unless reparsing was requested, an already-parsed job is a no-op.
        if not reparse:
            tko_utils.dprint("! Job is already parsed, done")
            return

        raw_old_tests = db.select("test_idx,subdir,test", "tko_tests",
                                  {"job_idx": old_job_idx})
        if raw_old_tests:
            old_tests = dict(((test, subdir), test_idx)
                             for test_idx, subdir, test in raw_old_tests)

    # look up the status version
    job_keyval = models.job.read_keyval(path)
    status_version = job_keyval.get("status_version", 0)

    # parse out the job
    parser = parser_lib.parser(status_version)
    job = parser.make_job(path)
    # Prefer "status.log"; fall back to the older "status" file name.
    status_log = os.path.join(path, "status.log")
    if not os.path.exists(status_log):
        status_log = os.path.join(path, "status")
    if not os.path.exists(status_log):
        tko_utils.dprint("! Unable to parse job, no status file")
        return

    # parse the status logs
    tko_utils.dprint("+ Parsing dir=%s, jobname=%s" % (path, jobname))
    status_lines = open(status_log).readlines()
    parser.start(job)
    tests = parser.end(status_lines)

    # parser.end can return the same object multiple times, so filter out dups
    job.tests = []
    already_added = set()
    for test in tests:
        if test not in already_added:
            already_added.add(test)
            job.tests.append(test)

    # try and port test_idx over from the old tests, but if old tests stop
    # matching up with new ones just give up
    if reparse and old_job_idx is not None:
        job.index = old_job_idx
        for test in job.tests:
            test_idx = old_tests.pop((test.testname, test.subdir), None)
            if test_idx is not None:
                test.test_idx = test_idx
            else:
                tko_utils.dprint("! Reparse returned new test "
                                 "testname=%r subdir=%r" %
                                 (test.testname, test.subdir))
        # Any old tests left over were not re-produced by the reparse;
        # delete their rows (and all dependent rows) from the database.
        if not dry_run:
            for test_idx in old_tests.itervalues():
                where = {'test_idx' : test_idx}
                db.delete('tko_iteration_result', where)
                db.delete('tko_iteration_perf_value', where)
                db.delete('tko_iteration_attributes', where)
                db.delete('tko_test_attributes', where)
                db.delete('tko_test_labels_tests', {'test_id': test_idx})
                db.delete('tko_tests', where)

    # Derive build/board/suite metadata from the job label, when present.
    job.build = None
    job.board = None
    job.build_version = None
    job.suite = None
    if job.label:
        label_info = site_utils.parse_job_name(job.label)
        if label_info:
            job.build = label_info.get('build', None)
            job.build_version = label_info.get('build_version', None)
            job.board = label_info.get('board', None)
            job.suite = label_info.get('suite', None)

    # Upload job details to Sponge.
    if not dry_run:
        sponge_url = sponge_utils.upload_results(job, log=tko_utils.dprint)
        if sponge_url:
            job.keyval_dict['sponge_url'] = sponge_url

    # check for failures
    message_lines = [""]
    job_successful = True
    for test in job.tests:
        if not test.subdir:
            continue
        tko_utils.dprint("* testname, status, reason: %s %s %s"
                         % (test.subdir, test.status, test.reason))
        if test.status != 'GOOD':
            job_successful = False
            message_lines.append(format_failure_message(
                jobname, test.kernel.base, test.subdir,
                test.status, test.reason))
    try:
        message = "\n".join(message_lines)

        if not dry_run:
            # send out a email report of failure
            if len(message) > 2 and mail_on_failure:
                tko_utils.dprint("Sending email report of failure on %s to %s"
                                 % (jobname, job.user))
                mailfailure(jobname, job, message)

            # write the job into the database.
            # NOTE(review): job_data is only bound on this (non-dry-run)
            # branch; if dry_run and suite_report are both set, the
            # job_data reference in the suite-report block below raises
            # NameError, which is swallowed by that block's except clause.
            job_data = db.insert_job(
                jobname, job,
                parent_job_id=job_keyval.get(constants.PARENT_JOB_ID, None))

            # Upload perf values to the perf dashboard, if applicable.
            for test in job.tests:
                perf_uploader.upload_test(job, test, jobname)

            # Although the cursor has autocommit, we still need to force it to
            # commit existing changes before we can use django models, otherwise
            # it will go into deadlock when django models try to start a new
            # trasaction while the current one has not finished yet.
            db.commit()

            # Handle retry job.
            orig_afe_job_id = job_keyval.get(constants.RETRY_ORIGINAL_JOB_ID,
                                             None)
            if orig_afe_job_id:
                orig_job_idx = tko_models.Job.objects.get(
                    afe_job_id=orig_afe_job_id).job_idx
                _invalidate_original_tests(orig_job_idx, job.index)
    except Exception as e:
        # Record the failure in elasticsearch before re-raising, so parse
        # failures are visible in the metadata db.
        metadata = {'path': path, 'error': str(e),
                    'details': traceback.format_exc()}
        tko_utils.dprint("Hit exception while uploading to tko db:\n%s" %
                         traceback.format_exc())
        autotest_es.post(use_http=True, type_str='parse_failure',
                         metadata=metadata)
        raise e

    # Serializing job into a binary file
    try:
        from autotest_lib.tko import tko_pb2
        from autotest_lib.tko import job_serializer

        serializer = job_serializer.JobSerializer()
        binary_file_name = os.path.join(path, "job.serialize")
        serializer.serialize_to_binary(job, jobname, binary_file_name)

        if reparse:
            # Give the site-specific export hook a chance to run; falls back
            # to the no-op _site_export_dummy when no site hook is defined.
            site_export_file = "autotest_lib.tko.site_export"
            site_export = utils.import_site_function(__file__,
                                                     site_export_file,
                                                     "site_export",
                                                     _site_export_dummy)
            site_export(binary_file_name)

    except ImportError:
        tko_utils.dprint("DEBUG: tko_pb2.py doesn't exist. Create by "
                         "compiling tko/tko.proto.")

    if not dry_run:
        db.commit()

    # Generate a suite report.
    # Check whether this is a suite job, a suite job will be a hostless job, its
    # jobname will be <JOB_ID>-<USERNAME>/hostless, the suite field will not be
    # NULL
    try:
        if suite_report and jobname.endswith('/hostless') and job_data['suite']:
            tko_utils.dprint('Start dumping suite timing report...')
            timing_log = os.path.join(path, 'suite_timing.log')
            dump_cmd = ("%s/site_utils/dump_suite_report.py %s "
                        "--output='%s' --debug" %
                        (common.autotest_dir, job_data['afe_job_id'],
                         timing_log))
            subprocess.check_output(dump_cmd, shell=True)
            tko_utils.dprint('Successfully finish dumping suite timing report')

            if (datastore_creds and export_to_gcloud_path
                and os.path.exists(export_to_gcloud_path)):
                upload_cmd = ("%s %s %s" %
                              (export_to_gcloud_path, datastore_creds,
                               timing_log))
                tko_utils.dprint('Start exporting timeline report to gcloud')
                subprocess.check_output(upload_cmd, shell=True)
                tko_utils.dprint('Successfully export timeline report to '
                                 'gcloud')
            else:
                tko_utils.dprint('DEBUG: skip exporting suite timeline to '
                                 'gcloud, because either gcloud creds or '
                                 'export_to_gcloud script is not found.')
    except Exception as e:
        # Best-effort: suite report problems must not fail the parse.
        tko_utils.dprint("WARNING: fail to dump/export suite report. "
                         "Error:\n%s" % e)

    # Mark GS_OFFLOADER_NO_OFFLOAD in gs_offloader_instructions at the end of
    # the function, so any failure, e.g., db connection error, will stop
    # gs_offloader_instructions being updated, and logs can be uploaded for
    # troubleshooting.
    if job_successful:
        # Check if we should not offload this test's results.
        if job_keyval.get(constants.JOB_OFFLOAD_FAILURES_KEY, False):
            # Update the gs_offloader_instructions json file.
            gs_instructions_file = os.path.join(
                path, constants.GS_OFFLOADER_INSTRUCTIONS)
            gs_offloader_instructions = {}
            if os.path.exists(gs_instructions_file):
                with open(gs_instructions_file, 'r') as f:
                    gs_offloader_instructions = json.load(f)

            gs_offloader_instructions[constants.GS_OFFLOADER_NO_OFFLOAD] = True
            with open(gs_instructions_file, 'w') as f:
                json.dump(gs_offloader_instructions, f)
462
463
def _site_export_dummy(binary_file_name):
    """No-op default export hook.

    Passed to utils.import_site_function() in parse_one() as the fallback
    'site_export' implementation when no site-specific module exists.
    """
    pass
mbligh26b992b2008-02-19 15:46:21 +0000466
Dan Shi5f626332016-01-27 15:25:58 -0800467
def _get_job_subdirs(path):
    """
    Returns a list of job subdirectories at path. Returns None if the test
    is itself a job directory. Does not recurse into the subdirs.

    @param path: Directory to inspect.

    @return: A set of subdirectory names (from .machines, or from the
             directory listing when it contains only subdirectories), or
             None when path looks like a single job directory or is not
             understood.
    """
    # if there's a .machines file, use it to get the subdirs
    machine_list = os.path.join(path, ".machines")
    if os.path.exists(machine_list):
        # FIX: use a context manager so the handle is closed deterministically;
        # the old code opened via the deprecated file() builtin and leaked the
        # file object.
        with open(machine_list) as machines_file:
            subdirs = set(line.strip() for line in machines_file)
        existing_subdirs = set(subdir for subdir in subdirs
                               if os.path.exists(os.path.join(path, subdir)))
        if len(existing_subdirs) != 0:
            return existing_subdirs

    # if this dir contains ONLY subdirectories, return them
    contents = set(os.listdir(path))
    contents.discard(".parse.lock")
    subdirs = set(sub for sub in contents if
                  os.path.isdir(os.path.join(path, sub)))
    if len(contents) == len(subdirs) != 0:
        return subdirs

    # this is a job directory, or something else we don't understand
    return None
492
493
def parse_leaf_path(db, path, level, parse_options):
    """Parse a single leaf (job) directory.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param parse_options: _ParseOptions instance.

    @returns: The job name of the parsed job, e.g. '123-chromeos-test/host1'
    """
    # The job name is the last |level| path components re-joined.
    jobname = "/".join(path.split("/")[-level:])
    try:
        db.run_with_retry(parse_one, db, jobname, path, parse_options)
    except Exception as e:
        # Log and swallow so one bad results directory does not abort the
        # whole parsing run.
        tko_utils.dprint("Error parsing leaf path: %s\nException:\n%s\n%s" %
                         (path, e, traceback.format_exc()))
    return jobname
mbligha48eeb22009-03-11 16:44:43 +0000512
513
def parse_path(db, path, level, parse_options):
    """Parse a results path, recursing into multi-machine job directories.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param parse_options: _ParseOptions instance.

    @returns: A set of job names of the parsed jobs.
              set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    """
    job_subdirs = _get_job_subdirs(path)
    if job_subdirs is None:
        # single machine job
        return set([parse_leaf_path(db, path, level, parse_options)])

    processed_jobs = set()
    # parse status.log in current directory, if it exists. multi-machine
    # synchronous server side tests record output in this directory. without
    # this check, we do not parse these results.
    if os.path.exists(os.path.join(path, 'status.log')):
        processed_jobs.add(parse_leaf_path(db, path, level, parse_options))
    # multi-machine job: descend into each per-machine subdirectory.
    for subdir in job_subdirs:
        processed_jobs.update(
                parse_path(db, os.path.join(path, subdir), level + 1,
                           parse_options))
    return processed_jobs
544
545
def record_parsing(processed_jobs, duration_secs):
    """Record the time spent on parsing to metadata db.

    @param processed_jobs: A set of job names of the parsed jobs.
            set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    @param duration_secs: Total time spent on parsing, in seconds.
    """
    for job_name in processed_jobs:
        job_id, hostname = tko_utils.get_afe_job_id_and_hostname(job_name)
        if job_id and hostname:
            # Attribute the whole parsing duration to each processed job.
            job_overhead.record_state_duration(
                    job_id, hostname, job_overhead.STATUS.PARSING,
                    duration_secs)
        else:
            tko_utils.dprint('ERROR: can not parse job name %s, '
                             'will not send duration to metadata db.'
                             % job_name)
mblighbb7b8912006-10-08 03:59:02 +0000565
566
def main():
    """Main entrance.

    Parses command line args, builds the list of job result directories,
    opens the tko database, and parses each directory under an exclusive
    .parse.lock flock.  Exit status is recorded via the pidfile manager:
    1 on any exception (after posting a failure record to elasticsearch),
    0 on success.
    """
    start_time = datetime.datetime.now()
    # Record the processed jobs so that
    # we can send the duration of parsing to metadata db.
    processed_jobs = set()

    options, args = parse_args()
    parse_options = _ParseOptions(options.reparse, options.mailit,
                                  options.dry_run, options.suite_report,
                                  options.datastore_creds,
                                  options.export_to_gcloud_path)
    results_dir = os.path.abspath(args[0])
    assert os.path.exists(results_dir)

    pid_file_manager = pidfile.PidFileManager("parser", results_dir)

    if options.write_pidfile:
        pid_file_manager.open_file()

    try:
        # build up the list of job dirs to parse
        if options.singledir:
            jobs_list = [results_dir]
        else:
            jobs_list = [os.path.join(results_dir, subdir)
                         for subdir in os.listdir(results_dir)]

        # build up the database
        db = tko_db.db(autocommit=False, host=options.db_host,
                       user=options.db_user, password=options.db_pass,
                       database=options.db_name)

        # parse all the jobs
        for path in jobs_list:
            # Serialize parsers per job directory via an advisory flock on
            # .parse.lock; -n requests a non-blocking attempt.
            lockfile = open(os.path.join(path, ".parse.lock"), "w")
            flags = fcntl.LOCK_EX
            if options.noblock:
                flags |= fcntl.LOCK_NB
            try:
                fcntl.flock(lockfile, flags)
            except IOError, e:
                # lock is not available and nonblock has been requested
                if e.errno == errno.EWOULDBLOCK:
                    lockfile.close()
                    continue
                else:
                    raise # something unexpected happened
            try:
                new_jobs = parse_path(db, path, options.level, parse_options)
                processed_jobs.update(new_jobs)

            finally:
                # Always drop the lock, even if parsing raised.
                fcntl.flock(lockfile, fcntl.LOCK_UN)
                lockfile.close()

    except Exception as e:
        pid_file_manager.close_file(1)

        # Report the terminal failure to elasticsearch before re-raising.
        metadata = {'results_dir': results_dir,
                    'error': str(e),
                    'details': traceback.format_exc()}
        autotest_es.post(use_http=True, type_str='parse_failure_final',
                         metadata=metadata)

        raise
    else:
        pid_file_manager.close_file(0)
    duration_secs = (datetime.datetime.now() - start_time).total_seconds()
    if options.record_duration:
        record_parsing(processed_jobs, duration_secs)
mbligh71d340d2008-03-05 15:51:16 +0000638
mbligh532cb272007-11-26 18:54:20 +0000639
# Script entry point: parse the results directories given on the command line.
if __name__ == "__main__":
    main()