#!/usr/bin/python -u

import datetime
import json
import os, sys, optparse, fcntl, errno, traceback, socket

import common
from autotest_lib.client.common_lib import mail, pidfile
from autotest_lib.client.common_lib import utils
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.tko import models as tko_models
from autotest_lib.server.cros.dynamic_suite import constants
from autotest_lib.site_utils import job_overhead
from autotest_lib.site_utils import sponge_utils
from autotest_lib.tko import db as tko_db, utils as tko_utils
from autotest_lib.tko import models, status_lib
from autotest_lib.tko.perf_upload import perf_uploader

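# Example invocation (illustrative only; the results path shown here is
# hypothetical, and the available flags are defined in parse_args() below):
#   tko/parse.py -r -l 2 -o /usr/local/autotest/results/123-debug_user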
def parse_args():
    """Parse args."""
    # build up our options parser and parse sys.argv
    parser = optparse.OptionParser()
    parser.add_option("-m", help="Send mail for FAILED tests",
                      dest="mailit", action="store_true")
    parser.add_option("-r", help="Reparse the results of a job",
                      dest="reparse", action="store_true")
    parser.add_option("-o", help="Parse a single results directory",
                      dest="singledir", action="store_true")
    parser.add_option("-l", help=("Levels of subdirectories to include "
                                  "in the job name"),
                      type="int", dest="level", default=1)
    parser.add_option("-n", help="No blocking on an existing parse",
                      dest="noblock", action="store_true")
    parser.add_option("-s", help="Database server hostname",
                      dest="db_host", action="store")
    parser.add_option("-u", help="Database username", dest="db_user",
                      action="store")
    parser.add_option("-p", help="Database password", dest="db_pass",
                      action="store")
    parser.add_option("-d", help="Database name", dest="db_name",
                      action="store")
    parser.add_option("--write-pidfile",
                      help="write pidfile (.parser_execute)",
                      dest="write_pidfile", action="store_true",
                      default=False)
    parser.add_option("--record-duration",
                      help="Record timing to metadata db",
                      dest="record_duration", action="store_true",
                      default=False)
    options, args = parser.parse_args()

    # we need a results directory
    if len(args) == 0:
        tko_utils.dprint("ERROR: at least one results directory must "
                         "be provided")
        parser.print_help()
        sys.exit(1)

    # pass the options back
    return options, args


def format_failure_message(jobname, kernel, testname, status, reason):
    """Format failure message with the given information.

    @param jobname: String representing the job name.
    @param kernel: String representing the kernel.
    @param testname: String representing the test name.
    @param status: String representing the test status.
    @param reason: String representing the reason.

    @return: Failure message as a string.
    """
    format_string = "%-12s %-20s %-12s %-10s %s"
    return format_string % (jobname, kernel, testname, status, reason)


def mailfailure(jobname, job, message):
    """Send an email about the failure.

    @param jobname: String representing the job name.
    @param job: A job object.
    @param message: The message to mail.
    """
    message_lines = [""]
    message_lines.append("The following tests FAILED for this job")
    message_lines.append("http://%s/results/%s" %
                         (socket.gethostname(), jobname))
    message_lines.append("")
    message_lines.append(format_failure_message("Job name", "Kernel",
                                                "Test name", "FAIL/WARN",
                                                "Failure reason"))
    message_lines.append(format_failure_message("=" * 8, "=" * 6, "=" * 8,
                                                "=" * 8, "=" * 14))
    message_header = "\n".join(message_lines)

    subject = "AUTOTEST: FAILED tests from job %s" % jobname
    mail.send("", job.user, "", subject, message_header + message)


def _invalidate_original_tests(orig_job_idx, retry_job_idx):
    """Invalidate the original tests that a retry job replaces.

    Whenever a retry job is complete, we want to invalidate the original
    job's test results, so that the consumers of the tko database
    (e.g. tko frontend, wmatrix) can figure out which results are the latest.

    When a retry job is parsed, we retrieve the original job's afe_job_id
    from the retry job's keyvals, which is then converted to a tko job_idx and
    passed into this method as |orig_job_idx|.

    In this method, we are going to invalidate the rows in tko_tests that are
    associated with the original job by flipping their 'invalid' bit to True.
    In addition, in tko_tests, we also maintain a pointer from the retry results
    to the original results, so that later we can always know which rows in
    tko_tests are retries and which are the corresponding original results.
    This is done by setting the field 'invalidates_test_idx' of the tests
    associated with the retry job.

    For example, assume Job(job_idx=105) is retried by Job(job_idx=108). After
    this method is run, their tko_tests rows will look like:
    __________________________________________________________________________
    test_idx | job_idx | test             | ... | invalid | invalidates_test_idx
    10       | 105     | dummy_Fail.Error | ... | 1       | NULL
    11       | 105     | dummy_Fail.Fail  | ... | 1       | NULL
    ...
    20       | 108     | dummy_Fail.Error | ... | 0       | 10
    21       | 108     | dummy_Fail.Fail  | ... | 0       | 11
    __________________________________________________________________________
    Note that the invalid bits of the rows for Job(job_idx=105) are set to '1',
    and the 'invalidates_test_idx' fields of the rows for Job(job_idx=108)
    are set to 10 and 11 (the test_idx of the rows for the original job).

    @param orig_job_idx: An integer representing the original job's
                         tko job_idx. Tests associated with this job will
                         be marked as 'invalid'.
    @param retry_job_idx: An integer representing the retry job's
                          tko job_idx. The field 'invalidates_test_idx'
                          of the tests associated with this job will be updated.

    """
    msg = 'orig_job_idx: %s, retry_job_idx: %s' % (orig_job_idx, retry_job_idx)
    if not orig_job_idx or not retry_job_idx:
        tko_utils.dprint('ERROR: Could not invalidate tests: ' + msg)
    # Using django models here makes things easier, but make sure that
    # before this method is called, all other relevant transactions have been
    # committed to avoid a race condition. In the long run, we might consider
    # making the rest of the parser use django models.
    orig_tests = tko_models.Test.objects.filter(job__job_idx=orig_job_idx)
    retry_tests = tko_models.Test.objects.filter(job__job_idx=retry_job_idx)

    # Invalidate original tests.
    orig_tests.update(invalid=True)

    # Maintain a dictionary that maps (test, subdir) to original tests.
    # Note that within the scope of a job, (test, subdir) uniquely
    # identifies a test run, but 'test' does not.
    # In a control file, one could run the same test with different
    # 'subdir_tag', for example,
    #     job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_1')
    #     job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_2')
    # In tko, we will get
    #     (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_1')
    #     (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_2')
    invalidated_tests = {(orig_test.test, orig_test.subdir): orig_test
                         for orig_test in orig_tests}
    for retry in retry_tests:
        # It is possible that (retry.test, retry.subdir) doesn't exist
        # in invalidated_tests. This could happen when the original job
        # didn't run some of its tests, for example because a DUT went
        # offline at the beginning of the job, in which case
        # invalidated_tests will only have one entry for 'SERVER_JOB'.
        orig_test = invalidated_tests.get((retry.test, retry.subdir), None)
        if orig_test:
            retry.invalidates_test = orig_test
            retry.save()
    tko_utils.dprint('DEBUG: Invalidated tests associated to job: ' + msg)


def parse_one(db, jobname, path, reparse, mail_on_failure):
    """Parse a single job. Optionally send email on failure.

    @param db: database object.
    @param jobname: the tag used to search for existing job in db,
                    e.g. '1234-chromeos-test/host1'
    @param path: The path to the results to be parsed.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.

    """
    tko_utils.dprint("\nScanning %s (%s)" % (jobname, path))
    old_job_idx = db.find_job(jobname)
    # old_tests is a dict from tuple (test_name, subdir) to test_idx
    old_tests = {}
    if old_job_idx is not None:
        if not reparse:
            tko_utils.dprint("! Job is already parsed, done")
            return

        raw_old_tests = db.select("test_idx,subdir,test", "tko_tests",
                                  {"job_idx": old_job_idx})
        if raw_old_tests:
            old_tests = dict(((test, subdir), test_idx)
                             for test_idx, subdir, test in raw_old_tests)

    # look up the status version
    job_keyval = models.job.read_keyval(path)
    status_version = job_keyval.get("status_version", 0)

    # parse out the job
    parser = status_lib.parser(status_version)
    job = parser.make_job(path)
    status_log = os.path.join(path, "status.log")
    if not os.path.exists(status_log):
        status_log = os.path.join(path, "status")
    if not os.path.exists(status_log):
        tko_utils.dprint("! Unable to parse job, no status file")
        return

    # parse the status logs
    tko_utils.dprint("+ Parsing dir=%s, jobname=%s" % (path, jobname))
    status_lines = open(status_log).readlines()
    parser.start(job)
    tests = parser.end(status_lines)

    # parser.end can return the same object multiple times, so filter out dups
    job.tests = []
    already_added = set()
    for test in tests:
        if test not in already_added:
            already_added.add(test)
            job.tests.append(test)

    # try to port test_idx over from the old tests, but if old tests stop
    # matching up with new ones just give up
    if reparse and old_job_idx is not None:
        job.index = old_job_idx
        for test in job.tests:
            test_idx = old_tests.pop((test.testname, test.subdir), None)
            if test_idx is not None:
                test.test_idx = test_idx
            else:
                tko_utils.dprint("! Reparse returned new test "
                                 "testname=%r subdir=%r" %
                                 (test.testname, test.subdir))
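        # Delete any leftover old tests (and the rows that depend on them)
        # that were not matched by the reparse, so stale results do not
        # linger in the database.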
        for test_idx in old_tests.itervalues():
            where = {'test_idx': test_idx}
            db.delete('tko_iteration_result', where)
            db.delete('tko_iteration_perf_value', where)
            db.delete('tko_iteration_attributes', where)
            db.delete('tko_test_attributes', where)
            db.delete('tko_test_labels_tests', {'test_id': test_idx})
            db.delete('tko_tests', where)

    # check for failures
    message_lines = [""]
    job_successful = True
    for test in job.tests:
        if not test.subdir:
            continue
        tko_utils.dprint("* testname, status, reason: %s %s %s"
                         % (test.subdir, test.status, test.reason))
        if test.status != 'GOOD':
            job_successful = False
            message_lines.append(format_failure_message(
                jobname, test.kernel.base, test.subdir,
                test.status, test.reason))

    message = "\n".join(message_lines)

    # send out an email report of failure
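    # message_lines holds more than the single leading empty string only when
    # a non-GOOD test appended a failure line above, so a joined message longer
    # than a couple of characters means there is something to report.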
    if len(message) > 2 and mail_on_failure:
        tko_utils.dprint("Sending email report of failure on %s to %s"
                         % (jobname, job.user))
        mailfailure(jobname, job, message)

    # write the job into the database.
    db.insert_job(jobname, job,
                  parent_job_id=job_keyval.get(constants.PARENT_JOB_ID, None))

    # Upload perf values to the perf dashboard, if applicable.
    for test in job.tests:
        perf_uploader.upload_test(job, test, jobname)

    # Although the cursor has autocommit, we still need to force it to commit
    # existing changes before we can use django models, otherwise it
    # will go into deadlock when django models try to start a new transaction
    # while the current one has not finished yet.
    db.commit()

    # Handle retry job.
    orig_afe_job_id = job_keyval.get(constants.RETRY_ORIGINAL_JOB_ID, None)
    if orig_afe_job_id:
        orig_job_idx = tko_models.Job.objects.get(
                afe_job_id=orig_afe_job_id).job_idx
        _invalidate_original_tests(orig_job_idx, job.index)

    # Serialize the job into a binary file.
    try:
        from autotest_lib.tko import tko_pb2
        from autotest_lib.tko import job_serializer

        serializer = job_serializer.JobSerializer()
        binary_file_name = os.path.join(path, "job.serialize")
        serializer.serialize_to_binary(job, jobname, binary_file_name)

        if reparse:
            site_export_file = "autotest_lib.tko.site_export"
            site_export = utils.import_site_function(__file__,
                                                     site_export_file,
                                                     "site_export",
                                                     _site_export_dummy)
            site_export(binary_file_name)

    except ImportError:
        tko_utils.dprint("DEBUG: tko_pb2.py doesn't exist. Create by "
                         "compiling tko/tko.proto.")

    db.commit()

    # Upload job details to Sponge.
    sponge_utils.upload_results(job)

    # Mark GS_OFFLOADER_NO_OFFLOAD in gs_offloader_instructions at the end of
    # the function, so any failure, e.g., db connection error, will stop
    # gs_offloader_instructions being updated, and logs can be uploaded for
    # troubleshooting.
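    # The instructions file ends up as a small JSON dict whose
    # constants.GS_OFFLOADER_NO_OFFLOAD key is set to True.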
    if job_successful:
        # Check if we should not offload this test's results.
        if job_keyval.get(constants.JOB_OFFLOAD_FAILURES_KEY, False):
            # Update the gs_offloader_instructions json file.
            gs_instructions_file = os.path.join(
                    path, constants.GS_OFFLOADER_INSTRUCTIONS)
            gs_offloader_instructions = {}
            if os.path.exists(gs_instructions_file):
                with open(gs_instructions_file, 'r') as f:
                    gs_offloader_instructions = json.load(f)

            gs_offloader_instructions[constants.GS_OFFLOADER_NO_OFFLOAD] = True
            with open(gs_instructions_file, 'w') as f:
                json.dump(gs_offloader_instructions, f)


def _site_export_dummy(binary_file_name):
    pass


def _get_job_subdirs(path):
    """
    Returns a list of job subdirectories at path. Returns None if the path
    is itself a job directory. Does not recurse into the subdirs.
    """
    # if there's a .machines file, use it to get the subdirs
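    # (each line of the .machines file names one machine subdirectory)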
    machine_list = os.path.join(path, ".machines")
    if os.path.exists(machine_list):
        subdirs = set(line.strip() for line in file(machine_list))
        existing_subdirs = set(subdir for subdir in subdirs
                               if os.path.exists(os.path.join(path, subdir)))
        if len(existing_subdirs) != 0:
            return existing_subdirs

    # if this dir contains ONLY subdirectories, return them
    contents = set(os.listdir(path))
    contents.discard(".parse.lock")
    subdirs = set(sub for sub in contents if
                  os.path.isdir(os.path.join(path, sub)))
    if len(contents) == len(subdirs) != 0:
        return subdirs

    # this is a job directory, or something else we don't understand
    return None


def parse_leaf_path(db, path, level, reparse, mail_on_failure):
    """Parse a leaf path.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.

    @returns: The job name of the parsed job, e.g. '123-chromeos-test/host1'
    """
    job_elements = path.split("/")[-level:]
    jobname = "/".join(job_elements)
    try:
        db.run_with_retry(parse_one, db, jobname, path, reparse,
                          mail_on_failure)
    except Exception:
        traceback.print_exc()
    return jobname


def parse_path(db, path, level, reparse, mail_on_failure):
    """Parse a path.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.

    @returns: A set of job names of the parsed jobs.
              set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    """
    processed_jobs = set()
    job_subdirs = _get_job_subdirs(path)
    if job_subdirs is not None:
        # Parse status.log in the current directory, if it exists.
        # Multi-machine synchronous server-side tests record output in this
        # directory; without this check, we would not parse those results.
        if os.path.exists(os.path.join(path, 'status.log')):
            new_job = parse_leaf_path(db, path, level, reparse, mail_on_failure)
            processed_jobs.add(new_job)
        # multi-machine job
        for subdir in job_subdirs:
            jobpath = os.path.join(path, subdir)
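            # Each level of recursion adds one more path component to the job
            # name, hence level + 1.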
            new_jobs = parse_path(db, jobpath, level + 1, reparse, mail_on_failure)
            processed_jobs.update(new_jobs)
    else:
        # single machine job
        new_job = parse_leaf_path(db, path, level, reparse, mail_on_failure)
        processed_jobs.add(new_job)
    return processed_jobs


def record_parsing(processed_jobs, duration_secs):
    """Record the time spent on parsing to metadata db.

    @param processed_jobs: A set of job names of the parsed jobs.
                           set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    @param duration_secs: Total time spent on parsing, in seconds.
    """

    for job_name in processed_jobs:
        job_id, hostname = tko_utils.get_afe_job_id_and_hostname(job_name)
        if not job_id or not hostname:
            tko_utils.dprint('ERROR: can not parse job name %s, '
                             'will not send duration to metadata db.'
                             % job_name)
            continue
        else:
            job_overhead.record_state_duration(
                    job_id, hostname, job_overhead.STATUS.PARSING,
                    duration_secs)


def main():
    """Main entry point."""
    start_time = datetime.datetime.now()
    # Record the processed jobs so that
    # we can send the duration of parsing to metadata db.
    processed_jobs = set()

    options, args = parse_args()
    results_dir = os.path.abspath(args[0])
    assert os.path.exists(results_dir)

    pid_file_manager = pidfile.PidFileManager("parser", results_dir)

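    # When requested, record this parser's pid in the .parser_execute file so
    # that an outside process can tell a parse is still running for this
    # results directory (an assumption about how the pidfile is consumed).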
    if options.write_pidfile:
        pid_file_manager.open_file()

    try:
        # build up the list of job dirs to parse
        if options.singledir:
            jobs_list = [results_dir]
        else:
            jobs_list = [os.path.join(results_dir, subdir)
                         for subdir in os.listdir(results_dir)]

        # build up the database
        db = tko_db.db(autocommit=False, host=options.db_host,
                       user=options.db_user, password=options.db_pass,
                       database=options.db_name)

        # parse all the jobs
        for path in jobs_list:
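            # Guard each results directory with its own .parse.lock so that
            # concurrent parser instances do not process the same job twice.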
            lockfile = open(os.path.join(path, ".parse.lock"), "w")
            flags = fcntl.LOCK_EX
            if options.noblock:
                flags |= fcntl.LOCK_NB
            try:
                fcntl.flock(lockfile, flags)
            except IOError, e:
                # lock is not available and nonblock has been requested
                if e.errno == errno.EWOULDBLOCK:
                    lockfile.close()
                    continue
                else:
                    raise  # something unexpected happened
            try:
                new_jobs = parse_path(db, path, options.level, options.reparse,
                                      options.mailit)
                processed_jobs.update(new_jobs)

            finally:
                fcntl.flock(lockfile, fcntl.LOCK_UN)
                lockfile.close()

    except:
        pid_file_manager.close_file(1)
        raise
    else:
        pid_file_manager.close_file(0)
    duration_secs = (datetime.datetime.now() - start_time).total_seconds()
    if options.record_duration:
        record_parsing(processed_jobs, duration_secs)


if __name__ == "__main__":
    main()