#!/usr/bin/python -u
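"""Parse job results from a results directory into the tko database."""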

import datetime
import json
import os, sys, optparse, fcntl, errno, traceback, socket

import common
from autotest_lib.client.common_lib import mail, pidfile
from autotest_lib.client.common_lib import utils
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.tko import models as tko_models
from autotest_lib.server.cros.dynamic_suite import constants
from autotest_lib.site_utils import job_overhead
from autotest_lib.tko import db as tko_db, utils as tko_utils
from autotest_lib.tko import models, status_lib
from autotest_lib.tko.perf_upload import perf_uploader


def parse_args():
    """Parse args."""
    # build up our options parser and parse sys.argv
    parser = optparse.OptionParser()
    parser.add_option("-m", help="Send mail for FAILED tests",
                      dest="mailit", action="store_true")
    parser.add_option("-r", help="Reparse the results of a job",
                      dest="reparse", action="store_true")
    parser.add_option("-o", help="Parse a single results directory",
                      dest="singledir", action="store_true")
    parser.add_option("-l", help=("Levels of subdirectories to include "
                                  "in the job name"),
                      type="int", dest="level", default=1)
    parser.add_option("-n", help="No blocking on an existing parse",
                      dest="noblock", action="store_true")
    parser.add_option("-s", help="Database server hostname",
                      dest="db_host", action="store")
    parser.add_option("-u", help="Database username", dest="db_user",
                      action="store")
    parser.add_option("-p", help="Database password", dest="db_pass",
                      action="store")
    parser.add_option("-d", help="Database name", dest="db_name",
                      action="store")
    parser.add_option("--write-pidfile",
                      help="write pidfile (.parser_execute)",
                      dest="write_pidfile", action="store_true",
                      default=False)
    parser.add_option("--record-duration",
                      help="Record timing to metadata db",
                      dest="record_duration", action="store_true",
                      default=False)
    options, args = parser.parse_args()

    # we need a results directory
    if len(args) == 0:
        tko_utils.dprint("ERROR: at least one results directory must "
                         "be provided")
        parser.print_help()
        sys.exit(1)

    # pass the options back
    return options, args


def format_failure_message(jobname, kernel, testname, status, reason):
    """Format failure message with the given information.

    @param jobname: String representing the job name.
    @param kernel: String representing the kernel.
    @param testname: String representing the test name.
    @param status: String representing the test status.
    @param reason: String representing the reason.

    @return: Failure message as a string.
    """
    format_string = "%-12s %-20s %-12s %-10s %s"
    return format_string % (jobname, kernel, testname, status, reason)


def mailfailure(jobname, job, message):
    """Send an email about the failure.

    @param jobname: String representing the job name.
    @param job: A job object.
    @param message: The message to mail.
    """
    message_lines = [""]
    message_lines.append("The following tests FAILED for this job")
    message_lines.append("http://%s/results/%s" %
                         (socket.gethostname(), jobname))
    message_lines.append("")
    message_lines.append(format_failure_message("Job name", "Kernel",
                                                "Test name", "FAIL/WARN",
                                                "Failure reason"))
    message_lines.append(format_failure_message("=" * 8, "=" * 6, "=" * 8,
                                                "=" * 8, "=" * 14))
    message_header = "\n".join(message_lines)

    subject = "AUTOTEST: FAILED tests from job %s" % jobname
    mail.send("", job.user, "", subject, message_header + message)


def _invalidate_original_tests(orig_job_idx, retry_job_idx):
    """Invalidate the original tests that a retry job supersedes.

    Whenever a retry job is complete, we want to invalidate the original
    job's test results, so that the consumers of the tko database
    (e.g. tko frontend, wmatrix) can figure out which results are the latest.

    When a retry job is parsed, we retrieve the original job's afe_job_id
    from the retry job's keyvals, which is then converted to a tko job_idx and
    passed into this method as |orig_job_idx|.

    In this method, we invalidate the rows in tko_tests that are associated
    with the original job by flipping their 'invalid' bit to True.
    In addition, in tko_tests, we also maintain a pointer from the retry
    results to the original results, so that later we can always know which
    rows in tko_tests are retries and which are the corresponding original
    results. This is done by setting the field 'invalidates_test_idx' of the
    tests associated with the retry job.

    For example, assume Job(job_idx=105) is retried by Job(job_idx=108). After
    this method is run, their tko_tests rows will look like:
    __________________________________________________________________________
    test_idx | job_idx | test             | ... | invalid | invalidates_test_idx
    10       | 105     | dummy_Fail.Error | ... | 1       | NULL
    11       | 105     | dummy_Fail.Fail  | ... | 1       | NULL
    ...
    20       | 108     | dummy_Fail.Error | ... | 0       | 10
    21       | 108     | dummy_Fail.Fail  | ... | 0       | 11
    __________________________________________________________________________
    Note the invalid bits of the rows for Job(job_idx=105) are set to '1',
    and the 'invalidates_test_idx' fields of the rows for Job(job_idx=108)
    are set to 10 and 11 (the test_idx of the rows for the original job).

    @param orig_job_idx: An integer representing the original job's
                         tko job_idx. Tests associated with this job will
                         be marked as 'invalid'.
    @param retry_job_idx: An integer representing the retry job's
                          tko job_idx. The field 'invalidates_test_idx'
                          of the tests associated with this job will be
                          updated.
    """
    msg = 'orig_job_idx: %s, retry_job_idx: %s' % (orig_job_idx, retry_job_idx)
    if not orig_job_idx or not retry_job_idx:
        tko_utils.dprint('ERROR: Could not invalidate tests: ' + msg)
    # Using django models here makes things easier, but make sure that
    # before this method is called, all other relevant transactions have been
    # committed to avoid race conditions. In the long run, we might consider
    # making the rest of the parser use django models.
    orig_tests = tko_models.Test.objects.filter(job__job_idx=orig_job_idx)
    retry_tests = tko_models.Test.objects.filter(job__job_idx=retry_job_idx)

    # Invalidate original tests.
    orig_tests.update(invalid=True)

    # Maintain a dictionary that maps (test, subdir) to original tests.
    # Note that within the scope of a job, (test, subdir) uniquely
    # identifies a test run, but 'test' alone does not.
    # In a control file, one could run the same test with different
    # 'subdir_tag', for example,
    #     job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_1')
    #     job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_2')
    # In tko, we will get
    #     (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_1')
    #     (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_2')
    invalidated_tests = {(orig_test.test, orig_test.subdir): orig_test
                         for orig_test in orig_tests}
    for retry in retry_tests:
        # It is possible that (retry.test, retry.subdir) doesn't exist
        # in invalidated_tests. This could happen when the original job
        # didn't run some of its tests. For example, a dut went offline
        # at the beginning of the job, in which case invalidated_tests
        # will only have one entry for 'SERVER_JOB'.
        orig_test = invalidated_tests.get((retry.test, retry.subdir), None)
        if orig_test:
            retry.invalidates_test = orig_test
            retry.save()
    tko_utils.dprint('DEBUG: Invalidated tests associated to job: ' + msg)


def parse_one(db, jobname, path, reparse, mail_on_failure):
    """Parse a single job. Optionally send email on failure.

    @param db: database object.
    @param jobname: the tag used to search for existing job in db,
                    e.g. '1234-chromeos-test/host1'
    @param path: The path to the results to be parsed.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.
    """
    tko_utils.dprint("\nScanning %s (%s)" % (jobname, path))
    old_job_idx = db.find_job(jobname)
    # old tests is a dict from tuple (test_name, subdir) to test_idx
    old_tests = {}
    if old_job_idx is not None:
        if not reparse:
            tko_utils.dprint("! Job is already parsed, done")
            return

        raw_old_tests = db.select("test_idx,subdir,test", "tko_tests",
                                  {"job_idx": old_job_idx})
        if raw_old_tests:
            old_tests = dict(((test, subdir), test_idx)
                             for test_idx, subdir, test in raw_old_tests)

    # look up the status version
    job_keyval = models.job.read_keyval(path)
    status_version = job_keyval.get("status_version", 0)

    # parse out the job
    parser = status_lib.parser(status_version)
    job = parser.make_job(path)
    status_log = os.path.join(path, "status.log")
    if not os.path.exists(status_log):
        status_log = os.path.join(path, "status")
    if not os.path.exists(status_log):
        tko_utils.dprint("! Unable to parse job, no status file")
        return

    # parse the status logs
    tko_utils.dprint("+ Parsing dir=%s, jobname=%s" % (path, jobname))
    status_lines = open(status_log).readlines()
    parser.start(job)
    tests = parser.end(status_lines)

    # parser.end can return the same object multiple times, so filter out dups
    job.tests = []
    already_added = set()
    for test in tests:
        if test not in already_added:
            already_added.add(test)
            job.tests.append(test)

    # try and port test_idx over from the old tests, but if old tests stop
    # matching up with new ones just give up
    if reparse and old_job_idx is not None:
        job.index = old_job_idx
        for test in job.tests:
            test_idx = old_tests.pop((test.testname, test.subdir), None)
            if test_idx is not None:
                test.test_idx = test_idx
            else:
                tko_utils.dprint("! Reparse returned new test "
                                 "testname=%r subdir=%r" %
                                 (test.testname, test.subdir))
        for test_idx in old_tests.itervalues():
            where = {'test_idx': test_idx}
            db.delete('tko_iteration_result', where)
            db.delete('tko_iteration_perf_value', where)
            db.delete('tko_iteration_attributes', where)
            db.delete('tko_test_attributes', where)
            db.delete('tko_test_labels_tests', {'test_id': test_idx})
            db.delete('tko_tests', where)

    # check for failures
    message_lines = [""]
    job_successful = True
    for test in job.tests:
        if not test.subdir:
            continue
        tko_utils.dprint("* testname, status, reason: %s %s %s"
                         % (test.subdir, test.status, test.reason))
        if test.status != 'GOOD':
            job_successful = False
            message_lines.append(format_failure_message(
                jobname, test.kernel.base, test.subdir,
                test.status, test.reason))
    if job_successful:
        # Check if we should not offload this test's results.
        if job_keyval.get(constants.JOB_OFFLOAD_FAILURES_KEY, False):
            # Update the gs_offloader_instructions json file.
            gs_instructions_file = os.path.join(
                    path, constants.GS_OFFLOADER_INSTRUCTIONS)
            gs_offloader_instructions = {}
            if os.path.exists(gs_instructions_file):
                with open(gs_instructions_file, 'r') as f:
                    gs_offloader_instructions = json.load(f)

            gs_offloader_instructions[constants.GS_OFFLOADER_NO_OFFLOAD] = True
            with open(gs_instructions_file, 'w') as f:
                json.dump(gs_offloader_instructions, f)

    message = "\n".join(message_lines)

    # send out an email report of failure
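    # message_lines starts with a single empty entry, so a joined message
    # longer than a couple of characters means at least one failure line
    # was appended above.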
    if len(message) > 2 and mail_on_failure:
        tko_utils.dprint("Sending email report of failure on %s to %s"
                         % (jobname, job.user))
        mailfailure(jobname, job, message)

    # write the job into the database.
    db.insert_job(jobname, job,
                  parent_job_id=job_keyval.get(constants.PARENT_JOB_ID, None))

    # Upload perf values to the perf dashboard, if applicable.
    for test in job.tests:
        perf_uploader.upload_test(job, test)

    # Although the cursor has autocommit, we still need to force it to commit
    # existing changes before we can use django models, otherwise it
    # will go into deadlock when django models try to start a new transaction
    # while the current one has not finished yet.
    db.commit()

    # Handle retry job.
    orig_afe_job_id = job_keyval.get(constants.RETRY_ORIGINAL_JOB_ID, None)
    if orig_afe_job_id:
        orig_job_idx = tko_models.Job.objects.get(
                afe_job_id=orig_afe_job_id).job_idx
        _invalidate_original_tests(orig_job_idx, job.index)

    # Serialize the job into a binary file.
    try:
        from autotest_lib.tko import tko_pb2
        from autotest_lib.tko import job_serializer

        serializer = job_serializer.JobSerializer()
        binary_file_name = os.path.join(path, "job.serialize")
        serializer.serialize_to_binary(job, jobname, binary_file_name)

        if reparse:
            site_export_file = "autotest_lib.tko.site_export"
            site_export = utils.import_site_function(__file__,
                                                     site_export_file,
                                                     "site_export",
                                                     _site_export_dummy)
            site_export(binary_file_name)

    except ImportError:
        tko_utils.dprint("DEBUG: tko_pb2.py doesn't exist. Create by "
                         "compiling tko/tko.proto.")

    db.commit()


def _site_export_dummy(binary_file_name):
    pass


def _get_job_subdirs(path):
    """
    Returns a list of job subdirectories at path. Returns None if the test
    is itself a job directory. Does not recurse into the subdirs.
    """
    # if there's a .machines file, use it to get the subdirs
    machine_list = os.path.join(path, ".machines")
    if os.path.exists(machine_list):
        subdirs = set(line.strip() for line in file(machine_list))
        existing_subdirs = set(subdir for subdir in subdirs
                               if os.path.exists(os.path.join(path, subdir)))
        if len(existing_subdirs) != 0:
            return existing_subdirs

    # if this dir contains ONLY subdirectories, return them
    contents = set(os.listdir(path))
    contents.discard(".parse.lock")
    subdirs = set(sub for sub in contents if
                  os.path.isdir(os.path.join(path, sub)))
    if len(contents) == len(subdirs) != 0:
        return subdirs

    # this is a job directory, or something else we don't understand
    return None


def parse_leaf_path(db, path, level, reparse, mail_on_failure):
    """Parse a leaf path.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.

    @returns: The job name of the parsed job, e.g. '123-chromeos-test/host1'
    """
    job_elements = path.split("/")[-level:]
    jobname = "/".join(job_elements)
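    # Failures from parsing a single job are logged and swallowed here so
    # that one bad results directory does not abort the remaining jobs.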
    try:
        db.run_with_retry(parse_one, db, jobname, path, reparse,
                          mail_on_failure)
    except Exception:
        traceback.print_exc()
    return jobname


def parse_path(db, path, level, reparse, mail_on_failure):
    """Parse a path.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.

    @returns: A set of job names of the parsed jobs, e.g.
              set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    """
    processed_jobs = set()
    job_subdirs = _get_job_subdirs(path)
    if job_subdirs is not None:
        # parse status.log in current directory, if it exists. multi-machine
        # synchronous server side tests record output in this directory.
        # without this check, we do not parse these results.
        if os.path.exists(os.path.join(path, 'status.log')):
            new_job = parse_leaf_path(db, path, level, reparse,
                                      mail_on_failure)
            processed_jobs.add(new_job)
        # multi-machine job
        for subdir in job_subdirs:
            jobpath = os.path.join(path, subdir)
            new_jobs = parse_path(db, jobpath, level + 1, reparse,
                                  mail_on_failure)
            processed_jobs.update(new_jobs)
    else:
        # single machine job
        new_job = parse_leaf_path(db, path, level, reparse, mail_on_failure)
        processed_jobs.add(new_job)
    return processed_jobs


def record_parsing(processed_jobs, duration_secs):
    """Record the time spent on parsing to metadata db.

    @param processed_jobs: A set of job names of the parsed jobs, e.g.
           set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    @param duration_secs: Total time spent on parsing, in seconds.
    """

    for job_name in processed_jobs:
        job_id, hostname = tko_utils.get_afe_job_id_and_hostname(job_name)
        if not job_id or not hostname:
            tko_utils.dprint('ERROR: can not parse job name %s, '
                             'will not send duration to metadata db.'
                             % job_name)
            continue
        else:
            job_overhead.record_state_duration(
                    job_id, hostname, job_overhead.STATUS.PARSING,
                    duration_secs)


def main():
    """Main entry point."""
    start_time = datetime.datetime.now()
    # Record the processed jobs so that
    # we can send the duration of parsing to metadata db.
    processed_jobs = set()

    options, args = parse_args()
    results_dir = os.path.abspath(args[0])
    assert os.path.exists(results_dir)

    pid_file_manager = pidfile.PidFileManager("parser", results_dir)

    if options.write_pidfile:
        pid_file_manager.open_file()

    try:
        # build up the list of job dirs to parse
        if options.singledir:
            jobs_list = [results_dir]
        else:
            jobs_list = [os.path.join(results_dir, subdir)
                         for subdir in os.listdir(results_dir)]

        # build up the database
        db = tko_db.db(autocommit=False, host=options.db_host,
                       user=options.db_user, password=options.db_pass,
                       database=options.db_name)

        # parse all the jobs
        for path in jobs_list:
            lockfile = open(os.path.join(path, ".parse.lock"), "w")
            flags = fcntl.LOCK_EX
            if options.noblock:
                flags |= fcntl.LOCK_NB
            try:
                fcntl.flock(lockfile, flags)
            except IOError, e:
                # lock is not available and nonblock has been requested
                if e.errno == errno.EWOULDBLOCK:
                    lockfile.close()
                    continue
                else:
                    raise  # something unexpected happened
            try:
                new_jobs = parse_path(db, path, options.level, options.reparse,
                                      options.mailit)
                processed_jobs.update(new_jobs)

            finally:
                fcntl.flock(lockfile, fcntl.LOCK_UN)
                lockfile.close()

    except:
        pid_file_manager.close_file(1)
        raise
    else:
        pid_file_manager.close_file(0)
    duration_secs = (datetime.datetime.now() - start_time).total_seconds()
    if options.record_duration:
        record_parsing(processed_jobs, duration_secs)


if __name__ == "__main__":
    main()