mbligh | 96cf051 | 2008-04-17 15:25:38 +0000 | [diff] [blame] | 1 | #!/usr/bin/python -u |
mbligh | c251454 | 2008-02-19 15:54:26 +0000 | [diff] [blame] | 2 | |
Fang Deng | 4982268 | 2014-10-21 16:29:22 -0700 | [diff] [blame] | 3 | import datetime |
Simran Basi | 1e10e92 | 2015-04-16 15:09:56 -0700 | [diff] [blame] | 4 | import json |
mbligh | b33e53e | 2008-06-17 19:41:26 +0000 | [diff] [blame] | 5 | import os, sys, optparse, fcntl, errno, traceback, socket |
mbligh | bb7b891 | 2006-10-08 03:59:02 +0000 | [diff] [blame] | 6 | |
mbligh | 96cf051 | 2008-04-17 15:25:38 +0000 | [diff] [blame] | 7 | import common |
jadmanski | db4f9b5 | 2008-12-03 22:52:53 +0000 | [diff] [blame] | 8 | from autotest_lib.client.common_lib import mail, pidfile |
Fang Deng | 4982268 | 2014-10-21 16:29:22 -0700 | [diff] [blame] | 9 | from autotest_lib.client.common_lib import utils |
Fang Deng | 4982268 | 2014-10-21 16:29:22 -0700 | [diff] [blame] | 10 | from autotest_lib.frontend import setup_django_environment |
Fang Deng | 9ec6680 | 2014-04-28 19:04:33 +0000 | [diff] [blame] | 11 | from autotest_lib.frontend.tko import models as tko_models |
Fang Deng | 4982268 | 2014-10-21 16:29:22 -0700 | [diff] [blame] | 12 | from autotest_lib.server.cros.dynamic_suite import constants |
| 13 | from autotest_lib.site_utils import job_overhead |
Dan Shi | 4c33b6a | 2016-08-18 16:11:31 -0700 | [diff] [blame^] | 14 | from autotest_lib.site_utils import sponge_utils |
Dennis Jeffrey | f9bef6c | 2013-08-05 11:01:27 -0700 | [diff] [blame] | 15 | from autotest_lib.tko import db as tko_db, utils as tko_utils |
| 16 | from autotest_lib.tko import models, status_lib |
| 17 | from autotest_lib.tko.perf_upload import perf_uploader |
mbligh | 74fc046 | 2007-11-05 20:24:17 +0000 | [diff] [blame] | 18 | |
| 19 | |
def parse_args():
    """Build the command line parser and parse sys.argv.

    @return: A (options, args) pair from optparse; args must name at
            least one results directory (enforced here, exits otherwise).
    """
    option_parser = optparse.OptionParser()
    add = option_parser.add_option
    add("-m", help="Send mail for FAILED tests",
        dest="mailit", action="store_true")
    add("-r", help="Reparse the results of a job",
        dest="reparse", action="store_true")
    add("-o", help="Parse a single results directory",
        dest="singledir", action="store_true")
    add("-l", help=("Levels of subdirectories to include "
                    "in the job name"),
        type="int", dest="level", default=1)
    add("-n", help="No blocking on an existing parse",
        dest="noblock", action="store_true")
    add("-s", help="Database server hostname",
        dest="db_host", action="store")
    add("-u", help="Database username", dest="db_user", action="store")
    add("-p", help="Database password", dest="db_pass", action="store")
    add("-d", help="Database name", dest="db_name", action="store")
    add("--write-pidfile", help="write pidfile (.parser_execute)",
        dest="write_pidfile", action="store_true", default=False)
    add("--record-duration", help="Record timing to metadata db",
        dest="record_duration", action="store_true", default=False)
    options, args = option_parser.parse_args()

    # A results directory is mandatory; bail out with usage otherwise.
    if not args:
        tko_utils.dprint("ERROR: at least one results directory must "
                         "be provided")
        option_parser.print_help()
        sys.exit(1)

    return options, args
mbligh | 74fc046 | 2007-11-05 20:24:17 +0000 | [diff] [blame] | 62 | |
| 63 | |
def format_failure_message(jobname, kernel, testname, status, reason):
    """Render one left-aligned, column-padded failure line.

    @param jobname: String representing the job name.
    @param kernel: String representing the kernel.
    @param testname: String representing the test name.
    @param status: String representing the test status.
    @param reason: String representing the reason.

    @return: Failure message as a string.
    """
    fields = (jobname, kernel, testname, status, reason)
    return "%-12s %-20s %-12s %-10s %s" % fields
mbligh | b85e6b0 | 2006-10-08 17:20:56 +0000 | [diff] [blame] | 77 | |
mbligh | bb7b891 | 2006-10-08 03:59:02 +0000 | [diff] [blame] | 78 | |
def mailfailure(jobname, job, message):
    """Email the job owner a report of the failed tests in a job.

    @param jobname: String representing the job name.
    @param job: A job object.
    @param message: The message to mail.
    """
    # Assemble the fixed header: link to the results plus a column header
    # row and an underline row matching format_failure_message's columns.
    header_lines = [
        "",
        "The following tests FAILED for this job",
        "http://%s/results/%s" % (socket.gethostname(), jobname),
        "",
        format_failure_message("Job name", "Kernel", "Test name",
                               "FAIL/WARN", "Failure reason"),
        format_failure_message("=" * 8, "=" * 6, "=" * 8,
                               "=" * 8, "=" * 14),
    ]
    subject = "AUTOTEST: FAILED tests from job %s" % jobname
    mail.send("", job.user, "", subject,
              "\n".join(header_lines) + message)
mbligh | 006f230 | 2007-09-13 20:46:46 +0000 | [diff] [blame] | 100 | |
| 101 | |
def _invalidate_original_tests(orig_job_idx, retry_job_idx):
    """Retry tests invalidates original tests.

    Whenever a retry job is complete, we want to invalidate the original
    job's test results, such that the consumers of the tko database
    (e.g. tko frontend, wmatrix) could figure out which results are the latest.

    When a retry job is parsed, we retrieve the original job's afe_job_id
    from the retry job's keyvals, which is then converted to tko job_idx and
    passed into this method as |orig_job_idx|.

    In this method, we are going to invalidate the rows in tko_tests that are
    associated with the original job by flipping their 'invalid' bit to True.
    In addition, in tko_tests, we also maintain a pointer from the retry results
    to the original results, so that later we can always know which rows in
    tko_tests are retries and which are the corresponding original results.
    This is done by setting the field 'invalidates_test_idx' of the tests
    associated with the retry job.

    For example, assume Job(job_idx=105) are retried by Job(job_idx=108), after
    this method is run, their tko_tests rows will look like:
    __________________________________________________________________________
    test_idx| job_idx | test            | ... | invalid | invalidates_test_idx
    10      | 105     | dummy_Fail.Error| ... | 1       | NULL
    11      | 105     | dummy_Fail.Fail | ... | 1       | NULL
    ...
    20      | 108     | dummy_Fail.Error| ... | 0       | 10
    21      | 108     | dummy_Fail.Fail | ... | 0       | 11
    __________________________________________________________________________
    Note the invalid bits of the rows for Job(job_idx=105) are set to '1'.
    And the 'invalidates_test_idx' fields of the rows for Job(job_idx=108)
    are set to 10 and 11 (the test_idx of the rows for the original job).

    @param orig_job_idx: An integer representing the original job's
                         tko job_idx. Tests associated with this job will
                         be marked as 'invalid'.
    @param retry_job_idx: An integer representing the retry job's
                          tko job_idx. The field 'invalidates_test_idx'
                          of the tests associated with this job will be updated.

    """
    msg = 'orig_job_idx: %s, retry_job_idx: %s' % (orig_job_idx, retry_job_idx)
    if not orig_job_idx or not retry_job_idx:
        tko_utils.dprint('ERROR: Could not invalidate tests: ' + msg)
        # Bail out early. Previously execution fell through here, running
        # the queries below with a missing job_idx and then logging the
        # misleading "Invalidated tests" debug message.
        return
    # Using django models here makes things easier, but make sure that
    # before this method is called, all other relevant transactions have been
    # committed to avoid race condition. In the long run, we might consider
    # to make the rest of parser use django models.
    orig_tests = tko_models.Test.objects.filter(job__job_idx=orig_job_idx)
    retry_tests = tko_models.Test.objects.filter(job__job_idx=retry_job_idx)

    # Invalidate original tests.
    orig_tests.update(invalid=True)

    # Maintain a dictionary that maps (test, subdir) to original tests.
    # Note that within the scope of a job, (test, subdir) uniquelly
    # identifies a test run, but 'test' does not.
    # In a control file, one could run the same test with different
    # 'subdir_tag', for example,
    #   job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_1')
    #   job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_2')
    # In tko, we will get
    #   (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_1')
    #   (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_2')
    invalidated_tests = {(orig_test.test, orig_test.subdir): orig_test
                         for orig_test in orig_tests}
    for retry in retry_tests:
        # It is possible that (retry.test, retry.subdir) doesn't exist
        # in invalidated_tests. This could happen when the original job
        # didn't run some of its tests. For example, a dut goes offline
        # since the beginning of the job, in which case invalidated_tests
        # will only have one entry for 'SERVER_JOB'.
        orig_test = invalidated_tests.get((retry.test, retry.subdir), None)
        if orig_test:
            retry.invalidates_test = orig_test
            retry.save()
    tko_utils.dprint('DEBUG: Invalidated tests associated to job: ' + msg)
| 179 | |
| 180 | |
def parse_one(db, jobname, path, reparse, mail_on_failure):
    """Parse a single job. Optionally send email on failure.

    @param db: database object.
    @param jobname: the tag used to search for existing job in db,
                    e.g. '1234-chromeos-test/host1'
    @param path: The path to the results to be parsed.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.


    """
    tko_utils.dprint("\nScanning %s (%s)" % (jobname, path))
    old_job_idx = db.find_job(jobname)
    # old tests is a dict from tuple (test_name, subdir) to test_idx
    old_tests = {}
    if old_job_idx is not None:
        # Without --reparse (-r), an already-parsed job is skipped entirely.
        if not reparse:
            tko_utils.dprint("! Job is already parsed, done")
            return

        raw_old_tests = db.select("test_idx,subdir,test", "tko_tests",
                                  {"job_idx": old_job_idx})
        if raw_old_tests:
            old_tests = dict(((test, subdir), test_idx)
                             for test_idx, subdir, test in raw_old_tests)

    # look up the status version; 0 is the default for old-format results
    job_keyval = models.job.read_keyval(path)
    status_version = job_keyval.get("status_version", 0)

    # parse out the job
    parser = status_lib.parser(status_version)
    job = parser.make_job(path)
    # Prefer "status.log"; fall back to the legacy "status" file name.
    status_log = os.path.join(path, "status.log")
    if not os.path.exists(status_log):
        status_log = os.path.join(path, "status")
    if not os.path.exists(status_log):
        tko_utils.dprint("! Unable to parse job, no status file")
        return

    # parse the status logs
    tko_utils.dprint("+ Parsing dir=%s, jobname=%s" % (path, jobname))
    status_lines = open(status_log).readlines()
    parser.start(job)
    tests = parser.end(status_lines)

    # parser.end can return the same object multiple times, so filter out dups
    job.tests = []
    already_added = set()
    for test in tests:
        if test not in already_added:
            already_added.add(test)
            job.tests.append(test)

    # try and port test_idx over from the old tests, but if old tests stop
    # matching up with new ones just give up
    if reparse and old_job_idx is not None:
        job.index = old_job_idx
        for test in job.tests:
            test_idx = old_tests.pop((test.testname, test.subdir), None)
            if test_idx is not None:
                test.test_idx = test_idx
            else:
                tko_utils.dprint("! Reparse returned new test "
                                 "testname=%r subdir=%r" %
                                 (test.testname, test.subdir))
        # Any entries left in old_tests were not reproduced by the reparse;
        # purge their rows (and all dependent rows) from the database.
        for test_idx in old_tests.itervalues():
            where = {'test_idx' : test_idx}
            db.delete('tko_iteration_result', where)
            db.delete('tko_iteration_perf_value', where)
            db.delete('tko_iteration_attributes', where)
            db.delete('tko_test_attributes', where)
            db.delete('tko_test_labels_tests', {'test_id': test_idx})
            db.delete('tko_tests', where)

    # check for failures
    message_lines = [""]
    job_successful = True
    for test in job.tests:
        # Tests without a subdir (e.g. abort markers) are not reported.
        if not test.subdir:
            continue
        tko_utils.dprint("* testname, status, reason: %s %s %s"
                         % (test.subdir, test.status, test.reason))
        if test.status != 'GOOD':
            job_successful = False
            message_lines.append(format_failure_message(
                jobname, test.kernel.base, test.subdir,
                test.status, test.reason))

    message = "\n".join(message_lines)

    # send out a email report of failure
    # message always starts with "\n", so len > 2 means at least one
    # failure line was appended above.
    if len(message) > 2 and mail_on_failure:
        tko_utils.dprint("Sending email report of failure on %s to %s"
                         % (jobname, job.user))
        mailfailure(jobname, job, message)

    # write the job into the database.
    db.insert_job(jobname, job,
                  parent_job_id=job_keyval.get(constants.PARENT_JOB_ID, None))

    # Upload perf values to the perf dashboard, if applicable.
    for test in job.tests:
        perf_uploader.upload_test(job, test, jobname)

    # Although the cursor has autocommit, we still need to force it to commit
    # existing changes before we can use django models, otherwise it
    # will go into deadlock when django models try to start a new trasaction
    # while the current one has not finished yet.
    db.commit()

    # Handle retry job: if this job retried an earlier one, mark the
    # original job's tests invalid and point the retries at them.
    orig_afe_job_id = job_keyval.get(constants.RETRY_ORIGINAL_JOB_ID, None)
    if orig_afe_job_id:
        orig_job_idx = tko_models.Job.objects.get(
                afe_job_id=orig_afe_job_id).job_idx
        _invalidate_original_tests(orig_job_idx, job.index)

    # Serializing job into a binary file
    try:
        from autotest_lib.tko import tko_pb2
        from autotest_lib.tko import job_serializer

        serializer = job_serializer.JobSerializer()
        binary_file_name = os.path.join(path, "job.serialize")
        serializer.serialize_to_binary(job, jobname, binary_file_name)

        if reparse:
            # Site-specific export hook; falls back to a no-op when the
            # site module is absent.
            site_export_file = "autotest_lib.tko.site_export"
            site_export = utils.import_site_function(__file__,
                                                     site_export_file,
                                                     "site_export",
                                                     _site_export_dummy)
            site_export(binary_file_name)

    except ImportError:
        tko_utils.dprint("DEBUG: tko_pb2.py doesn't exist. Create by "
                         "compiling tko/tko.proto.")

    db.commit()

    # Upload job details to Sponge.
    sponge_utils.upload_results(job)

    # Mark GS_OFFLOADER_NO_OFFLOAD in gs_offloader_instructions at the end of
    # the function, so any failure, e.g., db connection error, will stop
    # gs_offloader_instructions being updated, and logs can be uploaded for
    # troubleshooting.
    if job_successful:
        # Check if we should not offload this test's results.
        if job_keyval.get(constants.JOB_OFFLOAD_FAILURES_KEY, False):
            # Update the gs_offloader_instructions json file.
            gs_instructions_file = os.path.join(
                    path, constants.GS_OFFLOADER_INSTRUCTIONS)
            gs_offloader_instructions = {}
            if os.path.exists(gs_instructions_file):
                with open(gs_instructions_file, 'r') as f:
                    gs_offloader_instructions = json.load(f)

            gs_offloader_instructions[constants.GS_OFFLOADER_NO_OFFLOAD] = True
            with open(gs_instructions_file, 'w') as f:
                json.dump(gs_offloader_instructions, f)
| 344 | |
| 345 | |
jamesren | 4826cc4 | 2010-06-15 20:33:22 +0000 | [diff] [blame] | 346 | def _site_export_dummy(binary_file_name): |
| 347 | pass |
mbligh | 26b992b | 2008-02-19 15:46:21 +0000 | [diff] [blame] | 348 | |
Dan Shi | 5f62633 | 2016-01-27 15:25:58 -0800 | [diff] [blame] | 349 | |
jadmanski | 8e9c257 | 2008-11-11 00:29:02 +0000 | [diff] [blame] | 350 | def _get_job_subdirs(path): |
| 351 | """ |
| 352 | Returns a list of job subdirectories at path. Returns None if the test |
| 353 | is itself a job directory. Does not recurse into the subdirs. |
| 354 | """ |
| 355 | # if there's a .machines file, use it to get the subdirs |
jadmanski | 0afbb63 | 2008-06-06 21:10:57 +0000 | [diff] [blame] | 356 | machine_list = os.path.join(path, ".machines") |
| 357 | if os.path.exists(machine_list): |
jadmanski | 42fbd07 | 2009-01-30 15:07:05 +0000 | [diff] [blame] | 358 | subdirs = set(line.strip() for line in file(machine_list)) |
| 359 | existing_subdirs = set(subdir for subdir in subdirs |
| 360 | if os.path.exists(os.path.join(path, subdir))) |
| 361 | if len(existing_subdirs) != 0: |
| 362 | return existing_subdirs |
jadmanski | 8e9c257 | 2008-11-11 00:29:02 +0000 | [diff] [blame] | 363 | |
| 364 | # if this dir contains ONLY subdirectories, return them |
| 365 | contents = set(os.listdir(path)) |
| 366 | contents.discard(".parse.lock") |
| 367 | subdirs = set(sub for sub in contents if |
| 368 | os.path.isdir(os.path.join(path, sub))) |
| 369 | if len(contents) == len(subdirs) != 0: |
| 370 | return subdirs |
| 371 | |
| 372 | # this is a job directory, or something else we don't understand |
| 373 | return None |
| 374 | |
| 375 | |
def parse_leaf_path(db, path, level, reparse, mail_on_failure):
    """Parse a leaf path.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.

    @returns: The job name of the parsed job, e.g. '123-chromeos-test/host1'
    """
    # The job name is the last |level| path components joined back together.
    jobname = "/".join(path.split("/")[-level:])
    try:
        db.run_with_retry(parse_one, db, jobname, path, reparse,
                          mail_on_failure)
    except Exception:
        # Log and swallow the failure so one bad job doesn't stop the
        # rest of the run; the caller still receives the job name.
        traceback.print_exc()
    return jobname
mbligh | a48eeb2 | 2009-03-11 16:44:43 +0000 | [diff] [blame] | 395 | |
| 396 | |
def parse_path(db, path, level, reparse, mail_on_failure):
    """Parse a path

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param reparse: True/False, whether this is reparsing of the job.
    @param mail_on_failure: whether to send email on FAILED test.

    @returns: A set of job names of the parsed jobs.
            set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    """
    processed_jobs = set()
    job_subdirs = _get_job_subdirs(path)
    if job_subdirs is None:
        # single machine job
        processed_jobs.add(parse_leaf_path(db, path, level, reparse,
                                           mail_on_failure))
        return processed_jobs

    # parse status.log in current directory, if it exists. multi-machine
    # synchronous server side tests record output in this directory. without
    # this check, we do not parse these results.
    if os.path.exists(os.path.join(path, 'status.log')):
        processed_jobs.add(parse_leaf_path(db, path, level, reparse,
                                           mail_on_failure))
    # multi-machine job: recurse into each machine subdirectory.
    for subdir in job_subdirs:
        processed_jobs.update(parse_path(db, os.path.join(path, subdir),
                                         level + 1, reparse, mail_on_failure))
    return processed_jobs
| 428 | |
| 429 | |
def record_parsing(processed_jobs, duration_secs):
    """Record the time spent on parsing to metadata db.

    @param processed_jobs: A set of job names of the parsed jobs.
            set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    @param duration_secs: Total time spent on parsing, in seconds.
    """
    for job_name in processed_jobs:
        afe_job_id, hostname = tko_utils.get_afe_job_id_and_hostname(job_name)
        # Skip names that don't decompose into an AFE job id and a host.
        if not afe_job_id or not hostname:
            tko_utils.dprint('ERROR: can not parse job name %s, '
                             'will not send duration to metadata db.'
                             % job_name)
            continue
        job_overhead.record_state_duration(
                afe_job_id, hostname, job_overhead.STATUS.PARSING,
                duration_secs)
mbligh | bb7b891 | 2006-10-08 03:59:02 +0000 | [diff] [blame] | 449 | |
| 450 | |
def main():
    """Main entrance.

    Parses command line options, locks each job directory, parses its
    results into the tko database, and optionally records the total
    parse duration to the metadata db.
    """
    start_time = datetime.datetime.now()
    # Record the processed jobs so that
    # we can send the duration of parsing to metadata db.
    processed_jobs = set()

    options, args = parse_args()
    results_dir = os.path.abspath(args[0])
    assert os.path.exists(results_dir)

    pid_file_manager = pidfile.PidFileManager("parser", results_dir)

    if options.write_pidfile:
        pid_file_manager.open_file()

    try:
        # build up the list of job dirs to parse
        if options.singledir:
            jobs_list = [results_dir]
        else:
            jobs_list = [os.path.join(results_dir, subdir)
                         for subdir in os.listdir(results_dir)]

        # build up the database
        db = tko_db.db(autocommit=False, host=options.db_host,
                       user=options.db_user, password=options.db_pass,
                       database=options.db_name)

        # parse all the jobs
        for path in jobs_list:
            # Per-directory lock so concurrent parsers don't collide.
            lockfile = open(os.path.join(path, ".parse.lock"), "w")
            flags = fcntl.LOCK_EX
            if options.noblock:
                flags |= fcntl.LOCK_NB
            try:
                fcntl.flock(lockfile, flags)
            except IOError, e:
                # lock is not available and nonblock has been requested
                if e.errno == errno.EWOULDBLOCK:
                    lockfile.close()
                    continue
                else:
                    raise # something unexpected happened
            try:
                new_jobs = parse_path(db, path, options.level, options.reparse,
                                      options.mailit)
                processed_jobs.update(new_jobs)

            finally:
                # Always release the lock, even if parsing raised.
                fcntl.flock(lockfile, fcntl.LOCK_UN)
                lockfile.close()

    except:
        # Record a non-zero exit status in the pidfile, then re-raise.
        pid_file_manager.close_file(1)
        raise
    else:
        pid_file_manager.close_file(0)
    duration_secs = (datetime.datetime.now() - start_time).total_seconds()
    if options.record_duration:
        record_parsing(processed_jobs, duration_secs)
mbligh | 71d340d | 2008-03-05 15:51:16 +0000 | [diff] [blame] | 512 | |
mbligh | 532cb27 | 2007-11-26 18:54:20 +0000 | [diff] [blame] | 513 | |
# Script entry point: parse the results directories named on the command line.
if __name__ == "__main__":
    main()