#!/usr/bin/python -u

import collections
import datetime
import errno
import fcntl
import json
import optparse
import os
import socket
import subprocess
import sys
import traceback

import common
from autotest_lib.client.common_lib import global_config
from autotest_lib.client.common_lib import mail, pidfile
from autotest_lib.client.common_lib import utils
from autotest_lib.client.common_lib.cros.graphite import autotest_es
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.tko import models as tko_models
from autotest_lib.server import site_utils
from autotest_lib.server.cros.dynamic_suite import constants
from autotest_lib.site_utils import job_overhead
from autotest_lib.site_utils import sponge_utils
from autotest_lib.tko import db as tko_db, utils as tko_utils
from autotest_lib.tko import models, parser_lib
from autotest_lib.tko.perf_upload import perf_uploader

_ParseOptions = collections.namedtuple(
        'ParseOptions', ['reparse', 'mail_on_failure', 'dry_run',
                         'suite_report', 'datastore_creds',
                         'export_to_gcloud_path'])
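
# A sketch of how the tuple is typically built (mirrors main() below; the
# values here are hypothetical):
#   _ParseOptions(reparse=False, mail_on_failure=False, dry_run=True,
#                 suite_report=False, datastore_creds=None,
#                 export_to_gcloud_path=None)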

def parse_args():
    """Parse args."""
    # build up our options parser and parse sys.argv
    parser = optparse.OptionParser()
    parser.add_option("-m", help="Send mail for FAILED tests",
                      dest="mailit", action="store_true")
    parser.add_option("-r", help="Reparse the results of a job",
                      dest="reparse", action="store_true")
    parser.add_option("-o", help="Parse a single results directory",
                      dest="singledir", action="store_true")
    parser.add_option("-l", help=("Levels of subdirectories to include "
                                  "in the job name"),
                      type="int", dest="level", default=1)
    parser.add_option("-n", help="No blocking on an existing parse",
                      dest="noblock", action="store_true")
    parser.add_option("-s", help="Database server hostname",
                      dest="db_host", action="store")
    parser.add_option("-u", help="Database username", dest="db_user",
                      action="store")
    parser.add_option("-p", help="Database password", dest="db_pass",
                      action="store")
    parser.add_option("-d", help="Database name", dest="db_name",
                      action="store")
    parser.add_option("--dry-run", help="Do not actually commit any results.",
                      dest="dry_run", action="store_true", default=False)
    parser.add_option("--write-pidfile",
                      help="write pidfile (.parser_execute)",
                      dest="write_pidfile", action="store_true",
                      default=False)
    parser.add_option("--record-duration",
                      help="Record timing to metadata db",
                      dest="record_duration", action="store_true",
                      default=False)
    parser.add_option("--suite-report",
                      help=("Allows the parsing job to attempt to create a "
                            "suite timeline report, if it detects that the "
                            "job being parsed is a suite job."),
                      dest="suite_report", action="store_true",
                      default=False)
    parser.add_option("--datastore-creds",
                      help=("The path to the gcloud datastore credentials "
                            "file, which will be used to upload the suite "
                            "timeline report to gcloud. If not specified, "
                            "the one defined in shadow_config will be used."),
                      dest="datastore_creds", action="store", default=None)
    parser.add_option("--export-to-gcloud-path",
                      help=("The path to the export_to_gcloud script. Please "
                            "find the chromite path on your server; the "
                            "script is under chromite/bin/."),
                      dest="export_to_gcloud_path", action="store",
                      default=None)
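
    # Example invocations (results paths are hypothetical):
    #   tko/parse.py /usr/local/autotest/results            # parse all jobs
    #   tko/parse.py -r -o /usr/local/autotest/results/123-user  # reparse one
    #   tko/parse.py -n -l 2 /usr/local/autotest/results/123-user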
    options, args = parser.parse_args()

    # we need a results directory
    if len(args) == 0:
        tko_utils.dprint("ERROR: at least one results directory must "
                         "be provided")
        parser.print_help()
        sys.exit(1)

    if not options.datastore_creds:
        gcloud_creds = global_config.global_config.get_config_value(
            'GCLOUD', 'cidb_datastore_writer_creds', default=None)
        options.datastore_creds = (site_utils.get_creds_abspath(gcloud_creds)
                                   if gcloud_creds else None)

    if not options.export_to_gcloud_path:
        export_script = 'chromiumos/chromite/bin/export_to_gcloud'
        # If it is a lab server, the script is under ~chromeos-test/
        if os.path.exists(os.path.expanduser('~chromeos-test/%s' %
                                             export_script)):
            path = os.path.expanduser('~chromeos-test/%s' % export_script)
        # If it is a local workstation, it is probably under ~/
        elif os.path.exists(os.path.expanduser('~/%s' % export_script)):
            path = os.path.expanduser('~/%s' % export_script)
        # If it is not found anywhere, the default will be set to None.
        else:
            path = None
        options.export_to_gcloud_path = path

    # pass the options back
    return options, args


def format_failure_message(jobname, kernel, testname, status, reason):
    """Format failure message with the given information.

    @param jobname: String representing the job name.
    @param kernel: String representing the kernel.
    @param testname: String representing the test name.
    @param status: String representing the test status.
    @param reason: String representing the reason.

    @return: Failure message as a string.
    """
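    # For illustration, each field is left-justified and padded to a fixed
    # column width (12/20/12/10 characters), so rows line up under the header
    # that mailfailure() prints below. A hypothetical call:
    #   format_failure_message('123-user/host1', '3.14.0',
    #                          'dummy_Fail.Error', 'FAIL', 'always fail')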
    format_string = "%-12s %-20s %-12s %-10s %s"
    return format_string % (jobname, kernel, testname, status, reason)


def mailfailure(jobname, job, message):
    """Send an email about the failure.

    @param jobname: String representing the job name.
    @param job: A job object.
    @param message: The message to mail.
    """
    message_lines = [""]
    message_lines.append("The following tests FAILED for this job")
    message_lines.append("http://%s/results/%s" %
                         (socket.gethostname(), jobname))
    message_lines.append("")
    message_lines.append(format_failure_message("Job name", "Kernel",
                                                "Test name", "FAIL/WARN",
                                                "Failure reason"))
    message_lines.append(format_failure_message("=" * 8, "=" * 6, "=" * 8,
                                                "=" * 8, "=" * 14))
    message_header = "\n".join(message_lines)

    subject = "AUTOTEST: FAILED tests from job %s" % jobname
    mail.send("", job.user, "", subject, message_header + message)


def _invalidate_original_tests(orig_job_idx, retry_job_idx):
    """Invalidate the original tests that a retry job replaces.

    Whenever a retry job is complete, we want to invalidate the original
    job's test results, so that the consumers of the tko database
    (e.g. tko frontend, wmatrix) can figure out which results are the latest.

    When a retry job is parsed, we retrieve the original job's afe_job_id
    from the retry job's keyvals, which is then converted to a tko job_idx
    and passed into this method as |orig_job_idx|.

    In this method, we are going to invalidate the rows in tko_tests that are
    associated with the original job by flipping their 'invalid' bit to True.
    In addition, in tko_tests, we also maintain a pointer from the retry
    results to the original results, so that later we can always know which
    rows in tko_tests are retries and which are the corresponding original
    results. This is done by setting the field 'invalidates_test_idx' of the
    tests associated with the retry job.

    For example, assume Job(job_idx=105) is retried by Job(job_idx=108); after
    this method is run, their tko_tests rows will look like:
    __________________________________________________________________________
    test_idx | job_idx | test             | ... | invalid | invalidates_test_idx
    10       | 105     | dummy_Fail.Error | ... | 1       | NULL
    11       | 105     | dummy_Fail.Fail  | ... | 1       | NULL
    ...
    20       | 108     | dummy_Fail.Error | ... | 0       | 10
    21       | 108     | dummy_Fail.Fail  | ... | 0       | 11
    __________________________________________________________________________
    Note the invalid bits of the rows for Job(job_idx=105) are set to '1'.
    And the 'invalidates_test_idx' fields of the rows for Job(job_idx=108)
    are set to 10 and 11 (the test_idx of the rows for the original job).

    @param orig_job_idx: An integer representing the original job's
                         tko job_idx. Tests associated with this job will
                         be marked as 'invalid'.
    @param retry_job_idx: An integer representing the retry job's
                          tko job_idx. The field 'invalidates_test_idx'
                          of the tests associated with this job will be
                          updated.

    """
    msg = 'orig_job_idx: %s, retry_job_idx: %s' % (orig_job_idx, retry_job_idx)
    if not orig_job_idx or not retry_job_idx:
        tko_utils.dprint('ERROR: Could not invalidate tests: ' + msg)
        return
    # Using django models here makes things easier, but make sure that
    # before this method is called, all other relevant transactions have been
    # committed to avoid race conditions. In the long run, we might consider
    # making the rest of the parser use django models.
    orig_tests = tko_models.Test.objects.filter(job__job_idx=orig_job_idx)
    retry_tests = tko_models.Test.objects.filter(job__job_idx=retry_job_idx)

    # Invalidate original tests.
    orig_tests.update(invalid=True)

    # Maintain a dictionary that maps (test, subdir) to original tests.
    # Note that within the scope of a job, (test, subdir) uniquely
    # identifies a test run, but 'test' does not.
    # In a control file, one could run the same test with different
    # 'subdir_tag's, for example,
    #   job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_1')
    #   job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_2')
    # In tko, we will get
    #   (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_1')
    #   (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_2')
    invalidated_tests = {(orig_test.test, orig_test.subdir): orig_test
                         for orig_test in orig_tests}
    for retry in retry_tests:
        # It is possible that (retry.test, retry.subdir) doesn't exist
        # in invalidated_tests. This could happen when the original job
        # didn't run some of its tests. For example, a DUT might have been
        # offline since the beginning of the job, in which case
        # invalidated_tests will only have one entry for 'SERVER_JOB'.
        orig_test = invalidated_tests.get((retry.test, retry.subdir), None)
        if orig_test:
            retry.invalidates_test = orig_test
            retry.save()
    tko_utils.dprint('DEBUG: Invalidated tests associated to job: ' + msg)


def parse_one(db, jobname, path, parse_options):
    """Parse a single job. Optionally send email on failure.

    @param db: database object.
    @param jobname: the tag used to search for existing job in db,
                    e.g. '1234-chromeos-test/host1'
    @param path: The path to the results to be parsed.
    @param parse_options: _ParseOptions instance.
    """
    reparse = parse_options.reparse
    mail_on_failure = parse_options.mail_on_failure
    dry_run = parse_options.dry_run
    suite_report = parse_options.suite_report
    datastore_creds = parse_options.datastore_creds
    export_to_gcloud_path = parse_options.export_to_gcloud_path

    tko_utils.dprint("\nScanning %s (%s)" % (jobname, path))
    old_job_idx = db.find_job(jobname)
    # old_tests is a dict from tuple (test_name, subdir) to test_idx
    old_tests = {}
    if old_job_idx is not None:
        if not reparse:
            tko_utils.dprint("! Job is already parsed, done")
            return

        raw_old_tests = db.select("test_idx,subdir,test", "tko_tests",
                                  {"job_idx": old_job_idx})
        if raw_old_tests:
            old_tests = dict(((test, subdir), test_idx)
                             for test_idx, subdir, test in raw_old_tests)

    # look up the status version
    job_keyval = models.job.read_keyval(path)
    status_version = job_keyval.get("status_version", 0)
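    # The keyval file at <path>/keyval holds one 'key=value' pair per line;
    # a line such as 'status_version=1' is a plausible (hypothetical) example.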

    # parse out the job
    parser = parser_lib.parser(status_version)
    job = parser.make_job(path)
    status_log = os.path.join(path, "status.log")
    if not os.path.exists(status_log):
        status_log = os.path.join(path, "status")
    if not os.path.exists(status_log):
        tko_utils.dprint("! Unable to parse job, no status file")
        return

    # parse the status logs
    tko_utils.dprint("+ Parsing dir=%s, jobname=%s" % (path, jobname))
    status_lines = open(status_log).readlines()
    parser.start(job)
    tests = parser.end(status_lines)

    # parser.end can return the same object multiple times, so filter out dups
    job.tests = []
    already_added = set()
    for test in tests:
        if test not in already_added:
            already_added.add(test)
            job.tests.append(test)

    # try to port test_idx over from the old tests, but if old tests stop
    # matching up with new ones just give up
    if reparse and old_job_idx is not None:
        job.index = old_job_idx
        for test in job.tests:
            test_idx = old_tests.pop((test.testname, test.subdir), None)
            if test_idx is not None:
                test.test_idx = test_idx
            else:
                tko_utils.dprint("! Reparse returned new test "
                                 "testname=%r subdir=%r" %
                                 (test.testname, test.subdir))
        if not dry_run:
            for test_idx in old_tests.itervalues():
                where = {'test_idx': test_idx}
                db.delete('tko_iteration_result', where)
                db.delete('tko_iteration_perf_value', where)
                db.delete('tko_iteration_attributes', where)
                db.delete('tko_test_attributes', where)
                db.delete('tko_test_labels_tests', {'test_id': test_idx})
                db.delete('tko_tests', where)

    job.build = None
    job.board = None
    job.build_version = None
    job.suite = None
    if job.label:
        label_info = site_utils.parse_job_name(job.label)
        if label_info:
            job.build = label_info.get('build', None)
            job.build_version = label_info.get('build_version', None)
            job.board = label_info.get('board', None)
            job.suite = label_info.get('suite', None)

    # Upload job details to Sponge.
    if not dry_run:
        sponge_url = sponge_utils.upload_results(job, log=tko_utils.dprint)
        if sponge_url:
            job.keyval_dict['sponge_url'] = sponge_url

    # check for failures
    message_lines = [""]
    job_successful = True
    for test in job.tests:
        if not test.subdir:
            continue
        tko_utils.dprint("* testname, status, reason: %s %s %s"
                         % (test.subdir, test.status, test.reason))
        if test.status != 'GOOD':
            job_successful = False
            message_lines.append(format_failure_message(
                jobname, test.kernel.base, test.subdir,
                test.status, test.reason))
    try:
        message = "\n".join(message_lines)

        if not dry_run:
            # send out an email report of failure
            if len(message) > 2 and mail_on_failure:
                tko_utils.dprint("Sending email report of failure on %s to %s"
                                 % (jobname, job.user))
                mailfailure(jobname, job, message)

            # write the job into the database.
            job_data = db.insert_job(
                jobname, job,
                parent_job_id=job_keyval.get(constants.PARENT_JOB_ID, None))

            # Upload perf values to the perf dashboard, if applicable.
            for test in job.tests:
                perf_uploader.upload_test(job, test, jobname)

            # Although the cursor has autocommit, we still need to force it
            # to commit existing changes before we can use django models;
            # otherwise it will deadlock when django models try to start a
            # new transaction while the current one has not finished yet.
            db.commit()

            # Handle retry job.
            orig_afe_job_id = job_keyval.get(constants.RETRY_ORIGINAL_JOB_ID,
                                             None)
            if orig_afe_job_id:
                orig_job_idx = tko_models.Job.objects.get(
                        afe_job_id=orig_afe_job_id).job_idx
                _invalidate_original_tests(orig_job_idx, job.index)
    except Exception as e:
        metadata = {'path': path, 'error': str(e),
                    'details': traceback.format_exc()}
        tko_utils.dprint("Hit exception while uploading to tko db:\n%s" %
                         traceback.format_exc())
        autotest_es.post(use_http=True, type_str='parse_failure',
                         metadata=metadata)
        raise

    # Serialize the job into a binary file.
    try:
        from autotest_lib.tko import tko_pb2
        from autotest_lib.tko import job_serializer

        serializer = job_serializer.JobSerializer()
        binary_file_name = os.path.join(path, "job.serialize")
        serializer.serialize_to_binary(job, jobname, binary_file_name)

        if reparse:
            site_export_file = "autotest_lib.tko.site_export"
            site_export = utils.import_site_function(__file__,
                                                     site_export_file,
                                                     "site_export",
                                                     _site_export_dummy)
            site_export(binary_file_name)

    except ImportError:
        tko_utils.dprint("DEBUG: tko_pb2.py doesn't exist. Create it by "
                         "compiling tko/tko.proto.")

    if not dry_run:
        db.commit()

    # Generate a suite report.
    # Check whether this is a suite job: a suite job is hostless, so its
    # jobname will be <JOB_ID>-<USERNAME>/hostless, and its suite field will
    # not be NULL.
    try:
        if suite_report and jobname.endswith('/hostless') and job_data['suite']:
            tko_utils.dprint('Start dumping suite timing report...')
            timing_log = os.path.join(path, 'suite_timing.log')
            dump_cmd = ("%s/site_utils/dump_suite_report.py %s "
                        "--output='%s' --debug" %
                        (common.autotest_dir, job_data['afe_job_id'],
                         timing_log))
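            # For reference, the assembled command looks roughly like this
            # (job id and paths are hypothetical):
            #   /usr/local/autotest/site_utils/dump_suite_report.py 123 \
            #       --output='/results/123-user/hostless/suite_timing.log' \
            #       --debug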
            subprocess.check_output(dump_cmd, shell=True)
            tko_utils.dprint('Successfully finished dumping suite timing '
                             'report')

            if (datastore_creds and export_to_gcloud_path
                    and os.path.exists(export_to_gcloud_path)):
                upload_cmd = ("%s %s %s" %
                              (export_to_gcloud_path, datastore_creds,
                               timing_log))
                tko_utils.dprint('Start exporting timeline report to gcloud')
                subprocess.check_output(upload_cmd, shell=True)
                tko_utils.dprint('Successfully exported timeline report to '
                                 'gcloud')
            else:
                tko_utils.dprint('DEBUG: skip exporting suite timeline to '
                                 'gcloud, because either the gcloud creds or '
                                 'the export_to_gcloud script is not found.')
    except Exception as e:
        tko_utils.dprint("WARNING: failed to dump/export suite report. "
                         "Error:\n%s" % e)

    # Mark GS_OFFLOADER_NO_OFFLOAD in gs_offloader_instructions at the end of
    # the function, so that any failure (e.g., a db connection error) will
    # prevent gs_offloader_instructions from being updated, and logs can
    # still be uploaded for troubleshooting.
    if job_successful:
        # Check if we should not offload this test's results.
        if job_keyval.get(constants.JOB_OFFLOAD_FAILURES_KEY, False):
            # Update the gs_offloader_instructions json file.
            gs_instructions_file = os.path.join(
                    path, constants.GS_OFFLOADER_INSTRUCTIONS)
            gs_offloader_instructions = {}
            if os.path.exists(gs_instructions_file):
                with open(gs_instructions_file, 'r') as f:
                    gs_offloader_instructions = json.load(f)

            gs_offloader_instructions[constants.GS_OFFLOADER_NO_OFFLOAD] = True
            with open(gs_instructions_file, 'w') as f:
                json.dump(gs_offloader_instructions, f)
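            # The instructions file is a flat JSON dict. Assuming the
            # GS_OFFLOADER_NO_OFFLOAD constant evaluates to the string
            # 'no_offload' (an assumption, not verified here), the file
            # would now contain: {"no_offload": true}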


def _site_export_dummy(binary_file_name):
    pass


def _get_job_subdirs(path):
    """
    Returns a list of job subdirectories at path. Returns None if the path
    is itself a job directory. Does not recurse into the subdirs.
    """
    # if there's a .machines file, use it to get the subdirs
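    # (.machines holds one results subdirectory per line, typically a
    # hostname; hypothetical example contents: 'host1' and 'host2', each on
    # its own line.)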
    machine_list = os.path.join(path, ".machines")
    if os.path.exists(machine_list):
        subdirs = set(line.strip() for line in open(machine_list))
        existing_subdirs = set(subdir for subdir in subdirs
                               if os.path.exists(os.path.join(path, subdir)))
        if len(existing_subdirs) != 0:
            return existing_subdirs

    # if this dir contains ONLY subdirectories, return them
    contents = set(os.listdir(path))
    contents.discard(".parse.lock")
    subdirs = set(sub for sub in contents if
                  os.path.isdir(os.path.join(path, sub)))
    if len(contents) == len(subdirs) != 0:
        return subdirs

    # this is a job directory, or something else we don't understand
    return None


def parse_leaf_path(db, path, level, parse_options):
    """Parse a leaf path.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param parse_options: _ParseOptions instance.

    @returns: The job name of the parsed job, e.g. '123-chromeos-test/host1'
    """
    job_elements = path.split("/")[-level:]
    jobname = "/".join(job_elements)
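    # e.g. (hypothetical) path='/usr/local/autotest/results/123-user/host1'
    # parsed with level=2 yields jobname='123-user/host1'.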
    try:
        db.run_with_retry(parse_one, db, jobname, path, parse_options)
    except Exception as e:
        tko_utils.dprint("Error parsing leaf path: %s\nException:\n%s\n%s" %
                         (path, e, traceback.format_exc()))
    return jobname


def parse_path(db, path, level, parse_options):
    """Parse a path.

    @param db: database handle.
    @param path: The path to the results to be parsed.
    @param level: Integer, level of subdirectories to include in the job name.
    @param parse_options: _ParseOptions instance.

    @returns: A set of job names of the parsed jobs, e.g.
              set(['123-chromeos-test/host1', '123-chromeos-test/host2'])
    """
    processed_jobs = set()
    job_subdirs = _get_job_subdirs(path)
    if job_subdirs is not None:
        # Parse status.log in the current directory, if it exists.
        # Multi-machine synchronous server-side tests record output in this
        # directory; without this check, we would not parse those results.
        if os.path.exists(os.path.join(path, 'status.log')):
            new_job = parse_leaf_path(db, path, level, parse_options)
            processed_jobs.add(new_job)
        # multi-machine job
        for subdir in job_subdirs:
            jobpath = os.path.join(path, subdir)
            new_jobs = parse_path(db, jobpath, level + 1, parse_options)
            processed_jobs.update(new_jobs)
    else:
        # single machine job
        new_job = parse_leaf_path(db, path, level, parse_options)
        processed_jobs.add(new_job)
    return processed_jobs


def record_parsing(processed_jobs, duration_secs):
    """Record the time spent on parsing to the metadata db.

    @param processed_jobs: A set of job names of the parsed jobs, e.g.
                           set(['123-chromeos-test/host1',
                                '123-chromeos-test/host2'])
    @param duration_secs: Total time spent on parsing, in seconds.
    """

    for job_name in processed_jobs:
        job_id, hostname = tko_utils.get_afe_job_id_and_hostname(job_name)
        if not job_id or not hostname:
            tko_utils.dprint('ERROR: cannot parse job name %s, '
                             'will not send duration to metadata db.'
                             % job_name)
            continue
        job_overhead.record_state_duration(
                job_id, hostname, job_overhead.STATUS.PARSING,
                duration_secs)


def main():
    """Main entry point."""
    start_time = datetime.datetime.now()
    # Record the processed jobs so that we can send the parsing duration
    # to the metadata db.
    processed_jobs = set()

    options, args = parse_args()
    parse_options = _ParseOptions(options.reparse, options.mailit,
                                  options.dry_run, options.suite_report,
                                  options.datastore_creds,
                                  options.export_to_gcloud_path)
    results_dir = os.path.abspath(args[0])
    assert os.path.exists(results_dir)

    pid_file_manager = pidfile.PidFileManager("parser", results_dir)

    if options.write_pidfile:
        pid_file_manager.open_file()

    try:
        # build up the list of job dirs to parse
        if options.singledir:
            jobs_list = [results_dir]
        else:
            jobs_list = [os.path.join(results_dir, subdir)
                         for subdir in os.listdir(results_dir)]

        # build up the database
        db = tko_db.db(autocommit=False, host=options.db_host,
                       user=options.db_user, password=options.db_pass,
                       database=options.db_name)

        # parse all the jobs
        for path in jobs_list:
            lockfile = open(os.path.join(path, ".parse.lock"), "w")
            flags = fcntl.LOCK_EX
            if options.noblock:
                flags |= fcntl.LOCK_NB
            try:
                fcntl.flock(lockfile, flags)
            except IOError as e:
                # lock is not available and nonblock has been requested
                if e.errno == errno.EWOULDBLOCK:
                    lockfile.close()
                    continue
                else:
                    raise  # something unexpected happened
            try:
                new_jobs = parse_path(db, path, options.level, parse_options)
                processed_jobs.update(new_jobs)

            finally:
                fcntl.flock(lockfile, fcntl.LOCK_UN)
                lockfile.close()

    except Exception as e:
        pid_file_manager.close_file(1)

        metadata = {'results_dir': results_dir,
                    'error': str(e),
                    'details': traceback.format_exc()}
        autotest_es.post(use_http=True, type_str='parse_failure_final',
                         metadata=metadata)

        raise
    else:
        pid_file_manager.close_file(0)
    duration_secs = (datetime.datetime.now() - start_time).total_seconds()
    if options.record_duration:
        record_parsing(processed_jobs, duration_secs)


if __name__ == "__main__":
    main()