[autotest] generate a suite timing report when parsing a suite job
Teach tko/parse.py to generate a suite timing report when it is
parsing the suite results.
BUG=chromium:641093
TEST=1. unittest
2. trybot
3. copy all the code to a testing server, and run
$./tko/parse.py -l -2 -r --suite-report -o
/usr/local/autotest/results/24-chromeos-test/ to generate the event.log
report.
4. Run a dummy suite with the trybot image on a test server. Event.log is
correctly generated under the results/ folder.
Change-Id: Ie36e51167c42f1d2c62c523b4989c6daf69842fd
Reviewed-on: https://chromium-review.googlesource.com/417881
Commit-Ready: Shuqian Zhao <shuqianz@chromium.org>
Tested-by: Shuqian Zhao <shuqianz@chromium.org>
Reviewed-by: Shuqian Zhao <shuqianz@chromium.org>
diff --git a/scheduler/postjob_task.py b/scheduler/postjob_task.py
index a817534..9310c57 100644
--- a/scheduler/postjob_task.py
+++ b/scheduler/postjob_task.py
@@ -313,7 +313,7 @@
def _generate_command(self, results_dir):
return [_parser_path, '--write-pidfile', '--record-duration',
- '-l', '2', '-r', '-o', results_dir]
+ '--suite-report', '-l', '2', '-r', '-o', results_dir]
@property
diff --git a/tko/db.py b/tko/db.py
index 9da76e7..99f9621 100644
--- a/tko/db.py
+++ b/tko/db.py
@@ -447,6 +447,8 @@
@param job: The job object.
@param parent_job_id: The parent job id.
@param commit: If commit the transaction .
+
+ @return The dict of data inserted into the tko_jobs table.
"""
job.machine_idx = self.lookup_machine(job.machine)
if not job.machine_idx:
@@ -490,6 +492,8 @@
for test in job.tests:
self.insert_test(job, test, commit=commit)
+ return data
+
def update_job_keyvals(self, job, commit=None):
"""Updates the job key values.
diff --git a/tko/parse.py b/tko/parse.py
index 88a3e37..d98ea61 100755
--- a/tko/parse.py
+++ b/tko/parse.py
@@ -8,6 +8,7 @@
import optparse
import os
import socket
+import subprocess
import sys
import traceback
@@ -26,7 +27,7 @@
_ParseOptions = collections.namedtuple(
- 'ParseOptions', ['reparse', 'mail_on_failure', 'dry_run'])
+ 'ParseOptions', ['reparse', 'mail_on_failure', 'dry_run', 'suite_report'])
def parse_args():
@@ -62,6 +63,12 @@
help="Record timing to metadata db",
dest="record_duration", action="store_true",
default=False)
+ parser.add_option("--suite-report",
+ help=("Allows parsing job to attempt to create a suite "
+ "timing report, if it detects that the job being "
+ "parsed is a suite job."),
+ dest="suite_report", action="store_true",
+ default=False)
options, args = parser.parse_args()
# we need a results directory
@@ -204,6 +211,7 @@
reparse = parse_options.reparse
mail_on_failure = parse_options.mail_on_failure
dry_run = parse_options.dry_run
+ suite_report = parse_options.suite_report
tko_utils.dprint("\nScanning %s (%s)" % (jobname, path))
old_job_idx = db.find_job(jobname)
@@ -300,9 +308,9 @@
mailfailure(jobname, job, message)
# write the job into the database.
- db.insert_job(jobname, job,
- parent_job_id=job_keyval.get(constants.PARENT_JOB_ID,
- None))
+ job_data = db.insert_job(
+ jobname, job,
+ parent_job_id=job_keyval.get(constants.PARENT_JOB_ID, None))
# Upload perf values to the perf dashboard, if applicable.
for test in job.tests:
@@ -354,6 +362,25 @@
if not dry_run:
db.commit()
+ # Generate a suite report.
+ # Check whether this is a suite job, a suite job will be a hostless job, its
+ # jobname will be <JOB_ID>-<USERNAME>/hostless, the suite field will not be
+ # NULL
+ try:
+ if suite_report and jobname.endswith('/hostless') and job_data['suite']:
+ tko_utils.dprint('Start dumping suite timing report...')
+ timing_log = os.path.join(path, 'suite_timing.log')
+ dump_cmd = ("%s/site_utils/dump_suite_report.py %s "
+ "--output='%s' --debug" %
+ (common.autotest_dir, job_data['afe_job_id'],
+ timing_log))
+ subprocess.check_output(dump_cmd, shell=True)
+ tko_utils.dprint('Successfully finish dumping suite timing report')
+
+ #TODO(shuqianz), add code to upload the event.log to datastore later
+ except Exception as e:
+ tko_utils.dprint("WARNING: fail to dump suit report. Error:\n%s" % e)
+
# Mark GS_OFFLOADER_NO_OFFLOAD in gs_offloader_instructions at the end of
# the function, so any failure, e.g., db connection error, will stop
# gs_offloader_instructions being updated, and logs can be uploaded for
@@ -486,7 +513,7 @@
options, args = parse_args()
parse_options = _ParseOptions(options.reparse, options.mailit,
- options.dry_run)
+ options.dry_run, options.suite_report)
results_dir = os.path.abspath(args[0])
assert os.path.exists(results_dir)