#!/usr/bin/env python
"""Find special tasks that happened around the given suite jobs."""

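# Example invocation (a sketch; the AFE hostname, suite id and filter values
# below are hypothetical):
#   <this script> --afe <afe_server> --name_filter Repair \
#       --status_filter Completed --output tasks.json 12345678
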
import common
import json
import re
import sys

from autotest_lib.client.common_lib import time_utils
from autotest_lib.server import frontend
from autotest_lib.server.lib import status_history
from autotest_lib.server.lib import suite_report
from chromite.lib import cidb
from chromite.lib import commandline
from chromite.lib import cros_logging as logging

HostJobHistory = status_history.HostJobHistory


def GetParser():
    """Creates the argparse parser."""
    parser = commandline.ArgumentParser(description=__doc__)
    parser.add_argument('--input', type=str, action='store',
                        help='Input JSON file')
    parser.add_argument('--output', type=str, action='store',
                        help='Output JSON file')
    parser.add_argument('--name_filter', type=str, action='store',
                        help='Name of task to look for')
    parser.add_argument('--status_filter', type=str, action='store',
                        help='Status of task to look for')
    parser.add_argument('--afe', type=str, action='store',
                        help='AFE server to connect to')
    parser.add_argument('suite_ids', type=str, nargs='*', action='store',
                        help='Suite ids to resolve')
    return parser


def GetSuiteHQEs(suite_job_id, look_past_seconds, afe=None, tko=None):
    """Get the host queue entries for active DUTs during a suite job.

    @param suite_job_id: Suite's AFE job id.
    @param look_past_seconds: Number of seconds past the end of the suite
                              job to look for next HQEs.
    @param afe: AFE database handle.
    @param tko: TKO database handle.

    @returns A dictionary keyed on hostname to a list of host queue entry
             dictionaries. HQE dictionary contains the following keys:
             name, hostname, job_status, job_url, gs_url, start_time, end_time
    """
    if afe is None:
        afe = frontend.AFE()
    if tko is None:
        tko = frontend.TKO()

    # Find the suite job and when it ran.
    statuses = tko.get_job_test_statuses_from_db(suite_job_id)
    if len(statuses):
        for s in statuses:
            if s.test_started_time == 'None' or s.test_finished_time == 'None':
                logging.error(
                        'TKO entry missing time: %s %s %s %s %s %s %s %s %s' %
                        (s.id, s.test_name, s.status, s.reason,
                         s.test_started_time, s.test_finished_time,
                         s.job_owner, s.hostname, s.job_tag))
        start_time = min(int(time_utils.to_epoch_time(s.test_started_time))
                         for s in statuses if s.test_started_time != 'None')
        finish_time = max(int(time_utils.to_epoch_time(
                s.test_finished_time)) for s in statuses
                if s.test_finished_time != 'None')
    else:
        start_time = None
        finish_time = None

    # If there is no start time or finish time, won't be able to get HQEs.
    if start_time is None or finish_time is None:
        return {}

    # Find all the HQE entries.
    child_jobs = afe.get_jobs(parent_job_id=suite_job_id)
    child_job_ids = {j.id for j in child_jobs}
    hqes = afe.get_host_queue_entries(job_id__in=list(child_job_ids))
    hostnames = {h.host.hostname for h in hqes if h.host}
    host_hqes = {}
    for hostname in hostnames:
        history = HostJobHistory.get_host_history(afe, hostname,
                                                  start_time,
                                                  finish_time +
                                                  look_past_seconds)
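        # h.job_url points at the AFE's retrieve_logs.cgi page; the re.sub
        # below rewrites it to the matching Google Storage path, roughly
        # (hypothetical job tag):
        #   http://<afe>/tko/retrieve_logs.cgi?job=/results/1234-repair/host1
        #     -> gs://chromeos-autotest-results/1234-repair/host1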
        for h in history:
            gs_url = re.sub(r'http://.*/tko/retrieve_logs.cgi\?job=/results',
                            r'gs://chromeos-autotest-results',
                            h.job_url)
            entry = {
                'name': h.name,
                'hostname': history.hostname,
                'job_status': h.job_status,
                'job_url': h.job_url,
                'gs_url': gs_url,
                'start_time': h.start_time,
                'end_time': h.end_time,
            }
            host_hqes.setdefault(history.hostname, []).append(entry)

    return host_hqes


def FindSpecialTasks(suite_job_id, look_past_seconds=1800,
                     name_filter=None, status_filter=None, afe=None, tko=None):
    """Find special tasks that happened around a suite job.

    @param suite_job_id: Suite's AFE job id.
    @param look_past_seconds: Number of seconds past the end of the suite
                              job to look for next HQEs.
    @param name_filter: If not None, only return tasks with this name.
    @param status_filter: If not None, only return tasks with this status.
    @param afe: AFE database handle.
    @param tko: TKO database handle.

    @returns A list of host queue entry dictionaries, each containing the
             following keys: name, hostname, job_status, job_url, gs_url,
             start_time, end_time, next_entry.
    """
    host_hqes = GetSuiteHQEs(suite_job_id, look_past_seconds=look_past_seconds,
                             afe=afe, tko=tko)

    task_entries = []
    for hostname in host_hqes:
        host_hqes[hostname] = sorted(host_hqes[hostname],
                                     key=lambda k: k['start_time'])
        current = None
        for e in host_hqes[hostname]:
            # Check if there is an entry to finish off by adding a pointer
            # to this new entry.
            if current:
                logging.debug(' next task: %(name)s %(job_status)s '
                              '%(gs_url)s %(start_time)s %(end_time)s' % e)
                # Only record a pointer to the next entry if filtering some out.
                if name_filter or status_filter:
                    current['next_entry'] = e
                task_entries.append(current)
                current = None

            # Perform matching.
            if ((name_filter and e['name'] != name_filter) or
                (status_filter and e['job_status'] != status_filter)):
                continue

            # Instead of appending right away, wait until the next entry
            # to add a pointer to it.
            current = e
            logging.debug('Task %(name)s: %(job_status)s %(hostname)s '
                          '%(gs_url)s %(start_time)s %(end_time)s' % e)

        # Add the last one even if a next entry wasn't found.
        if current:
            task_entries.append(current)

    return task_entries

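# Programmatic use, sketched with a hypothetical suite job id (mirrors the
# calls made in main() below; assumes reachable AFE/TKO instances):
#   tasks = FindSpecialTasks(12345678, name_filter='Repair',
#                            status_filter='Completed')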
def main(argv):
    parser = GetParser()
    options = parser.parse_args(argv)

    afe = None
    if options.afe:
        afe = frontend.AFE(server=options.afe)
    tko = frontend.TKO()

    special_tasks = []
    builds = []

    # Handle a JSON file being specified.
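    # The input file is expected to look roughly like this (hypothetical
    # ids; only the keys read below matter, other build fields are passed
    # through to the output):
    #   {"builds": [{"suite_ids": [12345678, 12345679], ...}, ...]}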
    if options.input:
        with open(options.input) as f:
            data = json.load(f)
            for build in data.get('builds', []):
                # For each build, amend it to include the list of
                # special tasks for its suite's jobs.
                build.setdefault('special_tasks', {})
                for suite_job_id in build['suite_ids']:
                    suite_tasks = FindSpecialTasks(
                            suite_job_id, name_filter=options.name_filter,
                            status_filter=options.status_filter,
                            afe=afe, tko=tko)
                    special_tasks.extend(suite_tasks)
                    build['special_tasks'][suite_job_id] = suite_tasks
                logging.debug(build)
                builds.append(build)

    # Handle any specifically specified suite IDs.
    for suite_job_id in options.suite_ids:
        special_tasks.extend(FindSpecialTasks(
                suite_job_id, name_filter=options.name_filter,
                status_filter=options.status_filter, afe=afe, tko=tko))

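    # The resulting JSON is shaped roughly as follows ('builds' only appears
    # when an input file supplied builds):
    #   {"special_tasks": [...], "name_filter": ..., "status_filter": ...,
    #    "builds": [...]}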
    # Output a resulting JSON file.
    with open(options.output, 'w') if options.output else sys.stdout as f:
        output = {
            'special_tasks': special_tasks,
            'name_filter': options.name_filter,
            'status_filter': options.status_filter,
        }
        if len(builds):
            output['builds'] = builds
        json.dump(output, f)

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))