# pylint: disable-msg=C0111

"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing a uniquely identifying field (this option should
   seldom be used)

When specifying foreign key fields (e.g. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or
the name of the object.

All get* functions return lists of dictionaries.  Each dictionary represents
one object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import sys
import datetime

from django.db.models import Count
import common
from autotest_lib.client.common_lib import priorities
from autotest_lib.client.common_lib.cros.graphite import autotest_stats
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.frontend.afe import models, model_logic, model_attributes
from autotest_lib.frontend.afe import site_rpc_interface
from autotest_lib.frontend.tko import models as tko_models
from autotest_lib.frontend.tko import rpc_interface as tko_rpc_interface
from autotest_lib.server import frontend
from autotest_lib.server import utils
from autotest_lib.server.cros import provision
from autotest_lib.server.cros.dynamic_suite import tools
from autotest_lib.site_utils import status_history


_timer = autotest_stats.Timer('rpc_interface')

def get_parameterized_autoupdate_image_url(job):
    """Get the parameterized autoupdate image url from a parameterized job."""
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    para_set = job.parameterized_job.parameterizedjobparameter_set
    job_test_para = para_set.get(test_parameter=image_parameter)
    return job_test_para.parameter_value


# labels

def modify_label(id, **data):
    """Modify a label.

    @param id: id or name of a label. More often a label name.
    @param data: New data for a label.
    """
    label_model = models.Label.smart_get(id)

    # Master forwards the RPC to shards
    if not utils.is_shard():
        rpc_utils.fanout_rpc(label_model.host_set.all(), 'modify_label', False,
                             id=id, **data)

    label_model.update_object(data)


def delete_label(id):
    """Delete a label.

    @param id: id or name of a label. More often a label name.
    """
    label_model = models.Label.smart_get(id)

    # Master forwards the RPC to shards
    if not utils.is_shard():
        rpc_utils.fanout_rpc(label_model.host_set.all(), 'delete_label', False,
                             id=id)

    label_model.delete()


def add_label(name, ignore_exception_if_exists=False, **kwargs):
    """Adds a new label of a given name.

    @param name: label name.
    @param ignore_exception_if_exists: If True and the exception was
        thrown due to the duplicated label name when adding a label,
        then suppress the exception. Default is False.
    @param kwargs: keyword args that store more info about a label
        other than the name.
    @return: int/long id of a new label.
    """
    # models.Label.add_object() throws model_logic.ValidationError
    # when it is given a label name that already exists.
    # However, ValidationError can be thrown with different errors,
    # and those errors should be thrown up to the call chain.
    try:
        label = models.Label.add_object(name=name, **kwargs)
    except:
        exc_info = sys.exc_info()
        if ignore_exception_if_exists:
            label = rpc_utils.get_label(name)
            # If the exception is raised not because of duplicated
            # "name", then raise the original exception.
            if label is None:
                raise exc_info[0], exc_info[1], exc_info[2]
        else:
            raise exc_info[0], exc_info[1], exc_info[2]
    return label.id

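# Example usage (illustrative; 'board:lumpy' is a hypothetical label name):
# suppressing the duplicate-name error makes the call idempotent.
#
#   label_id = add_label('board:lumpy', ignore_exception_if_exists=True)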

def add_label_to_hosts(id, hosts):
    """Adds a label of the given id to the given hosts only in local DB.

    @param id: id or name of a label. More often a label name.
    @param hosts: The hostnames of hosts that need the label.

    @raises models.Label.DoesNotExist: If the label with id doesn't exist.
    """
    label = models.Label.smart_get(id)
    host_objs = models.Host.smart_get_bulk(hosts)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)


@rpc_utils.route_rpc_to_master
def label_add_hosts(id, hosts):
    """Adds a label with the given id to the given hosts.

    This method should be run only on the master, not on shards.
    The given label will be created if it doesn't exist, provided the `id`
    supplied is a label name, not an int/long id.

    @param id: id or name of a label. More often a label name.
    @param hosts: A list of hostnames or ids. More often hostnames.

    @raises ValueError: If the id specified is an int/long (label id)
        while the label does not exist.
    """
    try:
        label = models.Label.smart_get(id)
    except models.Label.DoesNotExist:
        # This matches the type checks in smart_get, which is a hack
        # in and of itself. The aim here is to create any non-existent
        # label, which we cannot do if the 'id' specified isn't a label name.
        if isinstance(id, basestring):
            label = models.Label.smart_get(add_label(id))
        else:
            raise ValueError('Label id (%s) does not exist. Please specify '
                             'the argument, id, as a string (label name).'
                             % id)

    host_objs = models.Host.smart_get_bulk(hosts)
    # Make sure the label exists on the shard with the same id
    # as it is on the master.
    # It is possible that the label already exists on a shard, because
    # we add a new label only to the shards of the hosts that the label
    # is going to be attached to.
    # For example, we add a label L1 to a host in shard S1.
    # Master and S1 will have L1 but other shards won't.
    # Later, when we add the same label L1 to hosts in shards S1 and S2,
    # S1 already has the label but S2 doesn't.
    # S2 should get the new label without any problem.
    # We ignore the exception in such a case.
    rpc_utils.fanout_rpc(
            host_objs, 'add_label', name=label.name, id=label.id,
            include_hostnames=False, ignore_exception_if_exists=True)
    rpc_utils.fanout_rpc(host_objs, 'add_label_to_hosts', id=id)

    add_label_to_hosts(id, hosts)

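# Example usage (illustrative; the label and hostnames are hypothetical):
# passing a label *name* lets the master create the label on demand before
# fanning the RPC out to the shards that own the given hosts.
#
#   label_add_hosts('pool:suites', ['host1.example.net', 'host2.example.net'])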

def remove_label_from_hosts(id, hosts):
    """Removes a label of the given id from the given hosts only in local DB.

    @param id: id or name of a label.
    @param hosts: The hostnames of the hosts from which to remove the label.
    """
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


@rpc_utils.route_rpc_to_master
def label_remove_hosts(id, hosts):
    """Removes a label of the given id from the given hosts.

    This method should be run only on the master, not on shards.

    @param id: id or name of a label.
    @param hosts: A list of hostnames or ids. More often hostnames.
    """
    host_objs = models.Host.smart_get_bulk(hosts)
    rpc_utils.fanout_rpc(host_objs, 'remove_label_from_hosts', id=id)

    remove_label_from_hosts(id, hosts)


def get_labels(exclude_filters=(), **filter_data):
    """\
    @param exclude_filters: A sequence of dictionaries of filters.

    @returns A sequence of nested dictionaries of label information.
    """
    labels = models.Label.query_objects(filter_data)
    for exclude_filter in exclude_filters:
        labels = labels.exclude(**exclude_filter)
    return rpc_utils.prepare_rows_as_nested_dicts(labels, ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, lock_reason='',
             protection=None):
    if locked and not lock_reason:
        raise model_logic.ValidationError(
            {'locked': 'Please provide a reason for locking when adding host.'})

    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, lock_reason=lock_reason,
                                  protection=protection).id


@rpc_utils.forward_single_host_rpc_to_shard
def modify_host(id, **data):
    """Modify local attributes of a host.

    If this is called on the master, but the host is assigned to a shard, this
    will also forward the call to the responsible shard. This means, for
    example, that if a host is locked using this function, the change will
    also propagate to the shard.

    @param id: id of the host to modify.
    @param **data: key=value pairs of values to set on the host.
    """
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)

    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """Modify local attributes of multiple hosts.

    If this is called on the master, but one of the hosts that match the
    filters is assigned to a shard, this will also forward the call to the
    responsible shard.

    The filters are always applied on the master, not on the shards. This
    means that if the state of a host differs on the master and on a shard,
    the state on the master will be used. For example:
    A host was synced to Shard 1. On Shard 1 the status of the host was set to
    'Repair Failed'.
    - A call to modify_hosts with host_filter_data={'status': 'Ready'} will
      update the host (both on the shard and on the master), because the state
      of the host as the master knows it is still 'Ready'.
    - A call to modify_hosts with host_filter_data={'status': 'Repair Failed'}
      will not update the host, because the filter doesn't apply on the master.

    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)

    affected_shard_hostnames = set()
    affected_host_ids = []

    # Check all hosts before changing data for exception safety.
    for host in hosts:
        rpc_utils.check_modify_host_locking(host, update_data)
        if host.shard:
            affected_shard_hostnames.add(host.shard.rpc_hostname())
            affected_host_ids.append(host.id)

    if not utils.is_shard():
        # Caution: Changing the filter from the original here. See docstring.
        rpc_utils.run_rpc_on_multiple_hostnames(
                'modify_hosts', affected_shard_hostnames,
                host_filter_data={'id__in': affected_host_ids},
                update_data=update_data)

    for host in hosts:
        host.update_object(update_data)

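# Example usage (illustrative; the filter and values are hypothetical): lock
# every Ready host matching the filter, on the master and on its shard.
#
#   modify_hosts(host_filter_data={'status': 'Ready'},
#                update_data={'locked': True, 'lock_reason': 'maintenance'})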

def add_labels_to_host(id, labels):
    """Adds labels to a given host only in local DB.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.add(*label_objs)


@rpc_utils.route_rpc_to_master
def host_add_labels(id, labels):
    """Adds labels to a given host.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.

    @raises ValidationError: If adding more than one platform label.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    platforms = [label.name for label in label_objs if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})

    host_obj = models.Host.smart_get(id)
    if len(platforms) == 1:
        models.Host.check_no_platform([host_obj])

    rpc_utils.fanout_rpc([host_obj], 'add_labels_to_host', False,
                         id=id, labels=labels)
    add_labels_to_host(id, labels)


def remove_labels_from_host(id, labels):
    """Removes labels from a given host only in local DB.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*label_objs)


@rpc_utils.route_rpc_to_master
def host_remove_labels(id, labels):
    """Removes labels from a given host.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    host_obj = models.Host.smart_get(id)
    rpc_utils.fanout_rpc([host_obj], 'remove_labels_from_host', False,
                         id=id, labels=labels)
    remove_labels_from_host(id, labels)


def get_host_attribute(attribute, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    hosts = rpc_utils.get_host_query((), False, False, True, host_filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_attr_dicts = []
    for host_obj in hosts:
        for attr_obj in host_obj.attribute_list:
            if attr_obj.attribute == attribute:
                host_attr_dicts.append(attr_obj.get_object_dict())
    return rpc_utils.prepare_for_serialization(host_attr_dicts)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param value: string, or None to delete an attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    assert host_filter_data # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    # Master forwards this RPC to shards.
    if not utils.is_shard():
        rpc_utils.fanout_rpc(hosts, 'set_host_attribute', False,
                             attribute=attribute, value=value,
                             **host_filter_data)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)


@rpc_utils.forward_single_host_rpc_to_shard
def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given. Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)

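# Example usage (illustrative; the label names are hypothetical): fetch valid
# hosts carrying both labels, skipping hosts with "only_if_needed" labels.
#
#   hosts = get_hosts(multiple_labels=['board:lumpy', 'pool:bvt'],
#                     exclude_only_if_needed_labels=True)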

def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Test.list_objects(filter_data))


@_timer.decorate
def get_tests_status_counts_by_job_name_label(job_name_prefix, label_name):
    """Gets the counts of all passed and failed tests from the matching jobs.

    @param job_name_prefix: Name prefix of the jobs to get the summary from,
            e.g., 'butterfly-release/R40-6457.21.0/bvt-cq/'.
    @param label_name: Label that must be set in the jobs, e.g.,
            'cros-version:butterfly-release/R40-6457.21.0'.

    @returns A summary of the counts of all the passed and failed tests.
    """
    job_ids = list(models.Job.objects.filter(
            name__startswith=job_name_prefix,
            dependency_labels__name=label_name).values_list(
            'pk', flat=True))
    summary = {'passed': 0, 'failed': 0}
    if not job_ids:
        return summary

    counts = (tko_models.TestView.objects.filter(
            afe_job_id__in=job_ids).exclude(
            test_name='SERVER_JOB').exclude(
            test_name__startswith='CLIENT_JOB').values(
            'status').annotate(
            count=Count('status')))
    for status in counts:
        if status['status'] == 'GOOD':
            summary['passed'] += status['count']
        else:
            summary['failed'] += status['count']
    return summary

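# Example usage (illustrative; the prefix and label reuse the docstring's
# sample values). The result is a dict of the form
# {'passed': <count>, 'failed': <count>}.
#
#   summary = get_tests_status_counts_by_job_name_label(
#           'butterfly-release/R40-6457.21.0/bvt-cq/',
#           'cros-version:butterfly-release/R40-6457.21.0')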

# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(models.User.current_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False,
                          db_tests=True):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run. See db_tests for more information.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests.  If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel.  That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today.  TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
        file code that uploads the kernel config file to the client and
        tells the client of the new (local) path when compiling the kernel;
        the tests must be server side tests
    @param db_tests: if True, the test object can be found in the database
        backing the test model. In this case, tests is a tuple of test IDs
        which are used to retrieve the test objects from the database. If
        False, tests is a tuple of test dictionaries stored client-side in
        the AFE.

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
        rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                profilers, db_tests))
    cf_info['control_file'] = control_file.generate_control(
        tests=test_objects, kernels=kernel, platform=label,
        profilers=profiler_objects, is_server=cf_info['is_server'],
        client_control_file=client_control_file, profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    return cf_info

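# Example usage (illustrative; 'sleeptest' echoes the module docstring and is
# assumed to be registered in the AFE test database):
#
#   cf_info = generate_control_file(tests=['sleeptest'])
#   control_file_text = cf_info['control_file']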

def create_parameterized_job(name, priority, test, parameters, kernel=None,
                             label=None, profilers=(),
                             profiler_parameters=None,
                             use_container=False, profile_only=None,
                             upload_kernel_config=False, hosts=(),
                             meta_hosts=(), one_time_hosts=(),
                             atomic_group_name=None, synch_count=None,
                             is_template=False, timeout=None,
                             timeout_mins=None, max_runtime_mins=None,
                             run_verify=False, email_list='', dependencies=(),
                             reboot_before=None, reboot_after=None,
                             parse_failed_repair=None, hostless=False,
                             keyvals=None, drone_set=None, run_reset=True,
                             require_ssp=None):
    """
    Creates and enqueues a parameterized job.

    Most parameters are a combination of the parameters for
    generate_control_file() and create_job(), with the exception of:

    @param test name or ID of the test to run
    @param parameters a map of parameter name ->
                          tuple of (param value, param type)
    @param profiler_parameters a dictionary of parameters for the profilers:
                                   key: profiler name
                                   value: dict of param name -> tuple of
                                                                (param value,
                                                                 param type)
    """
    # Save the values of the passed arguments here. What we're going to do with
    # them is pass them all to rpc_utils.get_create_job_common_args(), which
    # will extract the subset of these arguments that apply for
    # rpc_utils.create_job_common(), which we then pass in to that function.
    args = locals()

    # Set up the parameterized job configs
    test_obj = models.Test.smart_get(test)
    control_type = test_obj.test_type

    try:
        label = models.Label.smart_get(label)
    except models.Label.DoesNotExist:
        label = None

    kernel_objs = models.Kernel.create_kernels(kernel)
    profiler_objs = [models.Profiler.smart_get(profiler)
                     for profiler in profilers]

    parameterized_job = models.ParameterizedJob.objects.create(
            test=test_obj, label=label, use_container=use_container,
            profile_only=profile_only,
            upload_kernel_config=upload_kernel_config)
    parameterized_job.kernels.add(*kernel_objs)

    for profiler in profiler_objs:
        parameterized_profiler = models.ParameterizedJobProfiler.objects.create(
                parameterized_job=parameterized_job,
                profiler=profiler)
        profiler_params = profiler_parameters.get(profiler.name, {})
        for name, (value, param_type) in profiler_params.iteritems():
            models.ParameterizedJobProfilerParameter.objects.create(
                    parameterized_job_profiler=parameterized_profiler,
                    parameter_name=name,
                    parameter_value=value,
                    parameter_type=param_type)

    try:
        for parameter in test_obj.testparameter_set.all():
            if parameter.name in parameters:
                param_value, param_type = parameters.pop(parameter.name)
                parameterized_job.parameterizedjobparameter_set.create(
                        test_parameter=parameter, parameter_value=param_value,
                        parameter_type=param_type)

        if parameters:
            raise Exception('Extra parameters remain: %r' % parameters)

        return rpc_utils.create_job_common(
                parameterized_job=parameterized_job.id,
                control_type=control_type,
                **rpc_utils.get_create_job_common_args(args))
    except:
        parameterized_job.delete()
        raise


def create_job_page_handler(name, priority, control_file, control_type,
                            image=None, hostless=False, firmware_rw_build=None,
                            firmware_ro_build=None, test_source_build=None,
                            **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job. Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param image: ChromeOS build to be installed on the DUT. Defaults to None.
    @param firmware_rw_build: Firmware build to update RW firmware. Defaults to
                              None, i.e., RW firmware will not be updated.
    @param firmware_ro_build: Firmware build to update RO firmware. Defaults to
                              None, i.e., RO firmware will not be updated.
    @param test_source_build: Build to be used to retrieve test code. Defaults
                              to None.
    @param kwargs extra args that will be required by create_suite_job or
                  create_job.

    @returns The created Job id number.
    """
    control_file = rpc_utils.encode_ascii(control_file)
    if not control_file:
        raise model_logic.ValidationError({
                'control_file' : "Control file cannot be empty"})

    if image and hostless:
        builds = {}
        builds[provision.CROS_VERSION_PREFIX] = image
        if firmware_rw_build:
            builds[provision.FW_RW_VERSION_PREFIX] = firmware_rw_build
        if firmware_ro_build:
            builds[provision.FW_RO_VERSION_PREFIX] = firmware_ro_build
        return site_rpc_interface.create_suite_job(
                name=name, control_file=control_file, priority=priority,
                builds=builds, test_source_build=test_source_build, **kwargs)
    return create_job(name, priority, control_file, control_type, image=image,
                      hostless=hostless, **kwargs)

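# Example usage (illustrative; the job name, control file variable, and build
# are hypothetical, and priorities.Priority.DEFAULT is assumed to be defined
# in the imported priorities module). With image and hostless set, the call
# is routed to site_rpc_interface.create_suite_job with a builds dictionary.
#
#   create_job_page_handler('lumpy-bvt-cq', priorities.Priority.DEFAULT,
#                           suite_control_file, 'Server', hostless=True,
#                           image='lumpy-release/R40-6457.21.0')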

def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, timeout_mins=None, max_runtime_mins=None,
               run_verify=False, email_list='', dependencies=(),
               reboot_before=None, reboot_after=None, parse_failed_repair=None,
               hostless=False, keyvals=None, drone_set=None, image=None,
               parent_job_id=None, test_retry=0, run_reset=True,
               require_ssp=None, args=(), **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job. Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous.  If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param timeout_mins Minutes after this call returns until the job times
        out.
    @param max_runtime_mins Minutes from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.
    @param hostless if true, create a hostless job
    @param keyvals dict of keyvals to associate with the job
    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.
    @param drone_set The name of the drone set to run this test on.
    @param image OS image to install before running job.
    @param parent_job_id id of a job considered to be parent of created job.
    @param test_retry Number of times to retry test if the test did not
        complete successfully. (optional, default: 0)
    @param run_reset Should the host be reset before running the test?
    @param require_ssp Set to True to require server-side packaging to run the
        test. If it's set to None, drone will still try to run the server side
        with server-side packaging. If the autotest-server package doesn't
        exist for the build or image is not set, drone will run the test
        without server-side packaging. Default is None.
    @param args A list of args to be injected into control file.
    @param kwargs extra keyword args. NOT USED.

    @returns The created Job id number.
    """
    if args:
        control_file = tools.inject_vars({'args': args}, control_file)

    if image is None:
        return rpc_utils.create_job_common(
                **rpc_utils.get_create_job_common_args(locals()))

    # When image is supplied use a known parameterized test already in the
    # database to pass the OS image path from the front end, through the
    # scheduler, and finally to autoserv as the --image parameter.

    # The test autoupdate_ParameterizedJob is in afe_autotests and used to
    # instantiate a Test object and from there a ParameterizedJob.
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    known_parameterized_job = models.ParameterizedJob.objects.create(
            test=known_test_obj)

    # autoupdate_ParameterizedJob has a single parameter, the image parameter,
    # stored in the table afe_test_parameters. We retrieve and set this
    # instance of the parameter to the OS image path.
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    known_parameterized_job.parameterizedjobparameter_set.create(
            test_parameter=image_parameter, parameter_value=image,
            parameter_type='string')

    # TODO(crbug.com/502638): save firmware build etc to parameterized_job.

    # By passing a parameterized_job to create_job_common the job entry in
    # the afe_jobs table will have the field parameterized_job_id set.
    # The scheduler uses this id in the afe_parameterized_jobs table to
    # match this job to our known test, and then with the
    # afe_parameterized_job_parameters table to get the actual image path.
    return rpc_utils.create_job_common(
            parameterized_job=known_parameterized_job.id,
            **rpc_utils.get_create_job_common_args(locals()))


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.

    @return: A list of dictionaries, each containing information
             about an aborted HQE.
    """
    query = models.HostQueueEntry.query_objects(filter_data)

    # Don't allow aborts on:
    #  1. Jobs that have already completed (whether or not they were aborted)
    #  2. Jobs that have already been aborted (but may not have completed)
    query = query.filter(complete=False).filter(aborted=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    models.HostQueueEntry.abort_host_queue_entries(host_queue_entries)
    hqe_info = [{'HostQueueEntry': hqe.id, 'Job': hqe.job_id,
                 'Job name': hqe.job.name} for hqe in host_queue_entries]
    return hqe_info


def abort_special_tasks(**filter_data):
    """\
    Abort the special task, or tasks, specified in the filter.
    """
    query = models.SpecialTask.query_objects(filter_data)
    special_tasks = query.filter(is_active=True)
    for task in special_tasks:
        task.abort()


def _call_special_tasks_on_hosts(task, hosts):
    """\
    Schedules a set of hosts for a special task.

    @returns A list of hostnames that a special task was created for.
    """
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts)
    if shard_host_map and not utils.is_shard():
        raise ValueError('The following hosts are on shards, please '
                         'follow the link to the shards and create jobs '
                         'there instead. %s.' % shard_host_map)
    for host in hosts:
        models.SpecialTask.schedule_special_task(host, task)
    return list(sorted(host.hostname for host in hosts))


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.

    @returns A list of hostnames that a verify task was created for.
    """
    hosts = models.Host.query_objects(filter_data)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts, rpc_hostnames=True)

    # Filter out hosts on a shard from those on the master, forward
    # rpcs to the shard with an additional hostname__in filter, and
    # create a local SpecialTask for each remaining host.
    if shard_host_map and not utils.is_shard():
        hosts = [h for h in hosts if h.shard is None]
        for shard, hostnames in shard_host_map.iteritems():

            # The main client of this module is the frontend website, and
            # it invokes it with an 'id' or an 'id__in' filter. Regardless,
            # the 'hostname' filter should narrow down the list of hosts on
            # each shard even though we supply all the ids in filter_data.
            # This method uses hostname instead of id because it fits better
            # with the overall architecture of redirection functions in
            # rpc_utils.
            shard_filter = filter_data.copy()
            shard_filter['hostname__in'] = hostnames
            rpc_utils.run_rpc_on_multiple_hostnames(
                    'reverify_hosts', [shard], **shard_filter)

    # There is a race condition here if someone assigns a shard to one of these
    # hosts before we create the task. The host will stay on the master if:
    # 1. The host is not Ready
    # 2. The host is Ready but has a task
    # But if the host is Ready and doesn't have a task yet, it will get sent
    # to the shard as we're creating a task here.

    # Given that we only rarely verify Ready hosts it isn't worth putting this
    # entire method in a transaction. The worst case scenario is that we have
    # a verify running on a Ready host while the shard is using it; if the
    # verify fails, no subsequent tasks will be created against the host on
    # the master, and verifies are safe enough that this is OK.
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.VERIFY, hosts)

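# Example usage (illustrative; the hostnames are hypothetical): schedule
# verify tasks for two hosts; hosts assigned to shards are forwarded there
# automatically.
#
#   reverify_hosts(hostname__in=['host1.example.net', 'host2.example.net'])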

def repair_hosts(**filter_data):
    """\
    Schedules a set of hosts for repair.

    @returns A list of hostnames that a repair task was created for.
    """
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.REPAIR,
                                        models.Host.query_objects(filter_data))


def get_jobs(not_yet_run=False, running=False, finished=False,
             suite=False, sub=False, standalone=False, **filter_data):
    """\
    Extra status filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.

    Extra type filter args for get_jobs:
    -suite: Include only jobs with child jobs.
    -sub: Include only jobs with a parent job.
    -standalone: Include only jobs with no child or parent jobs.
    At most one of these three fields should be specified.
    """
    extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
                                                    running,
                                                    finished)
    filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
                                                                 suite,
                                                                 sub,
                                                                 standalone)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval, 'keyvals')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dict['keyvals'] = dict((keyval.key, keyval.value)
                                   for keyval in job.keyvals)
        if job.parameterized_job:
            job_dict['image'] = get_parameterized_autoupdate_image_url(job)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)

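# Example usage (illustrative; the owner value echoes the module docstring):
# list queued suite jobs, i.e. jobs with child jobs that have not started.
#
#   jobs = get_jobs(owner='showard', not_yet_run=True, suite=True)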

def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 suite=False, sub=False, standalone=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
                                                    running,
                                                    finished)
    filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
                                                                 suite,
                                                                 sub,
                                                                 standalone)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds 'status_counts' and 'result_counts' fields.

    The 'status_counts' field is a dictionary mapping status strings to the
    number of hosts currently with that status, i.e. {'Queued' : 4,
    'Running' : 2}.

    The 'result_counts' field is piped to tko's rpc_interface and has the
    return format specified under get_group_counts.
    """
    jobs = get_jobs(**filter_data)
    ids = [job['id'] for job in jobs]
    all_status_counts = models.Job.objects.get_status_counts(ids)
    for job in jobs:
        job['status_counts'] = all_status_counts[job['id']]
        job['result_counts'] = tko_rpc_interface.get_status_counts(
                ['afe_job_id', 'afe_job_id'],
                header_groups=[['afe_job_id'], ['afe_job_id']],
                **{'afe_job_id': job['id']})
    return rpc_utils.prepare_for_serialization(jobs)

1103
showarda965cef2009-05-15 23:17:41 +00001104def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
showarda8709c52008-07-03 19:44:54 +00001105 """\
1106 Retrieves all the information needed to clone a job.
1107 """
showarda8709c52008-07-03 19:44:54 +00001108 job = models.Job.objects.get(id=id)
showard29f7cd22009-04-29 21:16:24 +00001109 job_info = rpc_utils.get_job_info(job,
showarda965cef2009-05-15 23:17:41 +00001110 preserve_metahosts,
1111 queue_entry_filter_data)
showard945072f2008-09-03 20:34:59 +00001112
showardd9992fe2008-07-31 02:15:03 +00001113 host_dicts = []
showard29f7cd22009-04-29 21:16:24 +00001114 for host in job_info['hosts']:
1115 host_dict = get_hosts(id=host.id)[0]
1116 other_labels = host_dict['labels']
1117 if host_dict['platform']:
1118 other_labels.remove(host_dict['platform'])
1119 host_dict['other_labels'] = ', '.join(other_labels)
showardd9992fe2008-07-31 02:15:03 +00001120 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001121
showard29f7cd22009-04-29 21:16:24 +00001122 for host in job_info['one_time_hosts']:
1123 host_dict = dict(hostname=host.hostname,
1124 id=host.id,
1125 platform='(one-time host)',
1126 locked_text='')
1127 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001128
showard4d077562009-05-08 18:24:36 +00001129 # convert keys from Label objects to strings (names of labels)
showard29f7cd22009-04-29 21:16:24 +00001130 meta_host_counts = dict((meta_host.name, count) for meta_host, count
showard4d077562009-05-08 18:24:36 +00001131 in job_info['meta_host_counts'].iteritems())
showard29f7cd22009-04-29 21:16:24 +00001132
1133 info = dict(job=job.get_object_dict(),
1134 meta_host_counts=meta_host_counts,
1135 hosts=host_dicts)
1136 info['job']['dependencies'] = job_info['dependencies']
1137 if job_info['atomic_group']:
1138 info['atomic_group_name'] = (job_info['atomic_group']).name
1139 else:
1140 info['atomic_group_name'] = None
jamesren2275ef12010-04-12 18:25:06 +00001141 info['hostless'] = job_info['hostless']
jamesren76fcf192010-04-21 20:39:50 +00001142 info['drone_set'] = job.drone_set and job.drone_set.name
showarda8709c52008-07-03 19:44:54 +00001143
Eric Lid23bc192011-02-09 14:38:57 -08001144 if job.parameterized_job:
1145 info['job']['image'] = get_parameterized_autoupdate_image_url(job)
1146
showarda8709c52008-07-03 19:44:54 +00001147 return rpc_utils.prepare_for_serialization(info)
1148
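# Illustrative sketch only (not meant as part of the RPC surface): pulling
# the hostnames out of the clone info returned by get_info_for_clone().
def _example_clone_hostnames(job_id):
    """Return the hostnames a cloned copy of `job_id` would target."""
    info = get_info_for_clone(job_id, preserve_metahosts=True)
    return [host['hostname'] for host in info['hosts']]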
1149
showard34dc5fa2008-04-24 20:58:40 +00001150# host queue entries
1151
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001152def get_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001153 """\
showardc92da832009-04-07 18:14:34 +00001154 @returns A sequence of nested dictionaries of host and job information.
jadmanski0afbb632008-06-06 21:10:57 +00001155 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001156 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1157 'started_on__lte',
1158 start_time,
1159 end_time,
1160 **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001161 return rpc_utils.prepare_rows_as_nested_dicts(
1162 models.HostQueueEntry.query_objects(filter_data),
1163 ('host', 'atomic_group', 'job'))
showard34dc5fa2008-04-24 20:58:40 +00001164
1165
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001166def get_num_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001167 """\
1168 Get the number of host queue entries matching the given filter data.
1169 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001170 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1171 'started_on__lte',
1172 start_time,
1173 end_time,
1174 **filter_data)
jadmanski0afbb632008-06-06 21:10:57 +00001175 return models.HostQueueEntry.query_count(filter_data)
showard34dc5fa2008-04-24 20:58:40 +00001176
1177
showard1e935f12008-07-11 00:11:36 +00001178def get_hqe_percentage_complete(**filter_data):
1179 """
showardc92da832009-04-07 18:14:34 +00001180 Computes the fraction of host queue entries matching the given filter data
showard1e935f12008-07-11 00:11:36 +00001181 that are complete.
1182 """
1183 query = models.HostQueueEntry.query_objects(filter_data)
1184 complete_count = query.filter(complete=True).count()
1185 total_count = query.count()
1186 if total_count == 0:
1187 return 1
1188 return float(complete_count) / total_count
1189
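# Illustrative sketch only (not meant as part of the RPC surface): reporting
# a job's completion as a percentage. 'job__id' is assumed here to be a
# valid Django-style HostQueueEntry filter keyword.
def _example_job_percent_complete(job_id):
    """Return the percentage of `job_id`'s queue entries that are complete."""
    return 100.0 * get_hqe_percentage_complete(job__id=job_id)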
1190
showard1a5a4082009-07-28 20:01:37 +00001191# special tasks
1192
1193def get_special_tasks(**filter_data):
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001194 """Get special task entries from the local database.
1195
1196 Query the special tasks table for tasks matching the given
1197 `filter_data`, and return a list of the results. No attempt is
1198 made to forward the call to shards; the buck will stop here.
1199 The caller is expected to know the target shard for such reasons
1200 as:
1201 * The caller is a service (such as gs_offloader) configured
1202 to operate on behalf of one specific shard, and no other.
1203 * The caller has a host as a parameter, and knows that this is
1204 the shard assigned to that host.
1205
1206 @param filter_data Filter keywords to pass to the underlying
1207 database query.
1208
1209 """
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001210 return rpc_utils.prepare_rows_as_nested_dicts(
1211 models.SpecialTask.query_objects(filter_data),
1212 ('host', 'queue_entry'))
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001213
1214
1215def get_host_special_tasks(host_id, **filter_data):
1216 """Get special task entries for a given host.
1217
1218 Query the special tasks table for tasks that ran on the host
1219 given by `host_id` and matching the given `filter_data`.
1220 Return a list of the results. If the host is assigned to a
1221 shard, forward this call to that shard.
1222
1223 @param host_id Id in the database of the target host.
1224 @param filter_data Filter keywords to pass to the underlying
1225 database query.
1226
1227 """
MK Ryu0c1a37d2015-04-30 12:00:55 -07001228 # Retrieve host data even if the host is in an invalid state.
1229 host = models.Host.smart_get(host_id, False)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001230 if not host.shard:
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001231 return get_special_tasks(host_id=host_id, **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001232 else:
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001233 # The return values from AFE methods are post-processed
1234 # objects that aren't JSON-serializable. So, we have to
1235 # call AFE.run() to get the raw, serializable output from
1236 # the shard.
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001237 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1238 return shard_afe.run('get_special_tasks',
1239 host_id=host_id, **filter_data)
showard1a5a4082009-07-28 20:01:37 +00001240
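# Illustrative sketch only (not meant as part of the RPC surface): fetching
# a host's newest special tasks. The 'query_limit' and 'sort_by' keywords
# are the same filter arguments used by the host detail view code below.
def _example_recent_host_tasks(hostname, count=5):
    """Return up to `count` of the newest special tasks for `hostname`."""
    return get_host_special_tasks(hostname,
                                  query_limit=count,
                                  sort_by=['-id'])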
1241
MK Ryu0c1a37d2015-04-30 12:00:55 -07001242def get_num_special_tasks(**kwargs):
1243 """Get the number of special task entries from the local database.
1244
1245 Query the special tasks table for tasks matching the given 'kwargs',
1246 and return the number of the results. No attempt is made to forward
1247 the call to shards; the buck will stop here.
1248
1249 @param kwargs Filter keywords to pass to the underlying database query.
1250
1251 """
1252 return models.SpecialTask.query_count(kwargs)
1253
1254
1255def get_host_num_special_tasks(host, **kwargs):
1256 Get the number of special task entries for a given host.
1257
1258 Query the special tasks table for tasks that ran on the host
1259 given by 'host' and matching the given 'kwargs'.
1260 Return the number of the results. If the host is assigned to a
1261 shard, forward this call to that shard.
1262
1263 @param host id or name of a host. More often a hostname.
1264 @param kwargs Filter keywords to pass to the underlying database query.
1265
1266 """
1267 # Retrieve host data even if the host is in an invalid state.
1268 host_model = models.Host.smart_get(host, False)
1269 if not host_model.shard:
1270 return get_num_special_tasks(host=host, **kwargs)
1271 else:
1272 shard_afe = frontend.AFE(server=host_model.shard.rpc_hostname())
1273 return shard_afe.run('get_num_special_tasks', host=host, **kwargs)
1274
1275
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001276def get_status_task(host_id, end_time):
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001277 """Get the "status task" for a host from the local shard.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001278
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001279 Returns a single special task representing the given host's
1280 "status task". The status task is a completed special task that
1281 identifies whether the corresponding host was working or broken
1282 when it completed. A successful task indicates a working host;
1283 a failed task indicates broken.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001284
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001285 This call will not be forwarded to a shard; the receiving server
1286 must be the shard that owns the host.
1287
1288 @param host_id Id in the database of the target host.
1289 @param end_time Time reference for the host's status.
1290
1291 @return A single task; its status (successful or not)
1292 corresponds to the status of the host (working or
1293 broken) at the given time. If no task is found, return
1294 `None`.
1295
1296 """
1297 tasklist = rpc_utils.prepare_rows_as_nested_dicts(
1298 status_history.get_status_task(host_id, end_time),
1299 ('host', 'queue_entry'))
1300 return tasklist[0] if tasklist else None
1301
1302
1303def get_host_status_task(host_id, end_time):
1304 """Get the "status task" for a host from its owning shard.
1305
1306 Finds the given host's owning shard, and forwards to it a call
1307 to `get_status_task()` (see above).
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001308
1309 @param host_id Id in the database of the target host.
1310 @param end_time Time reference for the host's status.
1311
1312 @return A single task; its status (successful or not)
1313 corresponds to the status of the host (working or
1314 broken) at the given time. If no task is found, return
1315 `None`.
1316
1317 """
1318 host = models.Host.smart_get(host_id)
1319 if not host.shard:
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001320 return get_status_task(host_id, end_time)
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001321 else:
1322 # The return values from AFE methods are post-processed
1323 # objects that aren't JSON-serializable. So, we have to
1324 # call AFE.run() to get the raw, serializable output from
1325 # the shard.
1326 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1327 return shard_afe.run('get_status_task',
1328 host_id=host_id, end_time=end_time)
1329
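# Illustrative sketch only (not meant as part of the RPC surface): deciding
# whether a host was working at a given time from its status task, per the
# contract documented above. The serialized task is assumed to carry the
# SpecialTask 'success' field.
def _example_host_was_working(host_id, end_time):
    """Return True/False for the host's health, or None if unknown."""
    task = get_host_status_task(host_id, end_time)
    if task is None:
        return None
    return bool(task['success'])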
1330
J. Richard Barnette8abbfd62015-06-23 12:46:54 -07001331def get_host_diagnosis_interval(host_id, end_time, success):
1332 """Find a "diagnosis interval" for a given host.
1333
1334 A "diagnosis interval" identifies a start and end time where
1335 the host went from "working" to "broken", or vice versa. The
1336 interval's starting time is the starting time of the last status
1337 task with the old status; the end time is the finish time of the
1338 first status task with the new status.
1339
1340 This routine finds the most recent diagnosis interval for the
1341 given host prior to `end_time`, with a starting status matching
1342 `success`. If `success` is true, the interval will start with a
1343 successful status task; if false the interval will start with a
1344 failed status task.
1345
1346 @param host_id Id in the database of the target host.
1347 @param end_time Time reference for the diagnosis interval.
1348 @param success Whether the diagnosis interval should start
1349 with a successful or failed status task.
1350
1351 @return A list of two strings. The first is the timestamp for
1352 the beginning of the interval; the second is the
1353 timestamp for the end. If the host has never changed
1354 state, the list is empty.
1355
1356 """
1357 host = models.Host.smart_get(host_id)
1358 if not host.shard:
1359 return status_history.get_diagnosis_interval(
1360 host_id, end_time, success)
1361 else:
1362 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1363 return shard_afe.get_host_diagnosis_interval(
1364 host_id, end_time, success)
1365
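# Illustrative sketch only (not meant as part of the RPC surface): unpacking
# the diagnosis interval documented above. With success=True the interval
# spans the host's most recent working-to-broken transition before end_time.
def _example_when_host_broke(host_id, end_time):
    """Return (start, end) timestamps of the transition, or None."""
    interval = get_host_diagnosis_interval(host_id, end_time, success=True)
    if not interval:
        return None
    return interval[0], interval[1]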
1366
showardc0ac3a72009-07-08 21:14:45 +00001367# support for host detail view
1368
MK Ryu0c1a37d2015-04-30 12:00:55 -07001369def get_host_queue_entries_and_special_tasks(host, query_start=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001370 query_limit=None, start_time=None,
1371 end_time=None):
showardc0ac3a72009-07-08 21:14:45 +00001372 """
1373 @returns an interleaved list of HostQueueEntries and SpecialTasks,
1374 in approximate run order. Each dict contains keys for type, host,
1375 job, status, started_on, execution_path, and ID.
1376 """
1377 total_limit = None
1378 if query_limit is not None:
1379        total_limit = (query_start or 0) + query_limit
MK Ryu0c1a37d2015-04-30 12:00:55 -07001380 filter_data_common = {'host': host,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001381 'query_limit': total_limit,
1382 'sort_by': ['-id']}
showardc0ac3a72009-07-08 21:14:45 +00001383
MK Ryu0c1a37d2015-04-30 12:00:55 -07001384 filter_data_special_tasks = rpc_utils.inject_times_to_filter(
1385 'time_started__gte', 'time_started__lte', start_time, end_time,
1386 **filter_data_common)
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001387
MK Ryu0c1a37d2015-04-30 12:00:55 -07001388 queue_entries = get_host_queue_entries(
1389 start_time, end_time, **filter_data_common)
1390 special_tasks = get_host_special_tasks(host, **filter_data_special_tasks)
showardc0ac3a72009-07-08 21:14:45 +00001391
1392 interleaved_entries = rpc_utils.interleave_entries(queue_entries,
1393 special_tasks)
1394 if query_start is not None:
1395 interleaved_entries = interleaved_entries[query_start:]
1396 if query_limit is not None:
1397 interleaved_entries = interleaved_entries[:query_limit]
MK Ryu0c1a37d2015-04-30 12:00:55 -07001398 return rpc_utils.prepare_host_queue_entries_and_special_tasks(
1399 interleaved_entries, queue_entries)
showardc0ac3a72009-07-08 21:14:45 +00001400
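# Illustrative sketch only (not meant as part of the RPC surface): paging
# through a host's interleaved history one screenful at a time via the
# query_start and query_limit arguments of the function above.
def _example_host_history_page(host, page, page_size=20):
    """Return one page of the host's interleaved HQE/special-task history."""
    return get_host_queue_entries_and_special_tasks(
            host, query_start=page * page_size, query_limit=page_size)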
1401
MK Ryu0c1a37d2015-04-30 12:00:55 -07001402def get_num_host_queue_entries_and_special_tasks(host, start_time=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001403 end_time=None):
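    """Get the total number of HQEs and special tasks for a host.

    Counts the host queue entries and special tasks that ran on `host`,
    optionally restricted to the window between `start_time` and
    `end_time`.

    @param host id or hostname of the target host.
    @param start_time Optional lower bound on start time.
    @param end_time Optional upper bound on start time.
    """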
MK Ryu0c1a37d2015-04-30 12:00:55 -07001404 filter_data_common = {'host': host}
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001405
1406 filter_data_queue_entries, filter_data_special_tasks = (
1407 rpc_utils.inject_times_to_hqe_special_tasks_filters(
1408 filter_data_common, start_time, end_time))
1409
1410 return (models.HostQueueEntry.query_count(filter_data_queue_entries)
MK Ryu0c1a37d2015-04-30 12:00:55 -07001411 + get_host_num_special_tasks(**filter_data_special_tasks))
showardc0ac3a72009-07-08 21:14:45 +00001412
1413
showard29f7cd22009-04-29 21:16:24 +00001414# recurring run
1415
1416def get_recurring(**filter_data):
1417 return rpc_utils.prepare_rows_as_nested_dicts(
1418 models.RecurringRun.query_objects(filter_data),
1419 ('job', 'owner'))
1420
1421
1422def get_num_recurring(**filter_data):
1423 return models.RecurringRun.query_count(filter_data)
1424
1425
1426def delete_recurring_runs(**filter_data):
1427 to_delete = models.RecurringRun.query_objects(filter_data)
1428 to_delete.delete()
1429
1430
1431def create_recurring_run(job_id, start_date, loop_period, loop_count):
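    """Create a recurring run of an existing job, owned by the current user.

    @param job_id id of the job to use as the template.
    @param start_date When the first run should be scheduled.
    @param loop_period Interval between successive runs.
    @param loop_count Number of times to run the job.
    """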
showard64a95952010-01-13 21:27:16 +00001432 owner = models.User.current_user().login
showard29f7cd22009-04-29 21:16:24 +00001433 job = models.Job.objects.get(id=job_id)
1434 return job.create_recurring_job(start_date=start_date,
1435 loop_period=loop_period,
1436 loop_count=loop_count,
1437 owner=owner)
1438
1439
mblighe8819cd2008-02-15 16:48:40 +00001440# other
1441
showarde0b63622008-08-04 20:58:47 +00001442def echo(data=""):
1443 """\
1444 Returns the passed-in string. Useful as a basic check that RPC calls
1445 can be made successfully.
1446 """
1447 return data
1448
1449
showardb7a52fd2009-04-27 20:10:56 +00001450def get_motd():
1451 """\
1452 Returns the message of the day as a string.
1453 """
1454 return rpc_utils.get_motd()
1455
1456
mblighe8819cd2008-02-15 16:48:40 +00001457def get_static_data():
jadmanski0afbb632008-06-06 21:10:57 +00001458 """\
1459 Returns a dictionary containing a bunch of data that shouldn't change
1460 often and is otherwise inaccessible. This includes:
showardc92da832009-04-07 18:14:34 +00001461
1462 priorities: List of job priority choices.
1463 default_priority: Default priority value for new jobs.
1464 users: Sorted list of all users.
Jiaxi Luo31874592014-06-11 10:36:35 -07001465 labels: Sorted list of labels that do not start with 'cros-version' or
1466 'fw-version'.
showardc92da832009-04-07 18:14:34 +00001467 atomic_groups: Sorted list of all atomic groups.
1468 tests: Sorted list of all tests.
1469 profilers: Sorted list of all profilers.
1470 current_user: Logged-in username.
1471 host_statuses: Sorted list of possible Host statuses.
1472 job_statuses: Sorted list of possible HostQueueEntry statuses.
Simran Basi7e605742013-11-12 13:43:36 -08001473 job_timeout_mins_default: The default job timeout length in minutes.
showarda1e74b32009-05-12 17:32:04 +00001474 parse_failed_repair_default: Default value for the parse_failed_repair job
Jiaxi Luo31874592014-06-11 10:36:35 -07001475 option.
showardc92da832009-04-07 18:14:34 +00001476 reboot_before_options: A list of valid RebootBefore string enums.
1477 reboot_after_options: A list of valid RebootAfter string enums.
1478 motd: Server's message of the day.
1479 status_dictionary: A mapping from one word job status names to a more
1480 informative description.
jadmanski0afbb632008-06-06 21:10:57 +00001481 """
showard21baa452008-10-21 00:08:39 +00001482
1483 job_fields = models.Job.get_field_dict()
jamesren76fcf192010-04-21 20:39:50 +00001484 default_drone_set_name = models.DroneSet.default_drone_set_name()
1485 drone_sets = ([default_drone_set_name] +
1486 sorted(drone_set.name for drone_set in
1487 models.DroneSet.objects.exclude(
1488 name=default_drone_set_name)))
showard21baa452008-10-21 00:08:39 +00001489
jadmanski0afbb632008-06-06 21:10:57 +00001490 result = {}
Alex Miller7d658cf2013-09-04 16:00:35 -07001491 result['priorities'] = priorities.Priority.choices()
1492 default_priority = priorities.Priority.DEFAULT
1493 result['default_priority'] = 'Default'
1494 result['max_schedulable_priority'] = priorities.Priority.DEFAULT
jadmanski0afbb632008-06-06 21:10:57 +00001495 result['users'] = get_users(sort_by=['login'])
Jiaxi Luo31874592014-06-11 10:36:35 -07001496
1497 label_exclude_filters = [{'name__startswith': 'cros-version'},
1498 {'name__startswith': 'fw-version'}]
1499 result['labels'] = get_labels(
1500 label_exclude_filters,
1501 sort_by=['-platform', 'name'])
1502
showardc92da832009-04-07 18:14:34 +00001503 result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
jadmanski0afbb632008-06-06 21:10:57 +00001504 result['tests'] = get_tests(sort_by=['name'])
showard2b9a88b2008-06-13 20:55:03 +00001505 result['profilers'] = get_profilers(sort_by=['name'])
showard0fc38302008-10-23 00:44:07 +00001506 result['current_user'] = rpc_utils.prepare_for_serialization(
showard64a95952010-01-13 21:27:16 +00001507 models.User.current_user().get_object_dict())
showard2b9a88b2008-06-13 20:55:03 +00001508 result['host_statuses'] = sorted(models.Host.Status.names)
mbligh5a198b92008-12-11 19:33:29 +00001509 result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
Simran Basi7e605742013-11-12 13:43:36 -08001510 result['job_timeout_mins_default'] = models.Job.DEFAULT_TIMEOUT_MINS
Simran Basi34217022012-11-06 13:43:15 -08001511 result['job_max_runtime_mins_default'] = (
1512 models.Job.DEFAULT_MAX_RUNTIME_MINS)
showarda1e74b32009-05-12 17:32:04 +00001513 result['parse_failed_repair_default'] = bool(
1514 models.Job.DEFAULT_PARSE_FAILED_REPAIR)
jamesrendd855242010-03-02 22:23:44 +00001515 result['reboot_before_options'] = model_attributes.RebootBefore.names
1516 result['reboot_after_options'] = model_attributes.RebootAfter.names
showard8fbae652009-01-20 23:23:10 +00001517 result['motd'] = rpc_utils.get_motd()
jamesren76fcf192010-04-21 20:39:50 +00001518 result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
1519 result['drone_sets'] = drone_sets
jamesren4a41e012010-07-16 22:33:48 +00001520 result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
showard8ac29b42008-07-17 17:01:55 +00001521
showardd3dc1992009-04-22 21:01:40 +00001522 result['status_dictionary'] = {"Aborted": "Aborted",
showard8ac29b42008-07-17 17:01:55 +00001523 "Verifying": "Verifying Host",
Alex Millerdfff2fd2013-05-28 13:05:06 -07001524 "Provisioning": "Provisioning Host",
showard8ac29b42008-07-17 17:01:55 +00001525 "Pending": "Waiting on other hosts",
1526 "Running": "Running autoserv",
1527 "Completed": "Autoserv completed",
1528 "Failed": "Failed to complete",
showardd823b362008-07-24 16:35:46 +00001529 "Queued": "Queued",
showard5deb6772008-11-04 21:54:33 +00001530 "Starting": "Next in host's queue",
1531 "Stopped": "Other host(s) failed verify",
showardd3dc1992009-04-22 21:01:40 +00001532 "Parsing": "Awaiting parse of final results",
showard29f7cd22009-04-29 21:16:24 +00001533 "Gathering": "Gathering log files",
showard8cc058f2009-09-08 16:26:33 +00001534 "Template": "Template job for recurring run",
mbligh4608b002010-01-05 18:22:35 +00001535 "Waiting": "Waiting for scheduler action",
Dan Shi07e09af2013-04-12 09:31:29 -07001536 "Archiving": "Archiving results",
1537 "Resetting": "Resetting hosts"}
Jiaxi Luo421608e2014-07-07 14:38:00 -07001538
1539 result['wmatrix_url'] = rpc_utils.get_wmatrix_url()
Simran Basi71206ef2014-08-13 13:51:18 -07001540 result['is_moblab'] = bool(utils.is_moblab())
Jiaxi Luo421608e2014-07-07 14:38:00 -07001541
jadmanski0afbb632008-06-06 21:10:57 +00001542 return result
showard29f7cd22009-04-29 21:16:24 +00001543
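# Illustrative sketch only (not meant as part of the RPC surface): a few of
# the fields a frontend might read out of get_static_data(). Only keys set
# above are used.
def _example_scheduling_defaults():
    """Return the default priority and job timeout exposed to clients."""
    static_data = get_static_data()
    return {'priority': static_data['default_priority'],
            'timeout_mins': static_data['job_timeout_mins_default']}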
1544
1545def get_server_time():
1546 return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")