# pylint: disable-msg=C0111

"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing a uniquely identifying field (this option should
   seldom be used)

When specifying foreign key fields (i.e. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or
the name of the object.

All get* functions return lists of dictionaries. Each dictionary represents one
object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import sys
import datetime

from django.db.models import Count
import common
from autotest_lib.client.common_lib import priorities
from autotest_lib.client.common_lib.cros.graphite import autotest_stats
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.frontend.afe import models, model_logic, model_attributes
from autotest_lib.frontend.afe import site_rpc_interface
from autotest_lib.frontend.tko import models as tko_models
from autotest_lib.frontend.tko import rpc_interface as tko_rpc_interface
from autotest_lib.server import frontend
from autotest_lib.server import utils
from autotest_lib.server.cros import provision
from autotest_lib.server.cros.dynamic_suite import tools
from autotest_lib.site_utils import status_history


_timer = autotest_stats.Timer('rpc_interface')

def get_parameterized_autoupdate_image_url(job):
    """Get the parameterized autoupdate image url from a parameterized job."""
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    para_set = job.parameterized_job.parameterizedjobparameter_set
    job_test_para = para_set.get(test_parameter=image_parameter)
    return job_test_para.parameter_value


# labels

def modify_label(id, **data):
    """Modify a label.

    @param id: id or name of a label. More often a label name.
    @param data: New data for a label.
    """
    label_model = models.Label.smart_get(id)

    # Master forwards the RPC to shards
    if not utils.is_shard():
        rpc_utils.fanout_rpc(label_model.host_set.all(), 'modify_label', False,
                             id=id, **data)

    label_model.update_object(data)


def delete_label(id):
    """Delete a label.

    @param id: id or name of a label. More often a label name.
    """
    label_model = models.Label.smart_get(id)

    # Master forwards the RPC to shards
    if not utils.is_shard():
        rpc_utils.fanout_rpc(label_model.host_set.all(), 'delete_label', False,
                             id=id)

    label_model.delete()


def add_label(name, ignore_exception_if_exists=False, **kwargs):
    """Adds a new label of a given name.

    @param name: label name.
    @param ignore_exception_if_exists: If True and the exception was
        thrown due to the duplicated label name when adding a label,
        then suppress the exception. Default is False.
    @param kwargs: keyword args that store more info about a label
        other than the name.
    @return: int/long id of a new label.
    """
    # models.Label.add_object() throws model_logic.ValidationError
    # when it is given a label name that already exists.
    # However, ValidationError can be thrown with different errors,
    # and those errors should be thrown up to the call chain.
    try:
        label = models.Label.add_object(name=name, **kwargs)
    except:
        exc_info = sys.exc_info()
        if ignore_exception_if_exists:
            label = rpc_utils.get_label(name)
            # If the exception is raised not because of duplicated
            # "name", then raise the original exception.
            if label is None:
                raise exc_info[0], exc_info[1], exc_info[2]
        else:
            raise exc_info[0], exc_info[1], exc_info[2]
    return label.id


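# Illustrative usage of add_label() (not part of the original source; the
# label name below is hypothetical):
#   add_label('pool:experimental', ignore_exception_if_exists=True)
# returns the existing label's id instead of raising if the name is taken.
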
def add_label_to_hosts(id, hosts):
    """Adds a label of the given id to the given hosts only in local DB.

    @param id: id or name of a label. More often a label name.
    @param hosts: The hostnames of hosts that need the label.

    @raises models.Label.DoesNotExist: If the label with id doesn't exist.
    """
    label = models.Label.smart_get(id)
    host_objs = models.Host.smart_get_bulk(hosts)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)


@rpc_utils.route_rpc_to_master
def label_add_hosts(id, hosts):
    """Adds a label with the given id to the given hosts.

    This method should be run only on the master, not on shards.
    The given label will be created if it doesn't exist, provided the `id`
    supplied is a label name, not an int/long id.

    @param id: id or name of a label. More often a label name.
    @param hosts: A list of hostnames or ids. More often hostnames.

    @raises ValueError: If the id specified is an int/long (label id)
                        while the label does not exist.
    """
    try:
        label = models.Label.smart_get(id)
    except models.Label.DoesNotExist:
        # This matches the type checks in smart_get, which is a hack
        # in and of itself. The aim here is to create any non-existent
        # label, which we cannot do if the 'id' specified isn't a label name.
        if isinstance(id, basestring):
            label = models.Label.smart_get(add_label(id))
        else:
            raise ValueError('Label id (%s) does not exist. Please specify '
                             'the argument, id, as a string (label name).'
                             % id)

    host_objs = models.Host.smart_get_bulk(hosts)
    # Make sure the label exists on the shard with the same id
    # as it is on the master.
    # It is possible that the label is already in a shard because
    # we are adding a new label only to shards of hosts to which the label
    # is going to be attached.
    # For example, we add a label L1 to a host in shard S1.
    # Master and S1 will have L1 but other shards won't.
    # Later, when we add the same label L1 to hosts in shards S1 and S2,
    # S1 already has the label but S2 doesn't.
    # S2 should have the new label without any problem.
    # We ignore the exception in such a case.
    rpc_utils.fanout_rpc(
            host_objs, 'add_label', name=label.name, id=label.id,
            include_hostnames=False, ignore_exception_if_exists=True)
    rpc_utils.fanout_rpc(host_objs, 'add_label_to_hosts', id=id)

    add_label_to_hosts(id, hosts)


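# Illustrative usage of label_add_hosts() (hypothetical label and hostnames):
#   label_add_hosts('pool:experimental', ['host1.example.com', 'host2.example.com'])
# On the master this creates the label if needed, fans the change out to the
# shards owning those hosts, and finally updates the local DB.
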
def remove_label_from_hosts(id, hosts):
    """Removes a label of the given id from the given hosts only in local DB.

    @param id: id or name of a label.
    @param hosts: The hostnames of hosts from which to remove the label.
    """
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


@rpc_utils.route_rpc_to_master
def label_remove_hosts(id, hosts):
    """Removes a label of the given id from the given hosts.

    This method should be run only on the master, not on shards.

    @param id: id or name of a label.
    @param hosts: A list of hostnames or ids. More often hostnames.
    """
    host_objs = models.Host.smart_get_bulk(hosts)
    rpc_utils.fanout_rpc(host_objs, 'remove_label_from_hosts', id=id)

    remove_label_from_hosts(id, hosts)


def get_labels(exclude_filters=(), **filter_data):
    """\
    @param exclude_filters: A sequence of dictionaries of filters.

    @returns A sequence of nested dictionaries of label information.
    """
    labels = models.Label.query_objects(filter_data)
    for exclude_filter in exclude_filters:
        labels = labels.exclude(**exclude_filter)
    return rpc_utils.prepare_rows_as_nested_dicts(labels, ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, lock_reason='', protection=None):
    if locked and not lock_reason:
        raise model_logic.ValidationError(
            {'locked': 'Please provide a reason for locking when adding host.'})

    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, lock_reason=lock_reason,
                                  protection=protection).id


@rpc_utils.forward_single_host_rpc_to_shard
def modify_host(id, **data):
    """Modify local attributes of a host.

    If this is called on the master, but the host is assigned to a shard, this
    will also forward the call to the responsible shard. For example, if a
    host is locked using this function, the change will also propagate to the
    shard.

    @param id: id of the host to modify.
    @param **data: key=value pairs of values to set on the host.
    """
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)

    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """Modify local attributes of multiple hosts.

    If this is called on the master, but one of the hosts that match the
    filters is assigned to a shard, this will also forward the call to the
    responsible shard.

    The filters are always applied on the master, not on the shards. This means
    if the states of a host differ on the master and a shard, the state on the
    master will be used. For example:
    A host was synced to Shard 1. On Shard 1 the status of the host was set to
    'Repair Failed'.
    - A call to modify_hosts with host_filter_data={'status': 'Ready'} will
      update the host (both on the shard and on the master), because the state
      of the host as the master knows it is still 'Ready'.
    - A call to modify_hosts with host_filter_data={'status': 'Repair Failed'}
      will not update the host, because the filter doesn't apply on the master.

    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)

    affected_shard_hostnames = set()
    affected_host_ids = []

    # Check all hosts before changing data for exception safety.
    for host in hosts:
        rpc_utils.check_modify_host_locking(host, update_data)
        if host.shard:
            affected_shard_hostnames.add(host.shard.rpc_hostname())
            affected_host_ids.append(host.id)

    if not utils.is_shard():
        # Caution: Changing the filter from the original here. See docstring.
        rpc_utils.run_rpc_on_multiple_hostnames(
                'modify_hosts', affected_shard_hostnames,
                host_filter_data={'id__in': affected_host_ids},
                update_data=update_data)

    for host in hosts:
        host.update_object(update_data)


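# Illustrative usage of modify_hosts() (the filter and values are
# hypothetical):
#   modify_hosts(host_filter_data={'status': 'Ready', 'locked': False},
#                update_data={'locked': True, 'lock_reason': 'maintenance'})
# The filter is evaluated on the master; matching hosts assigned to shards are
# updated there via a forwarded RPC keyed by host id.
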
def add_labels_to_host(id, labels):
    """Adds labels to a given host only in local DB.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.add(*label_objs)


@rpc_utils.route_rpc_to_master
def host_add_labels(id, labels):
    """Adds labels to a given host.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.

    @raises ValidationError: If adding more than one platform label.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    platforms = [label.name for label in label_objs if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})

    host_obj = models.Host.smart_get(id)
    if len(platforms) == 1:
        models.Host.check_no_platform([host_obj])

    rpc_utils.fanout_rpc([host_obj], 'add_labels_to_host', False,
                         id=id, labels=labels)
    add_labels_to_host(id, labels)


def remove_labels_from_host(id, labels):
    """Removes labels from a given host only in local DB.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*label_objs)


@rpc_utils.route_rpc_to_master
def host_remove_labels(id, labels):
    """Removes labels from a given host.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    host_obj = models.Host.smart_get(id)
    rpc_utils.fanout_rpc([host_obj], 'remove_labels_from_host', False,
                         id=id, labels=labels)
    remove_labels_from_host(id, labels)


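# Illustrative usage of the per-host label RPCs (hypothetical host and label
# names):
#   host_add_labels('host1.example.com', ['power:battery', 'pool:suites'])
#   host_remove_labels('host1.example.com', ['pool:suites'])
# Both calls are routed to the master and fanned out to the host's shard.
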
def get_host_attribute(attribute, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    hosts = rpc_utils.get_host_query((), False, False, True, host_filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_attr_dicts = []
    for host_obj in hosts:
        for attr_obj in host_obj.attribute_list:
            if attr_obj.attribute == attribute:
                host_attr_dicts.append(attr_obj.get_object_dict())
    return rpc_utils.prepare_for_serialization(host_attr_dicts)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param value: string, or None to delete an attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    assert host_filter_data  # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    # Master forwards this RPC to shards.
    if not utils.is_shard():
        rpc_utils.fanout_rpc(hosts, 'set_host_attribute', False,
                attribute=attribute, value=value, **host_filter_data)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)


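# Illustrative usage of set_host_attribute() (the attribute name and hostname
# are hypothetical):
#   set_host_attribute('job_repo_url', None, hostname='host1.example.com')
# A value of None deletes the attribute; the non-empty filter is mandatory so
# the call cannot accidentally touch every host.
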
@rpc_utils.forward_single_host_rpc_to_shard
def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True, **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given. Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                                'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                                'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


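# Illustrative query usage (the label names are hypothetical):
#   get_hosts(multiple_labels=['board:lumpy', 'pool:bvt'], locked=False)
#   get_num_hosts(multiple_labels=['board:lumpy', 'pool:bvt'], locked=False)
# get_num_hosts() accepts the same arguments as get_hosts() but only returns
# the match count.
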
# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Test.list_objects(filter_data))


@_timer.decorate
def get_tests_status_counts_by_job_name_label(job_name_prefix, label_name):
    """Gets the counts of all passed and failed tests from the matching jobs.

    @param job_name_prefix: Name prefix of the jobs to get the summary from,
            e.g., 'butterfly-release/R40-6457.21.0/bvt-cq/'.
    @param label_name: Label that must be set in the jobs, e.g.,
            'cros-version:butterfly-release/R40-6457.21.0'.

    @returns A summary of the counts of all the passed and failed tests.
    """
    job_ids = list(models.Job.objects.filter(
            name__startswith=job_name_prefix,
            dependency_labels__name=label_name).values_list(
            'pk', flat=True))
    summary = {'passed': 0, 'failed': 0}
    if not job_ids:
        return summary

    counts = (tko_models.TestView.objects.filter(
            afe_job_id__in=job_ids).exclude(
            test_name='SERVER_JOB').exclude(
            test_name__startswith='CLIENT_JOB').values(
            'status').annotate(
            count=Count('status')))
    for status in counts:
        if status['status'] == 'GOOD':
            summary['passed'] += status['count']
        else:
            summary['failed'] += status['count']
    return summary


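# Illustrative usage, reusing the example values from the docstring above:
#   get_tests_status_counts_by_job_name_label(
#       'butterfly-release/R40-6457.21.0/bvt-cq/',
#       'cros-version:butterfly-release/R40-6457.21.0')
# returns a dict such as {'passed': 10, 'failed': 2}.
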
# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(models.User.current_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False,
                          db_tests=True):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run. See db_tests for more information.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests. If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel. That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today. TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
            file code that uploads the kernel config file to the client and
            tells the client of the new (local) path when compiling the kernel;
            the tests must be server side tests
    @param db_tests: if True, the test object can be found in the database
                     backing the test model. In this case, tests is a tuple
                     of test IDs which are used to retrieve the test objects
                     from the database. If False, tests is a tuple of test
                     dictionaries stored client-side in the AFE.

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
        rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                profilers, db_tests))
    cf_info['control_file'] = control_file.generate_control(
        tests=test_objects, kernels=kernel, platform=label,
        profilers=profiler_objects, is_server=cf_info['is_server'],
        client_control_file=client_control_file, profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    return cf_info


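# Illustrative usage of generate_control_file() (the second test name is
# hypothetical):
#   info = generate_control_file(tests=['sleeptest', 'dummy_Pass'])
#   info['control_file']   # the generated control file text
#   info['is_server']      # whether it must run as a server-side job
# The remaining keys, synch_count and dependencies, are described above.
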
def create_parameterized_job(name, priority, test, parameters, kernel=None,
                             label=None, profilers=(), profiler_parameters=None,
                             use_container=False, profile_only=None,
                             upload_kernel_config=False, hosts=(),
                             meta_hosts=(), one_time_hosts=(),
                             atomic_group_name=None, synch_count=None,
                             is_template=False, timeout=None,
                             timeout_mins=None, max_runtime_mins=None,
                             run_verify=False, email_list='', dependencies=(),
                             reboot_before=None, reboot_after=None,
                             parse_failed_repair=None, hostless=False,
                             keyvals=None, drone_set=None, run_reset=True,
                             require_ssq=None):
    """
    Creates and enqueues a parameterized job.

    Most parameters are a combination of the parameters for
    generate_control_file() and create_job(), with the exception of:

    @param test name or ID of the test to run
    @param parameters a map of parameter name ->
                          tuple of (param value, param type)
    @param profiler_parameters a dictionary of parameters for the profilers:
                                   key: profiler name
                                   value: dict of param name -> tuple of
                                                                (param value,
                                                                 param type)
    """
    # Save the values of the passed arguments here. What we're going to do with
    # them is pass them all to rpc_utils.get_create_job_common_args(), which
    # will extract the subset of these arguments that apply for
    # rpc_utils.create_job_common(), which we then pass in to that function.
    args = locals()

    # Set up the parameterized job configs
    test_obj = models.Test.smart_get(test)
    control_type = test_obj.test_type

    try:
        label = models.Label.smart_get(label)
    except models.Label.DoesNotExist:
        label = None

    kernel_objs = models.Kernel.create_kernels(kernel)
    profiler_objs = [models.Profiler.smart_get(profiler)
                     for profiler in profilers]

    parameterized_job = models.ParameterizedJob.objects.create(
            test=test_obj, label=label, use_container=use_container,
            profile_only=profile_only,
            upload_kernel_config=upload_kernel_config)
    parameterized_job.kernels.add(*kernel_objs)

    for profiler in profiler_objs:
        parameterized_profiler = models.ParameterizedJobProfiler.objects.create(
                parameterized_job=parameterized_job,
                profiler=profiler)
        profiler_params = profiler_parameters.get(profiler.name, {})
        for name, (value, param_type) in profiler_params.iteritems():
            models.ParameterizedJobProfilerParameter.objects.create(
                    parameterized_job_profiler=parameterized_profiler,
                    parameter_name=name,
                    parameter_value=value,
                    parameter_type=param_type)

    try:
        for parameter in test_obj.testparameter_set.all():
            if parameter.name in parameters:
                param_value, param_type = parameters.pop(parameter.name)
                parameterized_job.parameterizedjobparameter_set.create(
                        test_parameter=parameter, parameter_value=param_value,
                        parameter_type=param_type)

        if parameters:
            raise Exception('Extra parameters remain: %r' % parameters)

        return rpc_utils.create_job_common(
                parameterized_job=parameterized_job.id,
                control_type=control_type,
                **rpc_utils.get_create_job_common_args(args))
    except:
        parameterized_job.delete()
        raise


def create_job_page_handler(name, priority, control_file, control_type,
                            image=None, hostless=False, firmware_rw_build=None,
                            firmware_ro_build=None, test_source_build=None,
                            **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job. Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param image: ChromeOS build to be installed on the DUT. Defaults to None.
    @param firmware_rw_build: Firmware build to update RW firmware. Defaults to
                              None, i.e., RW firmware will not be updated.
    @param firmware_ro_build: Firmware build to update RO firmware. Defaults to
                              None, i.e., RO firmware will not be updated.
    @param test_source_build: Build to be used to retrieve test code. Defaults
                              to None.
    @param kwargs extra args that will be required by create_suite_job or
                  create_job.

    @returns The created Job id number.
    """
    control_file = rpc_utils.encode_ascii(control_file)
    if not control_file:
        raise model_logic.ValidationError({
                'control_file' : "Control file cannot be empty"})

    if image and hostless:
        builds = {}
        builds[provision.CROS_VERSION_PREFIX] = image
        if firmware_rw_build:
            builds[provision.FW_RW_VERSION_PREFIX] = firmware_rw_build
        if firmware_ro_build:
            builds[provision.FW_RO_VERSION_PREFIX] = firmware_ro_build
        return site_rpc_interface.create_suite_job(
                name=name, control_file=control_file, priority=priority,
                builds=builds, test_source_build=test_source_build, **kwargs)
    return create_job(name, priority, control_file, control_type, image=image,
                      hostless=hostless, **kwargs)


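# Illustrative usage of create_job_page_handler() (the job name, priority
# value, control file text and build below are all hypothetical):
#   create_job_page_handler('bvt-cq-run', 30, control_file_text, 'Server',
#                           hostless=True,
#                           image='lumpy-release/R41-6680.0.0')
# When both image and hostless are set, the request is turned into a suite job
# via site_rpc_interface.create_suite_job() with a builds dictionary.
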
@rpc_utils.route_rpc_to_master
def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, timeout_mins=None, max_runtime_mins=None,
               run_verify=False, email_list='', dependencies=(),
               reboot_before=None, reboot_after=None, parse_failed_repair=None,
               hostless=False, keyvals=None, drone_set=None, image=None,
               parent_job_id=None, test_retry=0, run_reset=True,
               require_ssp=None, args=(), **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job. Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous. If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param timeout_mins Minutes after this call returns until the job times
        out.
    @param max_runtime_mins Minutes from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.
    @param hostless if true, create a hostless job
    @param keyvals dict of keyvals to associate with the job
    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.
    @param drone_set The name of the drone set to run this test on.
    @param image OS image to install before running job.
    @param parent_job_id id of a job considered to be parent of created job.
    @param test_retry Number of times to retry test if the test did not
        complete successfully. (optional, default: 0)
    @param run_reset Should the host be reset before running the test?
    @param require_ssp Set to True to require server-side packaging to run the
                       test. If it's set to None, drone will still try to run
                       the server side with server-side packaging. If the
                       autotest-server package doesn't exist for the build or
                       image is not set, drone will run the test without server-
                       side packaging. Default is None.
    @param args A list of args to be injected into control file.
    @param kwargs extra keyword args. NOT USED.

    @returns The created Job id number.
    """
    if args:
        control_file = tools.inject_vars({'args': args}, control_file)

    if image is None:
        return rpc_utils.create_job_common(
                **rpc_utils.get_create_job_common_args(locals()))

    # When image is supplied use a known parameterized test already in the
    # database to pass the OS image path from the front end, through the
    # scheduler, and finally to autoserv as the --image parameter.

    # The test autoupdate_ParameterizedJob is in afe_autotests and used to
    # instantiate a Test object and from there a ParameterizedJob.
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    known_parameterized_job = models.ParameterizedJob.objects.create(
            test=known_test_obj)

    # autoupdate_ParameterizedJob has a single parameter, the image parameter,
    # stored in the table afe_test_parameters. We retrieve and set this
    # instance of the parameter to the OS image path.
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    known_parameterized_job.parameterizedjobparameter_set.create(
            test_parameter=image_parameter, parameter_value=image,
            parameter_type='string')

    # TODO(crbug.com/502638): save firmware build etc to parameterized_job.

    # By passing a parameterized_job to create_job_common the job entry in
    # the afe_jobs table will have the field parameterized_job_id set.
    # The scheduler uses this id in the afe_parameterized_jobs table to
    # match this job to our known test, and then with the
    # afe_parameterized_job_parameters table to get the actual image path.
    return rpc_utils.create_job_common(
            parameterized_job=known_parameterized_job.id,
            **rpc_utils.get_create_job_common_args(locals()))


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.

    @return: A list of dictionaries, each containing information
             about an aborted HQE.
    """
    query = models.HostQueueEntry.query_objects(filter_data)

    # Don't allow aborts on:
    #   1. Jobs that have already completed (whether or not they were aborted)
    #   2. Jobs that have already been aborted (but may not have completed)
    query = query.filter(complete=False).filter(aborted=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    models.HostQueueEntry.abort_host_queue_entries(host_queue_entries)
    hqe_info = [{'HostQueueEntry': hqe.id, 'Job': hqe.job_id,
                 'Job name': hqe.job.name} for hqe in host_queue_entries]
    return hqe_info


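# Illustrative usage of abort_host_queue_entries() (the job id and the Django
# filter keyword are hypothetical):
#   abort_host_queue_entries(job__id=123)
# Completed or already-aborted entries are skipped; the return value lists the
# HostQueueEntry id, job id and job name of every entry that was aborted.
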
def abort_special_tasks(**filter_data):
    """\
    Abort the special task, or tasks, specified in the filter.
    """
    query = models.SpecialTask.query_objects(filter_data)
    special_tasks = query.filter(is_active=True)
    for task in special_tasks:
        task.abort()


def _call_special_tasks_on_hosts(task, hosts):
    """\
    Schedules a set of hosts for a special task.

    @returns A list of hostnames that a special task was created for.
    """
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts)
    if shard_host_map and not utils.is_shard():
        raise ValueError('The following hosts are on shards, please '
                'follow the link to the shards and create jobs '
                'there instead. %s.' % shard_host_map)
    for host in hosts:
        models.SpecialTask.schedule_special_task(host, task)
    return list(sorted(host.hostname for host in hosts))


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.

    @returns A list of hostnames that a verify task was created for.
    """
    hosts = models.Host.query_objects(filter_data)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts, rpc_hostnames=True)

    # Filter out hosts on a shard from those on the master, forward
    # rpcs to the shard with an additional hostname__in filter, and
    # create a local SpecialTask for each remaining host.
    if shard_host_map and not utils.is_shard():
        hosts = [h for h in hosts if h.shard is None]
        for shard, hostnames in shard_host_map.iteritems():

            # The main client of this module is the frontend website, and
            # it invokes it with an 'id' or an 'id__in' filter. Regardless,
            # the 'hostname' filter should narrow down the list of hosts on
            # each shard even though we supply all the ids in filter_data.
            # This method uses hostname instead of id because it fits better
            # with the overall architecture of redirection functions in
            # rpc_utils.
            shard_filter = filter_data.copy()
            shard_filter['hostname__in'] = hostnames
            rpc_utils.run_rpc_on_multiple_hostnames(
                    'reverify_hosts', [shard], **shard_filter)

    # There is a race condition here if someone assigns a shard to one of these
    # hosts before we create the task. The host will stay on the master if:
    # 1. The host is not Ready
    # 2. The host is Ready but has a task
    # But if the host is Ready and doesn't have a task yet, it will get sent
    # to the shard as we're creating a task here.

    # Given that we only rarely verify Ready hosts it isn't worth putting this
    # entire method in a transaction. The worst case scenario is that we have
    # a verify running on a Ready host while the shard is using it. If the
    # verify fails, no subsequent tasks will be created against the host on the
    # master, and verifies are safe enough that this is OK.
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.VERIFY, hosts)


def repair_hosts(**filter_data):
    """\
    Schedules a set of hosts for repair.

    @returns A list of hostnames that a repair task was created for.
    """
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.REPAIR,
                                        models.Host.query_objects(filter_data))


def get_jobs(not_yet_run=False, running=False, finished=False,
             suite=False, sub=False, standalone=False, **filter_data):
    """\
    Extra status filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.

    Extra type filter args for get_jobs:
    -suite: Include only jobs with child jobs.
    -sub: Include only jobs with a parent job.
    -standalone: Include only jobs with no child or parent jobs.
    At most one of these three fields should be specified.
    """
    extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
                                                    running,
                                                    finished)
    filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
                                                                 suite,
                                                                 sub,
                                                                 standalone)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval, 'keyvals')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dict['keyvals'] = dict((keyval.key, keyval.value)
                                   for keyval in job.keyvals)
        if job.parameterized_job:
            job_dict['image'] = get_parameterized_autoupdate_image_url(job)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)


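# Illustrative queries combining the status and type filters documented above
# (the owner value is hypothetical, echoing the module docstring examples):
#   get_jobs(owner='showard', finished=True, suite=True)
#   get_num_jobs(owner='showard', not_yet_run=True)
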
def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 suite=False, sub=False, standalone=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
                                                    running,
                                                    finished)
    filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
                                                                 suite,
                                                                 sub,
                                                                 standalone)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds 'status_counts' and 'result_counts' fields.

    The 'status_counts' field is a dictionary mapping status strings to the
    number of hosts currently with that status, i.e. {'Queued' : 4,
    'Running' : 2}.

    The 'result_counts' field is piped to tko's rpc_interface and has the
    return format specified under get_group_counts.
    """
    jobs = get_jobs(**filter_data)
    ids = [job['id'] for job in jobs]
    all_status_counts = models.Job.objects.get_status_counts(ids)
    for job in jobs:
        job['status_counts'] = all_status_counts[job['id']]
        job['result_counts'] = tko_rpc_interface.get_status_counts(
                ['afe_job_id', 'afe_job_id'],
                header_groups=[['afe_job_id'], ['afe_job_id']],
                **{'afe_job_id': job['id']})
    return rpc_utils.prepare_for_serialization(jobs)


showarda965cef2009-05-15 23:17:41 +00001105def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
showarda8709c52008-07-03 19:44:54 +00001106 """\
1107 Retrieves all the information needed to clone a job.
1108 """
showarda8709c52008-07-03 19:44:54 +00001109 job = models.Job.objects.get(id=id)
showard29f7cd22009-04-29 21:16:24 +00001110 job_info = rpc_utils.get_job_info(job,
showarda965cef2009-05-15 23:17:41 +00001111 preserve_metahosts,
1112 queue_entry_filter_data)
showard945072f2008-09-03 20:34:59 +00001113
showardd9992fe2008-07-31 02:15:03 +00001114 host_dicts = []
showard29f7cd22009-04-29 21:16:24 +00001115 for host in job_info['hosts']:
1116 host_dict = get_hosts(id=host.id)[0]
1117 other_labels = host_dict['labels']
1118 if host_dict['platform']:
1119 other_labels.remove(host_dict['platform'])
1120 host_dict['other_labels'] = ', '.join(other_labels)
showardd9992fe2008-07-31 02:15:03 +00001121 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001122
showard29f7cd22009-04-29 21:16:24 +00001123 for host in job_info['one_time_hosts']:
1124 host_dict = dict(hostname=host.hostname,
1125 id=host.id,
1126 platform='(one-time host)',
1127 locked_text='')
1128 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001129
showard4d077562009-05-08 18:24:36 +00001130 # convert keys from Label objects to strings (names of labels)
showard29f7cd22009-04-29 21:16:24 +00001131 meta_host_counts = dict((meta_host.name, count) for meta_host, count
showard4d077562009-05-08 18:24:36 +00001132 in job_info['meta_host_counts'].iteritems())
showard29f7cd22009-04-29 21:16:24 +00001133
1134 info = dict(job=job.get_object_dict(),
1135 meta_host_counts=meta_host_counts,
1136 hosts=host_dicts)
1137 info['job']['dependencies'] = job_info['dependencies']
1138 if job_info['atomic_group']:
1139 info['atomic_group_name'] = (job_info['atomic_group']).name
1140 else:
1141 info['atomic_group_name'] = None
jamesren2275ef12010-04-12 18:25:06 +00001142 info['hostless'] = job_info['hostless']
jamesren76fcf192010-04-21 20:39:50 +00001143 info['drone_set'] = job.drone_set and job.drone_set.name
showarda8709c52008-07-03 19:44:54 +00001144
Eric Lid23bc192011-02-09 14:38:57 -08001145 if job.parameterized_job:
1146 info['job']['image'] = get_parameterized_autoupdate_image_url(job)
1147
showarda8709c52008-07-03 19:44:54 +00001148 return rpc_utils.prepare_for_serialization(info)
1149
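# Example (illustrative sketch; the job id is hypothetical): gather the data
# needed to pre-populate a "clone job" form, preserving metahost assignments.
#
#   get_info_for_clone(12345, preserve_metahosts=True)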
1150
showard34dc5fa2008-04-24 20:58:40 +00001151# host queue entries
1152
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001153def get_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001154 """\
showardc92da832009-04-07 18:14:34 +00001155 @returns A sequence of nested dictionaries of host and job information.
jadmanski0afbb632008-06-06 21:10:57 +00001156 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001157 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1158 'started_on__lte',
1159 start_time,
1160 end_time,
1161 **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001162 return rpc_utils.prepare_rows_as_nested_dicts(
1163 models.HostQueueEntry.query_objects(filter_data),
1164 ('host', 'atomic_group', 'job'))
showard34dc5fa2008-04-24 20:58:40 +00001165
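# Example (illustrative sketch; the job id and timestamps are hypothetical):
# entries for one job that started within a time window. start_time and
# end_time are injected as started_on__gte / started_on__lte filters.
#
#   get_host_queue_entries(job=12345, start_time='2015-06-01 00:00:00',
#                          end_time='2015-06-02 00:00:00')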
1166
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001167def get_num_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001168 """\
1169 Get the number of host queue entries matching the given filters.
1170 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001171 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1172 'started_on__lte',
1173 start_time,
1174 end_time,
1175 **filter_data)
jadmanski0afbb632008-06-06 21:10:57 +00001176 return models.HostQueueEntry.query_count(filter_data)
showard34dc5fa2008-04-24 20:58:40 +00001177
1178
showard1e935f12008-07-11 00:11:36 +00001179def get_hqe_percentage_complete(**filter_data):
1180 """
showardc92da832009-04-07 18:14:34 +00001181 Computes the fraction of host queue entries matching the given filter data
showard1e935f12008-07-11 00:11:36 +00001182 that are complete.
1183 """
1184 query = models.HostQueueEntry.query_objects(filter_data)
1185 complete_count = query.filter(complete=True).count()
1186 total_count = query.count()
1187 if total_count == 0:
1188 return 1
1189 return float(complete_count) / total_count
1190
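# Example (illustrative sketch; the job id is hypothetical): fraction of a
# job's host queue entries that have completed, returned as a float in [0, 1].
#
#   get_hqe_percentage_complete(job=12345)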
1191
showard1a5a4082009-07-28 20:01:37 +00001192# special tasks
1193
1194def get_special_tasks(**filter_data):
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001195 """Get special task entries from the local database.
1196
1197 Query the special tasks table for tasks matching the given
1198 `filter_data`, and return a list of the results. No attempt is
1199 made to forward the call to shards; the buck will stop here.
1200 The caller is expected to know the target shard for such reasons
1201 as:
1202 * The caller is a service (such as gs_offloader) configured
1203 to operate on behalf of one specific shard, and no other.
1204 * The caller has a host as a parameter, and knows that this is
1205 the shard assigned to that host.
1206
1207 @param filter_data Filter keywords to pass to the underlying
1208 database query.
1209
1210 """
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001211 return rpc_utils.prepare_rows_as_nested_dicts(
1212 models.SpecialTask.query_objects(filter_data),
1213 ('host', 'queue_entry'))
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001214
1215
1216def get_host_special_tasks(host_id, **filter_data):
1217 """Get special task entries for a given host.
1218
1219 Query the special tasks table for tasks that ran on the host
1220 given by `host_id` and matching the given `filter_data`.
1221 Return a list of the results. If the host is assigned to a
1222 shard, forward this call to that shard.
1223
1224 @param host_id Id in the database of the target host.
1225 @param filter_data Filter keywords to pass to the underlying
1226 database query.
1227
1228 """
MK Ryu0c1a37d2015-04-30 12:00:55 -07001229 # Retrieve host data even if the host is in an invalid state.
1230 host = models.Host.smart_get(host_id, False)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001231 if not host.shard:
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001232 return get_special_tasks(host_id=host_id, **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001233 else:
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001234 # The return values from AFE methods are post-processed
1235 # objects that aren't JSON-serializable. So, we have to
1236 # call AFE.run() to get the raw, serializable output from
1237 # the shard.
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001238 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1239 return shard_afe.run('get_special_tasks',
1240 host_id=host_id, **filter_data)
showard1a5a4082009-07-28 20:01:37 +00001241
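# Example (illustrative sketch; the host id and task filter value are
# hypothetical): special tasks for one host, newest first.
# get_host_special_tasks() forwards to the owning shard when needed, while
# get_special_tasks() always queries the local database.
#
#   get_host_special_tasks(1234, task='Repair', sort_by=['-time_started'])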
1242
MK Ryu0c1a37d2015-04-30 12:00:55 -07001243def get_num_special_tasks(**kwargs):
1244 """Get the number of special task entries from the local database.
1245
1246 Query the special tasks table for tasks matching the given 'kwargs',
1247 and return the number of the results. No attempt is made to forward
1248 the call to shards; the buck will stop here.
1249
1250 @param kwargs Filter keywords to pass to the underlying database query.
1251
1252 """
1253 return models.SpecialTask.query_count(kwargs)
1254
1255
1256def get_host_num_special_tasks(host, **kwargs):
1257 """Get special task entries for a given host.
1258
1259 Query the special tasks table for tasks that ran on the host
1260 given by 'host' and matching the given 'kwargs'.
1261 Return the count of matching tasks. If the host is assigned to a
1262 shard, forward this call to that shard.
1263
1264 @param host Id or name of a host; in practice, usually a hostname.
1265 @param kwargs Filter keywords to pass to the underlying database query.
1266
1267 """
1268 # Retrieve host data even if the host is in an invalid state.
1269 host_model = models.Host.smart_get(host, False)
1270 if not host_model.shard:
1271 return get_num_special_tasks(host=host, **kwargs)
1272 else:
1273 shard_afe = frontend.AFE(server=host_model.shard.rpc_hostname())
1274 return shard_afe.run('get_num_special_tasks', host=host, **kwargs)
1275
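# Example (illustrative sketch; the hostname and task filter value are
# hypothetical): count the repair tasks recorded for a host, forwarding to
# its shard if necessary.
#
#   get_host_num_special_tasks('hostname.example', task='Repair')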
1276
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001277def get_status_task(host_id, end_time):
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001278 """Get the "status task" for a host from the local shard.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001279
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001280 Returns a single special task representing the given host's
1281 "status task". The status task is a completed special task that
1282 identifies whether the corresponding host was working or broken
1283 when it completed. A successful task indicates a working host;
1284 a failed task indicates broken.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001285
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001286 This call will not be forwarded to a shard; the receiving server
1287 must be the shard that owns the host.
1288
1289 @param host_id Id in the database of the target host.
1290 @param end_time Time reference for the host's status.
1291
1292 @return A single task; its status (successful or not)
1293 corresponds to the status of the host (working or
1294 broken) at the given time. If no task is found, return
1295 `None`.
1296
1297 """
1298 tasklist = rpc_utils.prepare_rows_as_nested_dicts(
1299 status_history.get_status_task(host_id, end_time),
1300 ('host', 'queue_entry'))
1301 return tasklist[0] if tasklist else None
1302
1303
1304def get_host_status_task(host_id, end_time):
1305 """Get the "status task" for a host from its owning shard.
1306
1307 Finds the given host's owning shard, and forwards to it a call
1308 to `get_status_task()` (see above).
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001309
1310 @param host_id Id in the database of the target host.
1311 @param end_time Time reference for the host's status.
1312
1313 @return A single task; its status (successful or not)
1314 corresponds to the status of the host (working or
1315 broken) at the given time. If no task is found, return
1316 `None`.
1317
1318 """
1319 host = models.Host.smart_get(host_id)
1320 if not host.shard:
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001321 return get_status_task(host_id, end_time)
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001322 else:
1323 # The return values from AFE methods are post-processed
1324 # objects that aren't JSON-serializable. So, we have to
1325 # call AFE.run() to get the raw, serializable output from
1326 # the shard.
1327 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1328 return shard_afe.run('get_status_task',
1329 host_id=host_id, end_time=end_time)
1330
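# Example (illustrative sketch; the host id and timestamp are hypothetical):
# find the task that determined a host's working/broken state as of a time.
#
#   get_host_status_task(1234, '2015-06-01 00:00:00')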
1331
J. Richard Barnette8abbfd62015-06-23 12:46:54 -07001332def get_host_diagnosis_interval(host_id, end_time, success):
1333 """Find a "diagnosis interval" for a given host.
1334
1335 A "diagnosis interval" identifies a start and end time where
1336 the host went from "working" to "broken", or vice versa. The
1337 interval's starting time is the starting time of the last status
1338 task with the old status; the end time is the finish time of the
1339 first status task with the new status.
1340
1341 This routine finds the most recent diagnosis interval for the
1342 given host prior to `end_time`, with a starting status matching
1343 `success`. If `success` is true, the interval will start with a
1344 successful status task; if false the interval will start with a
1345 failed status task.
1346
1347 @param host_id Id in the database of the target host.
1348 @param end_time Time reference for the diagnosis interval.
1349 @param success Whether the diagnosis interval should start
1350 with a successful or failed status task.
1351
1352 @return A list of two strings. The first is the timestamp for
1353 the beginning of the interval; the second is the
1354 timestamp for the end. If the host has never changed
1355 state, the list is empty.
1356
1357 """
1358 host = models.Host.smart_get(host_id)
1359 if not host.shard:
1360 return status_history.get_diagnosis_interval(
1361 host_id, end_time, success)
1362 else:
1363 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1364 return shard_afe.get_host_diagnosis_interval(
1365 host_id, end_time, success)
1366
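# Example (illustrative sketch; the host id and timestamp are hypothetical):
# most recent interval before the given time in which the host went from
# working to broken (success=True means the interval starts with a
# successful status task).
#
#   get_host_diagnosis_interval(1234, '2015-06-01 00:00:00', True)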
1367
showardc0ac3a72009-07-08 21:14:45 +00001368# support for host detail view
1369
MK Ryu0c1a37d2015-04-30 12:00:55 -07001370def get_host_queue_entries_and_special_tasks(host, query_start=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001371 query_limit=None, start_time=None,
1372 end_time=None):
showardc0ac3a72009-07-08 21:14:45 +00001373 """
1374 @returns An interleaved list of HostQueueEntries and SpecialTasks,
1375 in approximate run order. Each dict contains keys for type, host,
1376 job, status, started_on, execution_path, and ID.
1377 """
1378 total_limit = None
1379 if query_limit is not None:
1380 total_limit = query_start + query_limit
MK Ryu0c1a37d2015-04-30 12:00:55 -07001381 filter_data_common = {'host': host,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001382 'query_limit': total_limit,
1383 'sort_by': ['-id']}
showardc0ac3a72009-07-08 21:14:45 +00001384
MK Ryu0c1a37d2015-04-30 12:00:55 -07001385 filter_data_special_tasks = rpc_utils.inject_times_to_filter(
1386 'time_started__gte', 'time_started__lte', start_time, end_time,
1387 **filter_data_common)
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001388
MK Ryu0c1a37d2015-04-30 12:00:55 -07001389 queue_entries = get_host_queue_entries(
1390 start_time, end_time, **filter_data_common)
1391 special_tasks = get_host_special_tasks(host, **filter_data_special_tasks)
showardc0ac3a72009-07-08 21:14:45 +00001392
1393 interleaved_entries = rpc_utils.interleave_entries(queue_entries,
1394 special_tasks)
1395 if query_start is not None:
1396 interleaved_entries = interleaved_entries[query_start:]
1397 if query_limit is not None:
1398 interleaved_entries = interleaved_entries[:query_limit]
MK Ryu0c1a37d2015-04-30 12:00:55 -07001399 return rpc_utils.prepare_host_queue_entries_and_special_tasks(
1400 interleaved_entries, queue_entries)
showardc0ac3a72009-07-08 21:14:45 +00001401
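# Example (illustrative sketch; the hostname is hypothetical): first page of
# the host detail view, interleaving queue entries and special tasks.
#
#   get_host_queue_entries_and_special_tasks('hostname.example',
#                                            query_start=0, query_limit=20)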
1402
MK Ryu0c1a37d2015-04-30 12:00:55 -07001403def get_num_host_queue_entries_and_special_tasks(host, start_time=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001404 end_time=None):
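    """Count the host queue entries and special tasks for a host.

    @param host Id or name of the target host.
    @param start_time Optional lower bound on entry/task start times.
    @param end_time Optional upper bound on entry/task start times.

    @return Combined count of matching host queue entries and special tasks.
    """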
MK Ryu0c1a37d2015-04-30 12:00:55 -07001405 filter_data_common = {'host': host}
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001406
1407 filter_data_queue_entries, filter_data_special_tasks = (
1408 rpc_utils.inject_times_to_hqe_special_tasks_filters(
1409 filter_data_common, start_time, end_time))
1410
1411 return (models.HostQueueEntry.query_count(filter_data_queue_entries)
MK Ryu0c1a37d2015-04-30 12:00:55 -07001412 + get_host_num_special_tasks(**filter_data_special_tasks))
showardc0ac3a72009-07-08 21:14:45 +00001413
1414
showard29f7cd22009-04-29 21:16:24 +00001415# recurring run
1416
1417def get_recurring(**filter_data):
1418 return rpc_utils.prepare_rows_as_nested_dicts(
1419 models.RecurringRun.query_objects(filter_data),
1420 ('job', 'owner'))
1421
1422
1423def get_num_recurring(**filter_data):
1424 return models.RecurringRun.query_count(filter_data)
1425
1426
1427def delete_recurring_runs(**filter_data):
1428 to_delete = models.RecurringRun.query_objects(filter_data)
1429 to_delete.delete()
1430
1431
1432def create_recurring_run(job_id, start_date, loop_period, loop_count):
showard64a95952010-01-13 21:27:16 +00001433 owner = models.User.current_user().login
showard29f7cd22009-04-29 21:16:24 +00001434 job = models.Job.objects.get(id=job_id)
1435 return job.create_recurring_job(start_date=start_date,
1436 loop_period=loop_period,
1437 loop_count=loop_count,
1438 owner=owner)
1439
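# Example (illustrative sketch; the job id and start date are hypothetical,
# and the loop_period unit is assumed here to be seconds): schedule job 12345
# to recur ten times.
#
#   create_recurring_run(12345, '2015-06-01 00:00:00', 3600, 10)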
1440
mblighe8819cd2008-02-15 16:48:40 +00001441# other
1442
showarde0b63622008-08-04 20:58:47 +00001443def echo(data=""):
1444 """\
1445 Returns the string passed in. Useful as a basic check that RPC calls
1446 can be made successfully.
1447 """
1448 return data
1449
1450
showardb7a52fd2009-04-27 20:10:56 +00001451def get_motd():
1452 """\
1453 Returns the message of the day as a string.
1454 """
1455 return rpc_utils.get_motd()
1456
1457
mblighe8819cd2008-02-15 16:48:40 +00001458def get_static_data():
jadmanski0afbb632008-06-06 21:10:57 +00001459 """\
1460 Returns a dictionary containing a bunch of data that shouldn't change
1461 often and is otherwise inaccessible. This includes:
showardc92da832009-04-07 18:14:34 +00001462
1463 priorities: List of job priority choices.
1464 default_priority: Default priority value for new jobs.
1465 users: Sorted list of all users.
Jiaxi Luo31874592014-06-11 10:36:35 -07001466 labels: Sorted list of labels that do not start with 'cros-version' or
1467 'fw-version'.
showardc92da832009-04-07 18:14:34 +00001468 atomic_groups: Sorted list of all atomic groups.
1469 tests: Sorted list of all tests.
1470 profilers: Sorted list of all profilers.
1471 current_user: Logged-in username.
1472 host_statuses: Sorted list of possible Host statuses.
1473 job_statuses: Sorted list of possible HostQueueEntry statuses.
Simran Basi7e605742013-11-12 13:43:36 -08001474 job_timeout_mins_default: The default job timeout length in minutes.
showarda1e74b32009-05-12 17:32:04 +00001475 parse_failed_repair_default: Default value for the parse_failed_repair job
Jiaxi Luo31874592014-06-11 10:36:35 -07001476 option.
showardc92da832009-04-07 18:14:34 +00001477 reboot_before_options: A list of valid RebootBefore string enums.
1478 reboot_after_options: A list of valid RebootAfter string enums.
1479 motd: Server's message of the day.
1480 status_dictionary: A mapping from one word job status names to a more
1481 informative description.
jadmanski0afbb632008-06-06 21:10:57 +00001482 """
showard21baa452008-10-21 00:08:39 +00001483
1484 job_fields = models.Job.get_field_dict()
jamesren76fcf192010-04-21 20:39:50 +00001485 default_drone_set_name = models.DroneSet.default_drone_set_name()
1486 drone_sets = ([default_drone_set_name] +
1487 sorted(drone_set.name for drone_set in
1488 models.DroneSet.objects.exclude(
1489 name=default_drone_set_name)))
showard21baa452008-10-21 00:08:39 +00001490
jadmanski0afbb632008-06-06 21:10:57 +00001491 result = {}
Alex Miller7d658cf2013-09-04 16:00:35 -07001492 result['priorities'] = priorities.Priority.choices()
1493 default_priority = priorities.Priority.DEFAULT
1494 result['default_priority'] = 'Default'
1495 result['max_schedulable_priority'] = priorities.Priority.DEFAULT
jadmanski0afbb632008-06-06 21:10:57 +00001496 result['users'] = get_users(sort_by=['login'])
Jiaxi Luo31874592014-06-11 10:36:35 -07001497
1498 label_exclude_filters = [{'name__startswith': 'cros-version'},
1499 {'name__startswith': 'fw-version'}]
1500 result['labels'] = get_labels(
1501 label_exclude_filters,
1502 sort_by=['-platform', 'name'])
1503
showardc92da832009-04-07 18:14:34 +00001504 result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
jadmanski0afbb632008-06-06 21:10:57 +00001505 result['tests'] = get_tests(sort_by=['name'])
showard2b9a88b2008-06-13 20:55:03 +00001506 result['profilers'] = get_profilers(sort_by=['name'])
showard0fc38302008-10-23 00:44:07 +00001507 result['current_user'] = rpc_utils.prepare_for_serialization(
showard64a95952010-01-13 21:27:16 +00001508 models.User.current_user().get_object_dict())
showard2b9a88b2008-06-13 20:55:03 +00001509 result['host_statuses'] = sorted(models.Host.Status.names)
mbligh5a198b92008-12-11 19:33:29 +00001510 result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
Simran Basi7e605742013-11-12 13:43:36 -08001511 result['job_timeout_mins_default'] = models.Job.DEFAULT_TIMEOUT_MINS
Simran Basi34217022012-11-06 13:43:15 -08001512 result['job_max_runtime_mins_default'] = (
1513 models.Job.DEFAULT_MAX_RUNTIME_MINS)
showarda1e74b32009-05-12 17:32:04 +00001514 result['parse_failed_repair_default'] = bool(
1515 models.Job.DEFAULT_PARSE_FAILED_REPAIR)
jamesrendd855242010-03-02 22:23:44 +00001516 result['reboot_before_options'] = model_attributes.RebootBefore.names
1517 result['reboot_after_options'] = model_attributes.RebootAfter.names
showard8fbae652009-01-20 23:23:10 +00001518 result['motd'] = rpc_utils.get_motd()
jamesren76fcf192010-04-21 20:39:50 +00001519 result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
1520 result['drone_sets'] = drone_sets
jamesren4a41e012010-07-16 22:33:48 +00001521 result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
showard8ac29b42008-07-17 17:01:55 +00001522
showardd3dc1992009-04-22 21:01:40 +00001523 result['status_dictionary'] = {"Aborted": "Aborted",
showard8ac29b42008-07-17 17:01:55 +00001524 "Verifying": "Verifying Host",
Alex Millerdfff2fd2013-05-28 13:05:06 -07001525 "Provisioning": "Provisioning Host",
showard8ac29b42008-07-17 17:01:55 +00001526 "Pending": "Waiting on other hosts",
1527 "Running": "Running autoserv",
1528 "Completed": "Autoserv completed",
1529 "Failed": "Failed to complete",
showardd823b362008-07-24 16:35:46 +00001530 "Queued": "Queued",
showard5deb6772008-11-04 21:54:33 +00001531 "Starting": "Next in host's queue",
1532 "Stopped": "Other host(s) failed verify",
showardd3dc1992009-04-22 21:01:40 +00001533 "Parsing": "Awaiting parse of final results",
showard29f7cd22009-04-29 21:16:24 +00001534 "Gathering": "Gathering log files",
showard8cc058f2009-09-08 16:26:33 +00001535 "Template": "Template job for recurring run",
mbligh4608b002010-01-05 18:22:35 +00001536 "Waiting": "Waiting for scheduler action",
Dan Shi07e09af2013-04-12 09:31:29 -07001537 "Archiving": "Archiving results",
1538 "Resetting": "Resetting hosts"}
Jiaxi Luo421608e2014-07-07 14:38:00 -07001539
1540 result['wmatrix_url'] = rpc_utils.get_wmatrix_url()
Simran Basi71206ef2014-08-13 13:51:18 -07001541 result['is_moblab'] = bool(utils.is_moblab())
Jiaxi Luo421608e2014-07-07 14:38:00 -07001542
jadmanski0afbb632008-06-06 21:10:57 +00001543 return result
showard29f7cd22009-04-29 21:16:24 +00001544
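# Example (illustrative sketch): clients typically call get_static_data()
# once and cache the result, e.g. to populate priority and status choices.
#
#   static = get_static_data()
#   priority_choices = static['priorities']
#   job_statuses = static['job_statuses']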
1545
1546def get_server_time():
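    """Return the current server time as a 'YYYY-MM-DD HH:MM' string."""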
1547 return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")