Aviv Keshet0b9cfc92013-02-05 11:36:02 -08001# pylint: disable-msg=C0111
2
mblighe8819cd2008-02-15 16:48:40 +00003"""\
4Functions to expose over the RPC interface.
5
6For all modify* and delete* functions that ask for an 'id' parameter to
7identify the object to operate on, the id may be either
8 * the database row ID
9 * the name of the object (label name, hostname, user login, etc.)
 10 * a dictionary containing a uniquely identifying field (this option should seldom
11 be used)
12
13When specifying foreign key fields (i.e. adding hosts to a label, or adding
14users to an ACL group), the given value may be either the database row ID or the
15name of the object.
16
17All get* functions return lists of dictionaries. Each dictionary represents one
18object and maps field names to values.
19
20Some examples:
21modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
22modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
23modify_test('sleeptest', test_type='Client', params=', seconds=60')
24delete_acl_group(1) # delete by ID
25delete_acl_group('Everyone') # delete by name
26acl_group_add_users('Everyone', ['mbligh', 'showard'])
27get_jobs(owner='showard', status='Queued')
28
mbligh93c80e62009-02-03 17:48:30 +000029See doctests/001_rpc_test.txt for (lots) more examples.
mblighe8819cd2008-02-15 16:48:40 +000030"""
31
32__author__ = 'showard@google.com (Steve Howard)'
33
MK Ryu9c5fbbe2015-02-11 15:46:22 -080034import sys
showard29f7cd22009-04-29 21:16:24 +000035import datetime
MK Ryu9c5fbbe2015-02-11 15:46:22 -080036
Moises Osorio2dc7a102014-12-02 18:24:02 -080037from django.db.models import Count
showardcafd16e2009-05-29 18:37:49 +000038import common
Simran Basib6ec8ae2014-04-23 12:05:08 -070039from autotest_lib.client.common_lib import priorities
Gabe Black1e1c41b2015-02-04 23:55:15 -080040from autotest_lib.client.common_lib.cros.graphite import autotest_stats
showard6d7b2ff2009-06-10 00:16:47 +000041from autotest_lib.frontend.afe import control_file, rpc_utils
J. Richard Barnetteb5164d62015-04-13 12:59:31 -070042from autotest_lib.frontend.afe import models, model_logic, model_attributes
Simran Basib6ec8ae2014-04-23 12:05:08 -070043from autotest_lib.frontend.afe import site_rpc_interface
Moises Osorio2dc7a102014-12-02 18:24:02 -080044from autotest_lib.frontend.tko import models as tko_models
Jiaxi Luoaac54572014-06-04 13:57:02 -070045from autotest_lib.frontend.tko import rpc_interface as tko_rpc_interface
J. Richard Barnetteb5164d62015-04-13 12:59:31 -070046from autotest_lib.server import frontend
Simran Basi71206ef2014-08-13 13:51:18 -070047from autotest_lib.server import utils
Dan Shid215dbe2015-06-18 16:14:59 -070048from autotest_lib.server.cros import provision
Jiaxi Luo90190c92014-06-18 12:35:57 -070049from autotest_lib.server.cros.dynamic_suite import tools
J. Richard Barnette39255fa2015-04-14 17:23:41 -070050from autotest_lib.site_utils import status_history
mblighe8819cd2008-02-15 16:48:40 +000051
Moises Osorio2dc7a102014-12-02 18:24:02 -080052
Gabe Black1e1c41b2015-02-04 23:55:15 -080053_timer = autotest_stats.Timer('rpc_interface')
Moises Osorio2dc7a102014-12-02 18:24:02 -080054
Eric Lid23bc192011-02-09 14:38:57 -080055def get_parameterized_autoupdate_image_url(job):
56 """Get the parameterized autoupdate image url from a parameterized job."""
57 known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
58 image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
beeps8bb1f7d2013-08-05 01:30:09 -070059 name='image')
Eric Lid23bc192011-02-09 14:38:57 -080060 para_set = job.parameterized_job.parameterizedjobparameter_set
61 job_test_para = para_set.get(test_parameter=image_parameter)
62 return job_test_para.parameter_value
63
64
mblighe8819cd2008-02-15 16:48:40 +000065# labels
66
mblighe8819cd2008-02-15 16:48:40 +000067def modify_label(id, **data):
MK Ryu8c554cf2015-06-12 11:45:50 -070068 """Modify a label.
69
70 @param id: id or name of a label. More often a label name.
71 @param data: New data for a label.
72 """
73 label_model = models.Label.smart_get(id)
74
75 # Master forwards the RPC to shards
76 if not utils.is_shard():
77 rpc_utils.fanout_rpc(label_model.host_set.all(), 'modify_label', False,
78 id=id, **data)
79
80 label_model.update_object(data)
mblighe8819cd2008-02-15 16:48:40 +000081
82
83def delete_label(id):
MK Ryu8c554cf2015-06-12 11:45:50 -070084 """Delete a label.
85
86 @param id: id or name of a label. More often a label name.
87 """
88 label_model = models.Label.smart_get(id)
89
90 # Master forwards the RPC to shards
91 if not utils.is_shard():
92 rpc_utils.fanout_rpc(label_model.host_set.all(), 'delete_label', False,
93 id=id)
94
95 label_model.delete()
mblighe8819cd2008-02-15 16:48:40 +000096
Prashanth Balasubramanian744898f2015-01-13 05:04:16 -080097
MK Ryu9c5fbbe2015-02-11 15:46:22 -080098def add_label(name, ignore_exception_if_exists=False, **kwargs):
MK Ryucf027c62015-03-04 12:00:50 -080099 """Adds a new label of a given name.
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800100
101 @param name: label name.
102 @param ignore_exception_if_exists: If True and the exception was
 103 thrown due to a duplicate label name when adding a label,
104 then suppress the exception. Default is False.
105 @param kwargs: keyword args that store more info about a label
106 other than the name.
107 @return: int/long id of a new label.
108 """
109 # models.Label.add_object() throws model_logic.ValidationError
110 # when it is given a label name that already exists.
111 # However, ValidationError can be thrown with different errors,
112 # and those errors should be thrown up to the call chain.
113 try:
114 label = models.Label.add_object(name=name, **kwargs)
115 except:
116 exc_info = sys.exc_info()
117 if ignore_exception_if_exists:
118 label = rpc_utils.get_label(name)
119 # If the exception is raised not because of duplicated
120 # "name", then raise the original exception.
121 if label is None:
122 raise exc_info[0], exc_info[1], exc_info[2]
123 else:
124 raise exc_info[0], exc_info[1], exc_info[2]
125 return label.id
126
127
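# A minimal usage sketch (illustrative only, not called by production code):
# how a caller might create a label idempotently with add_label(). Assumes a
# reachable AFE database; the label name is hypothetical.
def _example_add_label_usage():
    # A duplicate-name failure is swallowed and the existing label's id is
    # returned, so this is safe to call more than once.
    return add_label('example-pool', ignore_exception_if_exists=True)
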
128def add_label_to_hosts(id, hosts):
MK Ryucf027c62015-03-04 12:00:50 -0800129 """Adds a label of the given id to the given hosts only in local DB.
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800130
131 @param id: id or name of a label. More often a label name.
132 @param hosts: The hostnames of hosts that need the label.
133
134 @raises models.Label.DoesNotExist: If the label with id doesn't exist.
135 """
136 label = models.Label.smart_get(id)
137 host_objs = models.Host.smart_get_bulk(hosts)
138 if label.platform:
139 models.Host.check_no_platform(host_objs)
140 label.host_set.add(*host_objs)
141
142
MK Ryufbb002c2015-06-08 14:13:16 -0700143@rpc_utils.route_rpc_to_master
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800144def label_add_hosts(id, hosts):
MK Ryucf027c62015-03-04 12:00:50 -0800145 """Adds a label with the given id to the given hosts.
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800146
 147 This method should be run only on the master, not on shards.
Prashanth Balasubramanian5949b4a2014-11-23 12:58:30 -0800148 The given label will be created if it doesn't exist, provided the `id`
 149 supplied is a label name, not an int/long id.
150
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800151 @param id: id or name of a label. More often a label name.
Prashanth Balasubramanian5949b4a2014-11-23 12:58:30 -0800152 @param hosts: A list of hostnames or ids. More often hostnames.
153
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800154 @raises ValueError: If the id specified is an int/long (label id)
155 while the label does not exist.
Prashanth Balasubramanian5949b4a2014-11-23 12:58:30 -0800156 """
Prashanth Balasubramanian5949b4a2014-11-23 12:58:30 -0800157 try:
Prashanth Balasubramanian5949b4a2014-11-23 12:58:30 -0800158 label = models.Label.smart_get(id)
159 except models.Label.DoesNotExist:
160 # This matches the type checks in smart_get, which is a hack
 161 # in and of itself. The aim here is to create any non-existent
162 # label, which we cannot do if the 'id' specified isn't a label name.
163 if isinstance(id, basestring):
164 label = models.Label.smart_get(add_label(id))
165 else:
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800166 raise ValueError('Label id (%s) does not exist. Please specify '
167 'the argument, id, as a string (label name).'
168 % id)
MK Ryucf027c62015-03-04 12:00:50 -0800169
170 host_objs = models.Host.smart_get_bulk(hosts)
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800171 # Make sure the label exists on the shard with the same id
172 # as it is on the master.
MK Ryucf027c62015-03-04 12:00:50 -0800173 # It is possible that the label is already on a shard because
 174 # we add a new label only to the shards that have hosts to which
 175 # the label is going to be attached.
176 # For example, we add a label L1 to a host in shard S1.
177 # Master and S1 will have L1 but other shards won't.
178 # Later, when we add the same label L1 to hosts in shards S1 and S2,
179 # S1 already has the label but S2 doesn't.
180 # S2 should have the new label without any problem.
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800181 # We ignore exception in such a case.
182 rpc_utils.fanout_rpc(
MK Ryue019aae2015-07-07 12:46:07 -0700183 host_objs, 'add_label', include_hostnames=False,
184 name=label.name, ignore_exception_if_exists=True,
185 id=label.id, platform=label.platform)
MK Ryu9c5fbbe2015-02-11 15:46:22 -0800186 rpc_utils.fanout_rpc(host_objs, 'add_label_to_hosts', id=id)
showardbbabf502008-06-06 00:02:02 +0000187
MK Ryu26f0c932015-05-28 18:14:33 -0700188 add_label_to_hosts(id, hosts)
189
showardbbabf502008-06-06 00:02:02 +0000190
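# A minimal usage sketch (illustrative only): attaching a label to hosts by
# name. The RPC is routed to the master, which creates the label if needed
# and fans the change out to shards owning the hosts. Hostnames are
# hypothetical.
def _example_label_add_hosts_usage():
    label_add_hosts('example-pool', ['host1.cros', 'host2.cros'])
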
MK Ryucf027c62015-03-04 12:00:50 -0800191def remove_label_from_hosts(id, hosts):
192 """Removes a label of the given id from the given hosts only in local DB.
193
194 @param id: id or name of a label.
 195 @param hosts: The hostnames of hosts from which to remove the label.
196 """
showardbe3ec042008-11-12 18:16:07 +0000197 host_objs = models.Host.smart_get_bulk(hosts)
jadmanski0afbb632008-06-06 21:10:57 +0000198 models.Label.smart_get(id).host_set.remove(*host_objs)
showardbbabf502008-06-06 00:02:02 +0000199
200
MK Ryufbb002c2015-06-08 14:13:16 -0700201@rpc_utils.route_rpc_to_master
MK Ryucf027c62015-03-04 12:00:50 -0800202def label_remove_hosts(id, hosts):
203 """Removes a label of the given id from the given hosts.
204
 205 This method should be run only on the master, not on shards.
206
207 @param id: id or name of a label.
208 @param hosts: A list of hostnames or ids. More often hostnames.
209 """
MK Ryucf027c62015-03-04 12:00:50 -0800210 host_objs = models.Host.smart_get_bulk(hosts)
211 rpc_utils.fanout_rpc(host_objs, 'remove_label_from_hosts', id=id)
212
MK Ryu26f0c932015-05-28 18:14:33 -0700213 remove_label_from_hosts(id, hosts)
214
MK Ryucf027c62015-03-04 12:00:50 -0800215
Jiaxi Luo31874592014-06-11 10:36:35 -0700216def get_labels(exclude_filters=(), **filter_data):
showardc92da832009-04-07 18:14:34 +0000217 """\
Jiaxi Luo31874592014-06-11 10:36:35 -0700218 @param exclude_filters: A sequence of dictionaries of filters.
219
showardc92da832009-04-07 18:14:34 +0000220 @returns A sequence of nested dictionaries of label information.
221 """
Jiaxi Luo31874592014-06-11 10:36:35 -0700222 labels = models.Label.query_objects(filter_data)
223 for exclude_filter in exclude_filters:
224 labels = labels.exclude(**exclude_filter)
225 return rpc_utils.prepare_rows_as_nested_dicts(labels, ('atomic_group',))
showardc92da832009-04-07 18:14:34 +0000226
227
228# atomic groups
229
showarde9450c92009-06-30 01:58:52 +0000230def add_atomic_group(name, max_number_of_machines=None, description=None):
showardc92da832009-04-07 18:14:34 +0000231 return models.AtomicGroup.add_object(
232 name=name, max_number_of_machines=max_number_of_machines,
233 description=description).id
234
235
236def modify_atomic_group(id, **data):
237 models.AtomicGroup.smart_get(id).update_object(data)
238
239
240def delete_atomic_group(id):
241 models.AtomicGroup.smart_get(id).delete()
242
243
244def atomic_group_add_labels(id, labels):
245 label_objs = models.Label.smart_get_bulk(labels)
246 models.AtomicGroup.smart_get(id).label_set.add(*label_objs)
247
248
249def atomic_group_remove_labels(id, labels):
250 label_objs = models.Label.smart_get_bulk(labels)
251 models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)
252
253
254def get_atomic_groups(**filter_data):
jadmanski0afbb632008-06-06 21:10:57 +0000255 return rpc_utils.prepare_for_serialization(
showardc92da832009-04-07 18:14:34 +0000256 models.AtomicGroup.list_objects(filter_data))
mblighe8819cd2008-02-15 16:48:40 +0000257
258
259# hosts
260
Matthew Sartori68186332015-04-27 17:19:53 -0700261def add_host(hostname, status=None, locked=None, lock_reason='', protection=None):
262 if locked and not lock_reason:
263 raise model_logic.ValidationError(
264 {'locked': 'Please provide a reason for locking when adding host.'})
265
jadmanski0afbb632008-06-06 21:10:57 +0000266 return models.Host.add_object(hostname=hostname, status=status,
Matthew Sartori68186332015-04-27 17:19:53 -0700267 locked=locked, lock_reason=lock_reason,
268 protection=protection).id
mblighe8819cd2008-02-15 16:48:40 +0000269
270
Jakob Juelich50e91f72014-10-01 12:43:23 -0700271@rpc_utils.forward_single_host_rpc_to_shard
mblighe8819cd2008-02-15 16:48:40 +0000272def modify_host(id, **data):
Jakob Juelich50e91f72014-10-01 12:43:23 -0700273 """Modify local attributes of a host.
274
275 If this is called on the master, but the host is assigned to a shard, this
 276 will also forward the call to the responsible shard. This means that if, for
 277 example, a host is being locked using this function, this change will also
 278 propagate to the shard.
279
280 @param id: id of the host to modify.
281 @param **data: key=value pairs of values to set on the host.
282 """
showardbe0d8692009-08-20 23:42:44 +0000283 rpc_utils.check_modify_host(data)
showardce7c0922009-09-11 18:39:24 +0000284 host = models.Host.smart_get(id)
Jakob Juelich50e91f72014-10-01 12:43:23 -0700285
showardce7c0922009-09-11 18:39:24 +0000286 rpc_utils.check_modify_host_locking(host, data)
287 host.update_object(data)
mblighe8819cd2008-02-15 16:48:40 +0000288
289
showard276f9442009-05-20 00:33:16 +0000290def modify_hosts(host_filter_data, update_data):
Jakob Juelich50e91f72014-10-01 12:43:23 -0700291 """Modify local attributes of multiple hosts.
292
 293 If this is called on the master, but one of the hosts that match the
294 filters is assigned to a shard, this will also forward the call to the
295 responsible shard.
296
297 The filters are always applied on the master, not on the shards. This means
298 if the states of a host differ on the master and a shard, the state on the
299 master will be used. I.e. this means:
300 A host was synced to Shard 1. On Shard 1 the status of the host was set to
301 'Repair Failed'.
302 - A call to modify_hosts with host_filter_data={'status': 'Ready'} will
303 update the host (both on the shard and on the master), because the state
304 of the host as the master knows it is still 'Ready'.
 305 - A call to modify_hosts with host_filter_data={'status': 'Repair Failed'}
306 will not update the host, because the filter doesn't apply on the master.
307
showardbe0d8692009-08-20 23:42:44 +0000308 @param host_filter_data: Filters out which hosts to modify.
309 @param update_data: A dictionary with the changes to make to the hosts.
showard276f9442009-05-20 00:33:16 +0000310 """
showardbe0d8692009-08-20 23:42:44 +0000311 rpc_utils.check_modify_host(update_data)
showard276f9442009-05-20 00:33:16 +0000312 hosts = models.Host.query_objects(host_filter_data)
Jakob Juelich50e91f72014-10-01 12:43:23 -0700313
314 affected_shard_hostnames = set()
315 affected_host_ids = []
316
Alex Miller9658a952013-05-14 16:40:02 -0700317 # Check all hosts before changing data for exception safety.
318 for host in hosts:
319 rpc_utils.check_modify_host_locking(host, update_data)
Jakob Juelich50e91f72014-10-01 12:43:23 -0700320 if host.shard:
Prashanth Balasubramanian8c98ac12014-12-23 11:26:44 -0800321 affected_shard_hostnames.add(host.shard.rpc_hostname())
Jakob Juelich50e91f72014-10-01 12:43:23 -0700322 affected_host_ids.append(host.id)
323
Prashanth Balasubramanian8c98ac12014-12-23 11:26:44 -0800324 if not utils.is_shard():
Jakob Juelich50e91f72014-10-01 12:43:23 -0700325 # Caution: Changing the filter from the original here. See docstring.
326 rpc_utils.run_rpc_on_multiple_hostnames(
327 'modify_hosts', affected_shard_hostnames,
328 host_filter_data={'id__in': affected_host_ids},
329 update_data=update_data)
330
showard276f9442009-05-20 00:33:16 +0000331 for host in hosts:
332 host.update_object(update_data)
333
334
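# A minimal usage sketch (illustrative only) of a bulk host update. The
# filter is evaluated on the master, as described in the docstring above;
# the lock reason text is hypothetical and assumes the Host lock_reason
# field is accepted by check_modify_host().
def _example_modify_hosts_usage():
    modify_hosts(host_filter_data={'status': 'Ready'},
                 update_data={'locked': True,
                              'lock_reason': 'example maintenance window'})
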
MK Ryufbb002c2015-06-08 14:13:16 -0700335def add_labels_to_host(id, labels):
336 """Adds labels to a given host only in local DB.
showardcafd16e2009-05-29 18:37:49 +0000337
MK Ryufbb002c2015-06-08 14:13:16 -0700338 @param id: id or hostname for a host.
339 @param labels: ids or names for labels.
340 """
341 label_objs = models.Label.smart_get_bulk(labels)
342 models.Host.smart_get(id).labels.add(*label_objs)
343
344
345@rpc_utils.route_rpc_to_master
346def host_add_labels(id, labels):
347 """Adds labels to a given host.
348
349 @param id: id or hostname for a host.
350 @param labels: ids or names for labels.
351
352 @raises ValidationError: If adding more than one platform label.
353 """
354 label_objs = models.Label.smart_get_bulk(labels)
355 platforms = [label.name for label in label_objs if label.platform]
showardcafd16e2009-05-29 18:37:49 +0000356 if len(platforms) > 1:
357 raise model_logic.ValidationError(
358 {'labels': 'Adding more than one platform label: %s' %
359 ', '.join(platforms)})
MK Ryufbb002c2015-06-08 14:13:16 -0700360
361 host_obj = models.Host.smart_get(id)
showardcafd16e2009-05-29 18:37:49 +0000362 if len(platforms) == 1:
MK Ryufbb002c2015-06-08 14:13:16 -0700363 models.Host.check_no_platform([host_obj])
364
365 rpc_utils.fanout_rpc([host_obj], 'add_labels_to_host', False,
366 id=id, labels=labels)
367 add_labels_to_host(id, labels)
mblighe8819cd2008-02-15 16:48:40 +0000368
369
MK Ryufbb002c2015-06-08 14:13:16 -0700370def remove_labels_from_host(id, labels):
371 """Removes labels from a given host only in local DB.
372
373 @param id: id or hostname for a host.
374 @param labels: ids or names for labels.
375 """
376 label_objs = models.Label.smart_get_bulk(labels)
377 models.Host.smart_get(id).labels.remove(*label_objs)
378
379
380@rpc_utils.route_rpc_to_master
mblighe8819cd2008-02-15 16:48:40 +0000381def host_remove_labels(id, labels):
MK Ryufbb002c2015-06-08 14:13:16 -0700382 """Removes labels from a given host.
383
384 @param id: id or hostname for a host.
385 @param labels: ids or names for labels.
386 """
387 host_obj = models.Host.smart_get(id)
388 rpc_utils.fanout_rpc([host_obj], 'remove_labels_from_host', False,
389 id=id, labels=labels)
390 remove_labels_from_host(id, labels)
mblighe8819cd2008-02-15 16:48:40 +0000391
392
MK Ryuacf35922014-10-03 14:56:49 -0700393def get_host_attribute(attribute, **host_filter_data):
394 """
395 @param attribute: string name of attribute
396 @param host_filter_data: filter data to apply to Hosts to choose hosts to
397 act upon
398 """
399 hosts = rpc_utils.get_host_query((), False, False, True, host_filter_data)
400 hosts = list(hosts)
401 models.Host.objects.populate_relationships(hosts, models.HostAttribute,
402 'attribute_list')
403 host_attr_dicts = []
404 for host_obj in hosts:
405 for attr_obj in host_obj.attribute_list:
406 if attr_obj.attribute == attribute:
407 host_attr_dicts.append(attr_obj.get_object_dict())
408 return rpc_utils.prepare_for_serialization(host_attr_dicts)
409
410
showard0957a842009-05-11 19:25:08 +0000411def set_host_attribute(attribute, value, **host_filter_data):
412 """
MK Ryu26f0c932015-05-28 18:14:33 -0700413 @param attribute: string name of attribute
414 @param value: string, or None to delete an attribute
415 @param host_filter_data: filter data to apply to Hosts to choose hosts to
416 act upon
showard0957a842009-05-11 19:25:08 +0000417 """
418 assert host_filter_data # disallow accidental actions on all hosts
419 hosts = models.Host.query_objects(host_filter_data)
420 models.AclGroup.check_for_acl_violation_hosts(hosts)
421
MK Ryu26f0c932015-05-28 18:14:33 -0700422 # Master forwards this RPC to shards.
423 if not utils.is_shard():
424 rpc_utils.fanout_rpc(hosts, 'set_host_attribute', False,
425 attribute=attribute, value=value, **host_filter_data)
426
showard0957a842009-05-11 19:25:08 +0000427 for host in hosts:
showardf8b19042009-05-12 17:22:49 +0000428 host.set_or_delete_attribute(attribute, value)
showard0957a842009-05-11 19:25:08 +0000429
430
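# A minimal usage sketch (illustrative only) of writing and reading back a
# host attribute. Assumes a reachable AFE database; the attribute name and
# hostname are hypothetical.
def _example_host_attribute_usage():
    set_host_attribute('example_attribute', 'some-value',
                       hostname='host1.cros')
    # Passing value=None would delete the attribute instead of setting it.
    return get_host_attribute('example_attribute', hostname='host1.cros')
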
Jakob Juelich50e91f72014-10-01 12:43:23 -0700431@rpc_utils.forward_single_host_rpc_to_shard
mblighe8819cd2008-02-15 16:48:40 +0000432def delete_host(id):
jadmanski0afbb632008-06-06 21:10:57 +0000433 models.Host.smart_get(id).delete()
mblighe8819cd2008-02-15 16:48:40 +0000434
435
showard87cc38f2009-08-20 23:37:04 +0000436def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
showard8aa84fc2009-09-16 17:17:55 +0000437 exclude_atomic_group_hosts=False, valid_only=True, **filter_data):
showard87cc38f2009-08-20 23:37:04 +0000438 """
439 @param multiple_labels: match hosts in all of the labels given. Should
440 be a list of label names.
441 @param exclude_only_if_needed_labels: Exclude hosts with at least one
442 "only_if_needed" label applied.
443 @param exclude_atomic_group_hosts: Exclude hosts that have one or more
444 atomic group labels associated with them.
jadmanski0afbb632008-06-06 21:10:57 +0000445 """
showard43a3d262008-11-12 18:17:05 +0000446 hosts = rpc_utils.get_host_query(multiple_labels,
447 exclude_only_if_needed_labels,
showard87cc38f2009-08-20 23:37:04 +0000448 exclude_atomic_group_hosts,
showard8aa84fc2009-09-16 17:17:55 +0000449 valid_only, filter_data)
showard0957a842009-05-11 19:25:08 +0000450 hosts = list(hosts)
451 models.Host.objects.populate_relationships(hosts, models.Label,
452 'label_list')
453 models.Host.objects.populate_relationships(hosts, models.AclGroup,
454 'acl_list')
455 models.Host.objects.populate_relationships(hosts, models.HostAttribute,
456 'attribute_list')
showard43a3d262008-11-12 18:17:05 +0000457 host_dicts = []
458 for host_obj in hosts:
459 host_dict = host_obj.get_object_dict()
showard0957a842009-05-11 19:25:08 +0000460 host_dict['labels'] = [label.name for label in host_obj.label_list]
showard909c9142009-07-07 20:54:42 +0000461 host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
462 find_platform_and_atomic_group(host_obj))
showard0957a842009-05-11 19:25:08 +0000463 host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
464 host_dict['attributes'] = dict((attribute.attribute, attribute.value)
465 for attribute in host_obj.attribute_list)
showard43a3d262008-11-12 18:17:05 +0000466 host_dicts.append(host_dict)
467 return rpc_utils.prepare_for_serialization(host_dicts)
mblighe8819cd2008-02-15 16:48:40 +0000468
469
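# A minimal usage sketch (illustrative only): querying hosts that carry all
# of the given labels. The label names are hypothetical; extra keyword args
# are ordinary field filters on Host.
def _example_get_hosts_usage():
    return get_hosts(multiple_labels=['board:link', 'pool:suites'],
                     status='Ready')
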
showard87cc38f2009-08-20 23:37:04 +0000470def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
showard8aa84fc2009-09-16 17:17:55 +0000471 exclude_atomic_group_hosts=False, valid_only=True,
472 **filter_data):
showard87cc38f2009-08-20 23:37:04 +0000473 """
474 Same parameters as get_hosts().
475
476 @returns The number of matching hosts.
477 """
showard43a3d262008-11-12 18:17:05 +0000478 hosts = rpc_utils.get_host_query(multiple_labels,
479 exclude_only_if_needed_labels,
showard87cc38f2009-08-20 23:37:04 +0000480 exclude_atomic_group_hosts,
showard8aa84fc2009-09-16 17:17:55 +0000481 valid_only, filter_data)
showard43a3d262008-11-12 18:17:05 +0000482 return hosts.count()
showard1385b162008-03-13 15:59:40 +0000483
mblighe8819cd2008-02-15 16:48:40 +0000484
485# tests
486
showard909c7a62008-07-15 21:52:38 +0000487def add_test(name, test_type, path, author=None, dependencies=None,
showard3d9899a2008-07-31 02:11:58 +0000488 experimental=True, run_verify=None, test_class=None,
showard909c7a62008-07-15 21:52:38 +0000489 test_time=None, test_category=None, description=None,
490 sync_count=1):
jadmanski0afbb632008-06-06 21:10:57 +0000491 return models.Test.add_object(name=name, test_type=test_type, path=path,
showard909c7a62008-07-15 21:52:38 +0000492 author=author, dependencies=dependencies,
493 experimental=experimental,
494 run_verify=run_verify, test_time=test_time,
495 test_category=test_category,
496 sync_count=sync_count,
jadmanski0afbb632008-06-06 21:10:57 +0000497 test_class=test_class,
498 description=description).id
mblighe8819cd2008-02-15 16:48:40 +0000499
500
501def modify_test(id, **data):
jadmanski0afbb632008-06-06 21:10:57 +0000502 models.Test.smart_get(id).update_object(data)
mblighe8819cd2008-02-15 16:48:40 +0000503
504
505def delete_test(id):
jadmanski0afbb632008-06-06 21:10:57 +0000506 models.Test.smart_get(id).delete()
mblighe8819cd2008-02-15 16:48:40 +0000507
508
509def get_tests(**filter_data):
jadmanski0afbb632008-06-06 21:10:57 +0000510 return rpc_utils.prepare_for_serialization(
511 models.Test.list_objects(filter_data))
mblighe8819cd2008-02-15 16:48:40 +0000512
513
Moises Osorio2dc7a102014-12-02 18:24:02 -0800514@_timer.decorate
515def get_tests_status_counts_by_job_name_label(job_name_prefix, label_name):
516 """Gets the counts of all passed and failed tests from the matching jobs.
517
518 @param job_name_prefix: Name prefix of the jobs to get the summary from, e.g.,
519 'butterfly-release/R40-6457.21.0/bvt-cq/'.
520 @param label_name: Label that must be set in the jobs, e.g.,
521 'cros-version:butterfly-release/R40-6457.21.0'.
522
523 @returns A summary of the counts of all the passed and failed tests.
524 """
525 job_ids = list(models.Job.objects.filter(
526 name__startswith=job_name_prefix,
527 dependency_labels__name=label_name).values_list(
528 'pk', flat=True))
529 summary = {'passed': 0, 'failed': 0}
530 if not job_ids:
531 return summary
532
533 counts = (tko_models.TestView.objects.filter(
534 afe_job_id__in=job_ids).exclude(
535 test_name='SERVER_JOB').exclude(
536 test_name__startswith='CLIENT_JOB').values(
537 'status').annotate(
538 count=Count('status')))
539 for status in counts:
540 if status['status'] == 'GOOD':
541 summary['passed'] += status['count']
542 else:
543 summary['failed'] += status['count']
544 return summary
545
546
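# A minimal usage sketch (illustrative only) of summarizing pass/fail counts
# for a run, using the prefix and label from the docstring example above.
# Assumes TKO results exist for the matching jobs.
def _example_test_status_counts_usage():
    return get_tests_status_counts_by_job_name_label(
            'butterfly-release/R40-6457.21.0/bvt-cq/',
            'cros-version:butterfly-release/R40-6457.21.0')
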
showard2b9a88b2008-06-13 20:55:03 +0000547# profilers
548
549def add_profiler(name, description=None):
550 return models.Profiler.add_object(name=name, description=description).id
551
552
553def modify_profiler(id, **data):
554 models.Profiler.smart_get(id).update_object(data)
555
556
557def delete_profiler(id):
558 models.Profiler.smart_get(id).delete()
559
560
561def get_profilers(**filter_data):
562 return rpc_utils.prepare_for_serialization(
563 models.Profiler.list_objects(filter_data))
564
565
mblighe8819cd2008-02-15 16:48:40 +0000566# users
567
568def add_user(login, access_level=None):
jadmanski0afbb632008-06-06 21:10:57 +0000569 return models.User.add_object(login=login, access_level=access_level).id
mblighe8819cd2008-02-15 16:48:40 +0000570
571
572def modify_user(id, **data):
jadmanski0afbb632008-06-06 21:10:57 +0000573 models.User.smart_get(id).update_object(data)
mblighe8819cd2008-02-15 16:48:40 +0000574
575
576def delete_user(id):
jadmanski0afbb632008-06-06 21:10:57 +0000577 models.User.smart_get(id).delete()
mblighe8819cd2008-02-15 16:48:40 +0000578
579
580def get_users(**filter_data):
jadmanski0afbb632008-06-06 21:10:57 +0000581 return rpc_utils.prepare_for_serialization(
582 models.User.list_objects(filter_data))
mblighe8819cd2008-02-15 16:48:40 +0000583
584
585# acl groups
586
587def add_acl_group(name, description=None):
showard04f2cd82008-07-25 20:53:31 +0000588 group = models.AclGroup.add_object(name=name, description=description)
showard64a95952010-01-13 21:27:16 +0000589 group.users.add(models.User.current_user())
showard04f2cd82008-07-25 20:53:31 +0000590 return group.id
mblighe8819cd2008-02-15 16:48:40 +0000591
592
593def modify_acl_group(id, **data):
showard04f2cd82008-07-25 20:53:31 +0000594 group = models.AclGroup.smart_get(id)
595 group.check_for_acl_violation_acl_group()
596 group.update_object(data)
597 group.add_current_user_if_empty()
mblighe8819cd2008-02-15 16:48:40 +0000598
599
600def acl_group_add_users(id, users):
jadmanski0afbb632008-06-06 21:10:57 +0000601 group = models.AclGroup.smart_get(id)
showard04f2cd82008-07-25 20:53:31 +0000602 group.check_for_acl_violation_acl_group()
showardbe3ec042008-11-12 18:16:07 +0000603 users = models.User.smart_get_bulk(users)
jadmanski0afbb632008-06-06 21:10:57 +0000604 group.users.add(*users)
mblighe8819cd2008-02-15 16:48:40 +0000605
606
607def acl_group_remove_users(id, users):
jadmanski0afbb632008-06-06 21:10:57 +0000608 group = models.AclGroup.smart_get(id)
showard04f2cd82008-07-25 20:53:31 +0000609 group.check_for_acl_violation_acl_group()
showardbe3ec042008-11-12 18:16:07 +0000610 users = models.User.smart_get_bulk(users)
jadmanski0afbb632008-06-06 21:10:57 +0000611 group.users.remove(*users)
showard04f2cd82008-07-25 20:53:31 +0000612 group.add_current_user_if_empty()
mblighe8819cd2008-02-15 16:48:40 +0000613
614
615def acl_group_add_hosts(id, hosts):
jadmanski0afbb632008-06-06 21:10:57 +0000616 group = models.AclGroup.smart_get(id)
showard04f2cd82008-07-25 20:53:31 +0000617 group.check_for_acl_violation_acl_group()
showardbe3ec042008-11-12 18:16:07 +0000618 hosts = models.Host.smart_get_bulk(hosts)
jadmanski0afbb632008-06-06 21:10:57 +0000619 group.hosts.add(*hosts)
showard08f981b2008-06-24 21:59:03 +0000620 group.on_host_membership_change()
mblighe8819cd2008-02-15 16:48:40 +0000621
622
623def acl_group_remove_hosts(id, hosts):
jadmanski0afbb632008-06-06 21:10:57 +0000624 group = models.AclGroup.smart_get(id)
showard04f2cd82008-07-25 20:53:31 +0000625 group.check_for_acl_violation_acl_group()
showardbe3ec042008-11-12 18:16:07 +0000626 hosts = models.Host.smart_get_bulk(hosts)
jadmanski0afbb632008-06-06 21:10:57 +0000627 group.hosts.remove(*hosts)
showard08f981b2008-06-24 21:59:03 +0000628 group.on_host_membership_change()
mblighe8819cd2008-02-15 16:48:40 +0000629
630
631def delete_acl_group(id):
jadmanski0afbb632008-06-06 21:10:57 +0000632 models.AclGroup.smart_get(id).delete()
mblighe8819cd2008-02-15 16:48:40 +0000633
634
635def get_acl_groups(**filter_data):
jadmanski0afbb632008-06-06 21:10:57 +0000636 acl_groups = models.AclGroup.list_objects(filter_data)
637 for acl_group in acl_groups:
638 acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
639 acl_group['users'] = [user.login
640 for user in acl_group_obj.users.all()]
641 acl_group['hosts'] = [host.hostname
642 for host in acl_group_obj.hosts.all()]
643 return rpc_utils.prepare_for_serialization(acl_groups)
mblighe8819cd2008-02-15 16:48:40 +0000644
645
646# jobs
647
mbligh120351e2009-01-24 01:40:45 +0000648def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
showard91f85102009-10-12 20:34:52 +0000649 client_control_file='', use_container=False,
Matthew Sartori10438092015-06-24 14:30:18 -0700650 profile_only=None, upload_kernel_config=False,
651 db_tests=True):
jadmanski0afbb632008-06-06 21:10:57 +0000652 """
mbligh120351e2009-01-24 01:40:45 +0000653 Generates a client-side control file to load a kernel and run tests.
654
Matthew Sartori10438092015-06-24 14:30:18 -0700655 @param tests List of tests to run. See db_tests for more information.
mbligha3c58d22009-08-24 22:01:51 +0000656 @param kernel A list of kernel info dictionaries configuring which kernels
657 to boot for this job and other options for them
mbligh120351e2009-01-24 01:40:45 +0000658 @param label Name of label to grab kernel config from.
659 @param profilers List of profilers to activate during the job.
660 @param client_control_file The contents of a client-side control file to
661 run at the end of all tests. If this is supplied, all tests must be
662 client side.
663 TODO: in the future we should support server control files directly
664 to wrap with a kernel. That'll require changing the parameter
665 name and adding a boolean to indicate if it is a client or server
666 control file.
667 @param use_container unused argument today. TODO: Enable containers
668 on the host during a client side test.
showard91f85102009-10-12 20:34:52 +0000669 @param profile_only A boolean that indicates what default profile_only
670 mode to use in the control file. Passing None will generate a
 671 control file that does not explicitly set the default mode at all.
showard232b7ae2009-11-10 00:46:48 +0000672 @param upload_kernel_config: if enabled it will generate server control
673 file code that uploads the kernel config file to the client and
674 tells the client of the new (local) path when compiling the kernel;
675 the tests must be server side tests
Matthew Sartori10438092015-06-24 14:30:18 -0700676 @param db_tests: if True, the test object can be found in the database
677 backing the test model. In this case, tests is a tuple
678 of test IDs which are used to retrieve the test objects
679 from the database. If False, tests is a tuple of test
680 dictionaries stored client-side in the AFE.
mbligh120351e2009-01-24 01:40:45 +0000681
682 @returns a dict with the following keys:
683 control_file: str, The control file text.
684 is_server: bool, is the control file a server-side control file?
685 synch_count: How many machines the job uses per autoserv execution.
686 synch_count == 1 means the job is asynchronous.
687 dependencies: A list of the names of labels on which the job depends.
688 """
showardd86debe2009-06-10 17:37:56 +0000689 if not tests and not client_control_file:
showard2bab8f42008-11-12 18:15:22 +0000690 return dict(control_file='', is_server=False, synch_count=1,
showard989f25d2008-10-01 11:38:11 +0000691 dependencies=[])
mblighe8819cd2008-02-15 16:48:40 +0000692
showard989f25d2008-10-01 11:38:11 +0000693 cf_info, test_objects, profiler_objects, label = (
showard2b9a88b2008-06-13 20:55:03 +0000694 rpc_utils.prepare_generate_control_file(tests, kernel, label,
Matthew Sartori10438092015-06-24 14:30:18 -0700695 profilers, db_tests))
showard989f25d2008-10-01 11:38:11 +0000696 cf_info['control_file'] = control_file.generate_control(
mbligha3c58d22009-08-24 22:01:51 +0000697 tests=test_objects, kernels=kernel, platform=label,
mbligh120351e2009-01-24 01:40:45 +0000698 profilers=profiler_objects, is_server=cf_info['is_server'],
showard232b7ae2009-11-10 00:46:48 +0000699 client_control_file=client_control_file, profile_only=profile_only,
700 upload_kernel_config=upload_kernel_config)
showard989f25d2008-10-01 11:38:11 +0000701 return cf_info
mblighe8819cd2008-02-15 16:48:40 +0000702
703
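# A minimal usage sketch (illustrative only): generating a client-side
# control file for a single test, assuming 'sleeptest' exists in the AFE
# test database (a test id would also work).
def _example_generate_control_file_usage():
    cf_info = generate_control_file(tests=('sleeptest',))
    # cf_info carries 'control_file', 'is_server', 'synch_count' and
    # 'dependencies', as described in the docstring above.
    return cf_info['control_file']
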
jamesren4a41e012010-07-16 22:33:48 +0000704def create_parameterized_job(name, priority, test, parameters, kernel=None,
705 label=None, profilers=(), profiler_parameters=None,
706 use_container=False, profile_only=None,
707 upload_kernel_config=False, hosts=(),
708 meta_hosts=(), one_time_hosts=(),
709 atomic_group_name=None, synch_count=None,
710 is_template=False, timeout=None,
Simran Basi7e605742013-11-12 13:43:36 -0800711 timeout_mins=None, max_runtime_mins=None,
712 run_verify=False, email_list='', dependencies=(),
713 reboot_before=None, reboot_after=None,
714 parse_failed_repair=None, hostless=False,
Dan Shiec1d47d2015-02-13 11:38:13 -0800715 keyvals=None, drone_set=None, run_reset=True,
Dan Shi2a5297b2015-07-23 17:03:29 -0700716 require_ssp=None):
jamesren4a41e012010-07-16 22:33:48 +0000717 """
718 Creates and enqueues a parameterized job.
719
 720 Most parameters are a combination of the parameters for generate_control_file()
721 and create_job(), with the exception of:
722
723 @param test name or ID of the test to run
724 @param parameters a map of parameter name ->
725 tuple of (param value, param type)
726 @param profiler_parameters a dictionary of parameters for the profilers:
727 key: profiler name
728 value: dict of param name -> tuple of
729 (param value,
730 param type)
731 """
732 # Save the values of the passed arguments here. What we're going to do with
733 # them is pass them all to rpc_utils.get_create_job_common_args(), which
734 # will extract the subset of these arguments that apply for
735 # rpc_utils.create_job_common(), which we then pass in to that function.
736 args = locals()
737
738 # Set up the parameterized job configs
739 test_obj = models.Test.smart_get(test)
Aviv Keshet3dd8beb2013-05-13 17:36:04 -0700740 control_type = test_obj.test_type
jamesren4a41e012010-07-16 22:33:48 +0000741
742 try:
743 label = models.Label.smart_get(label)
744 except models.Label.DoesNotExist:
745 label = None
746
747 kernel_objs = models.Kernel.create_kernels(kernel)
748 profiler_objs = [models.Profiler.smart_get(profiler)
749 for profiler in profilers]
750
751 parameterized_job = models.ParameterizedJob.objects.create(
752 test=test_obj, label=label, use_container=use_container,
753 profile_only=profile_only,
754 upload_kernel_config=upload_kernel_config)
755 parameterized_job.kernels.add(*kernel_objs)
756
757 for profiler in profiler_objs:
758 parameterized_profiler = models.ParameterizedJobProfiler.objects.create(
759 parameterized_job=parameterized_job,
760 profiler=profiler)
761 profiler_params = profiler_parameters.get(profiler.name, {})
762 for name, (value, param_type) in profiler_params.iteritems():
763 models.ParameterizedJobProfilerParameter.objects.create(
764 parameterized_job_profiler=parameterized_profiler,
765 parameter_name=name,
766 parameter_value=value,
767 parameter_type=param_type)
768
769 try:
770 for parameter in test_obj.testparameter_set.all():
771 if parameter.name in parameters:
772 param_value, param_type = parameters.pop(parameter.name)
773 parameterized_job.parameterizedjobparameter_set.create(
774 test_parameter=parameter, parameter_value=param_value,
775 parameter_type=param_type)
776
777 if parameters:
778 raise Exception('Extra parameters remain: %r' % parameters)
779
780 return rpc_utils.create_job_common(
781 parameterized_job=parameterized_job.id,
782 control_type=control_type,
783 **rpc_utils.get_create_job_common_args(args))
784 except:
785 parameterized_job.delete()
786 raise
787
788
Simran Basib6ec8ae2014-04-23 12:05:08 -0700789def create_job_page_handler(name, priority, control_file, control_type,
Dan Shid215dbe2015-06-18 16:14:59 -0700790 image=None, hostless=False, firmware_rw_build=None,
791 firmware_ro_build=None, test_source_build=None,
792 **kwargs):
Simran Basib6ec8ae2014-04-23 12:05:08 -0700793 """\
794 Create and enqueue a job.
795
796 @param name name of this job
797 @param priority Integer priority of this job. Higher is more important.
798 @param control_file String contents of the control file.
799 @param control_type Type of control file, Client or Server.
Dan Shid215dbe2015-06-18 16:14:59 -0700800 @param image: ChromeOS build to be installed on the DUT. Defaults to None.
 801 @param firmware_rw_build: Firmware build to update RW firmware. Defaults to
 802 None, i.e., RW firmware will not be updated.
 803 @param firmware_ro_build: Firmware build to update RO firmware. Defaults to
 804 None, i.e., RO firmware will not be updated.
 805 @param test_source_build: Build to be used to retrieve test code. Defaults
 806 to None.
Simran Basib6ec8ae2014-04-23 12:05:08 -0700807 @param kwargs extra args that will be required by create_suite_job or
808 create_job.
809
810 @returns The created Job id number.
811 """
812 control_file = rpc_utils.encode_ascii(control_file)
Jiaxi Luodd67beb2014-07-18 16:28:31 -0700813 if not control_file:
814 raise model_logic.ValidationError({
815 'control_file' : "Control file cannot be empty"})
Simran Basib6ec8ae2014-04-23 12:05:08 -0700816
817 if image and hostless:
Dan Shid215dbe2015-06-18 16:14:59 -0700818 builds = {}
819 builds[provision.CROS_VERSION_PREFIX] = image
820 if firmware_rw_build:
Dan Shi0723bf52015-06-24 10:52:38 -0700821 builds[provision.FW_RW_VERSION_PREFIX] = firmware_rw_build
Dan Shid215dbe2015-06-18 16:14:59 -0700822 if firmware_ro_build:
823 builds[provision.FW_RO_VERSION_PREFIX] = firmware_ro_build
Simran Basib6ec8ae2014-04-23 12:05:08 -0700824 return site_rpc_interface.create_suite_job(
825 name=name, control_file=control_file, priority=priority,
Dan Shid215dbe2015-06-18 16:14:59 -0700826 builds=builds, test_source_build=test_source_build, **kwargs)
Simran Basib6ec8ae2014-04-23 12:05:08 -0700827 return create_job(name, priority, control_file, control_type, image=image,
828 hostless=hostless, **kwargs)
829
830
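# A minimal usage sketch (illustrative only): supplying an image together
# with hostless=True routes the request through
# site_rpc_interface.create_suite_job(). The build names are hypothetical,
# and any extra kwargs that create_suite_job() requires would be passed
# through **kwargs.
def _example_create_job_page_handler_usage(control_file_text):
    return create_job_page_handler(
            name='example-bvt', priority=30,
            control_file=control_file_text, control_type='Server',
            image='example-board-release/R99-1234.0.0', hostless=True,
            test_source_build='example-board-release/R99-1234.0.0')
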
MK Ryue301eb72015-06-25 12:51:02 -0700831@rpc_utils.route_rpc_to_master
showard12f3e322009-05-13 21:27:42 +0000832def create_job(name, priority, control_file, control_type,
833 hosts=(), meta_hosts=(), one_time_hosts=(),
834 atomic_group_name=None, synch_count=None, is_template=False,
Simran Basi7e605742013-11-12 13:43:36 -0800835 timeout=None, timeout_mins=None, max_runtime_mins=None,
836 run_verify=False, email_list='', dependencies=(),
837 reboot_before=None, reboot_after=None, parse_failed_repair=None,
838 hostless=False, keyvals=None, drone_set=None, image=None,
Dan Shiec1d47d2015-02-13 11:38:13 -0800839 parent_job_id=None, test_retry=0, run_reset=True,
840 require_ssp=None, args=(), **kwargs):
jadmanski0afbb632008-06-06 21:10:57 +0000841 """\
842 Create and enqueue a job.
mblighe8819cd2008-02-15 16:48:40 +0000843
showarda1e74b32009-05-12 17:32:04 +0000844 @param name name of this job
Alex Miller7d658cf2013-09-04 16:00:35 -0700845 @param priority Integer priority of this job. Higher is more important.
showarda1e74b32009-05-12 17:32:04 +0000846 @param control_file String contents of the control file.
847 @param control_type Type of control file, Client or Server.
848 @param synch_count How many machines the job uses per autoserv execution.
Jiaxi Luo90190c92014-06-18 12:35:57 -0700849 synch_count == 1 means the job is asynchronous. If an atomic group is
850 given this value is treated as a minimum.
showarda1e74b32009-05-12 17:32:04 +0000851 @param is_template If true then create a template job.
852 @param timeout Hours after this call returns until the job times out.
Simran Basi7e605742013-11-12 13:43:36 -0800853 @param timeout_mins Minutes after this call returns until the job times
Jiaxi Luo90190c92014-06-18 12:35:57 -0700854 out.
Simran Basi34217022012-11-06 13:43:15 -0800855 @param max_runtime_mins Minutes from job starting time until job times out
showarda1e74b32009-05-12 17:32:04 +0000856 @param run_verify Should the host be verified before running the test?
857 @param email_list String containing emails to mail when the job is done
858 @param dependencies List of label names on which this job depends
859 @param reboot_before Never, If dirty, or Always
860 @param reboot_after Never, If all tests passed, or Always
861 @param parse_failed_repair if true, results of failed repairs launched by
Jiaxi Luo90190c92014-06-18 12:35:57 -0700862 this job will be parsed as part of the job.
showarda9545c02009-12-18 22:44:26 +0000863 @param hostless if true, create a hostless job
showardc1a98d12010-01-15 00:22:22 +0000864 @param keyvals dict of keyvals to associate with the job
showarda1e74b32009-05-12 17:32:04 +0000865 @param hosts List of hosts to run job on.
866 @param meta_hosts List where each entry is a label name, and for each entry
Jiaxi Luo90190c92014-06-18 12:35:57 -0700867 one host will be chosen from that label to run the job on.
showarda1e74b32009-05-12 17:32:04 +0000868 @param one_time_hosts List of hosts not in the database to run the job on.
869 @param atomic_group_name The name of an atomic group to schedule the job on.
jamesren76fcf192010-04-21 20:39:50 +0000870 @param drone_set The name of the drone set to run this test on.
Paul Pendlebury5a8c6ad2011-02-01 07:20:17 -0800871 @param image OS image to install before running job.
Aviv Keshet0b9cfc92013-02-05 11:36:02 -0800872 @param parent_job_id id of a job considered to be parent of created job.
Simran Basib6ec8ae2014-04-23 12:05:08 -0700873 @param test_retry Number of times to retry test if the test did not
Jiaxi Luo90190c92014-06-18 12:35:57 -0700874 complete successfully. (optional, default: 0)
Simran Basib6ec8ae2014-04-23 12:05:08 -0700875 @param run_reset Should the host be reset before running the test?
Dan Shiec1d47d2015-02-13 11:38:13 -0800876 @param require_ssp Set to True to require server-side packaging to run the
877 test. If it's set to None, drone will still try to run
878 the server side with server-side packaging. If the
879 autotest-server package doesn't exist for the build or
880 image is not set, drone will run the test without server-
881 side packaging. Default is None.
Jiaxi Luo90190c92014-06-18 12:35:57 -0700882 @param args A list of args to be injected into control file.
Simran Basib6ec8ae2014-04-23 12:05:08 -0700883 @param kwargs extra keyword args. NOT USED.
showardc92da832009-04-07 18:14:34 +0000884
885 @returns The created Job id number.
jadmanski0afbb632008-06-06 21:10:57 +0000886 """
Jiaxi Luo90190c92014-06-18 12:35:57 -0700887 if args:
888 control_file = tools.inject_vars({'args': args}, control_file)
889
Simran Basiab5a1bf2014-05-28 15:39:44 -0700890 if image is None:
891 return rpc_utils.create_job_common(
892 **rpc_utils.get_create_job_common_args(locals()))
893
894 # When image is supplied use a known parameterized test already in the
895 # database to pass the OS image path from the front end, through the
896 # scheduler, and finally to autoserv as the --image parameter.
897
898 # The test autoupdate_ParameterizedJob is in afe_autotests and used to
899 # instantiate a Test object and from there a ParameterizedJob.
900 known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
901 known_parameterized_job = models.ParameterizedJob.objects.create(
902 test=known_test_obj)
903
904 # autoupdate_ParameterizedJob has a single parameter, the image parameter,
905 # stored in the table afe_test_parameters. We retrieve and set this
906 # instance of the parameter to the OS image path.
907 image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
908 name='image')
909 known_parameterized_job.parameterizedjobparameter_set.create(
910 test_parameter=image_parameter, parameter_value=image,
911 parameter_type='string')
912
Dan Shid215dbe2015-06-18 16:14:59 -0700913 # TODO(crbug.com/502638): save firmware build etc to parameterized_job.
914
Simran Basiab5a1bf2014-05-28 15:39:44 -0700915 # By passing a parameterized_job to create_job_common the job entry in
916 # the afe_jobs table will have the field parameterized_job_id set.
917 # The scheduler uses this id in the afe_parameterized_jobs table to
918 # match this job to our known test, and then with the
919 # afe_parameterized_job_parameters table to get the actual image path.
jamesren4a41e012010-07-16 22:33:48 +0000920 return rpc_utils.create_job_common(
Simran Basiab5a1bf2014-05-28 15:39:44 -0700921 parameterized_job=known_parameterized_job.id,
jamesren4a41e012010-07-16 22:33:48 +0000922 **rpc_utils.get_create_job_common_args(locals()))
mblighe8819cd2008-02-15 16:48:40 +0000923
924
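# A minimal usage sketch (illustrative only) of a plain, non-parameterized
# job. Assumes a reachable AFE database; the control file text and meta_host
# label are hypothetical.
def _example_create_job_usage():
    control = "job.run_test('sleeptest')\n"
    return create_job(name='example_sleeptest', priority=30,
                      control_file=control, control_type='Client',
                      meta_hosts=['board:link'])
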
showard9dbdcda2008-10-14 17:34:36 +0000925def abort_host_queue_entries(**filter_data):
jadmanski0afbb632008-06-06 21:10:57 +0000926 """\
showard9dbdcda2008-10-14 17:34:36 +0000927 Abort a set of host queue entries.
Fang Deng63b0e452014-12-19 14:38:15 -0800928
929 @return: A list of dictionaries, each contains information
930 about an aborted HQE.
jadmanski0afbb632008-06-06 21:10:57 +0000931 """
showard9dbdcda2008-10-14 17:34:36 +0000932 query = models.HostQueueEntry.query_objects(filter_data)
beepsfaecbce2013-10-29 11:35:10 -0700933
 934 # Don't allow aborts on:
935 # 1. Jobs that have already completed (whether or not they were aborted)
 936 # 2. Jobs that have already been aborted (but may not have completed)
937 query = query.filter(complete=False).filter(aborted=False)
showarddc817512008-11-12 18:16:41 +0000938 models.AclGroup.check_abort_permissions(query)
showard9dbdcda2008-10-14 17:34:36 +0000939 host_queue_entries = list(query.select_related())
showard2bab8f42008-11-12 18:15:22 +0000940 rpc_utils.check_abort_synchronous_jobs(host_queue_entries)
mblighe8819cd2008-02-15 16:48:40 +0000941
Simran Basic1b26762013-06-26 14:23:21 -0700942 models.HostQueueEntry.abort_host_queue_entries(host_queue_entries)
Fang Deng63b0e452014-12-19 14:38:15 -0800943 hqe_info = [{'HostQueueEntry': hqe.id, 'Job': hqe.job_id,
944 'Job name': hqe.job.name} for hqe in host_queue_entries]
945 return hqe_info
showard9d821ab2008-07-11 16:54:29 +0000946
947
beeps8bb1f7d2013-08-05 01:30:09 -0700948def abort_special_tasks(**filter_data):
949 """\
950 Abort the special task, or tasks, specified in the filter.
951 """
952 query = models.SpecialTask.query_objects(filter_data)
953 special_tasks = query.filter(is_active=True)
954 for task in special_tasks:
955 task.abort()
956
957
Simran Basi73dae552013-02-25 14:57:46 -0800958def _call_special_tasks_on_hosts(task, hosts):
959 """\
960 Schedules a set of hosts for a special task.
961
962 @returns A list of hostnames that a special task was created for.
963 """
964 models.AclGroup.check_for_acl_violation_hosts(hosts)
Prashanth Balasubramanian6edaaf92014-11-24 16:36:25 -0800965 shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts)
Prashanth Balasubramanian8c98ac12014-12-23 11:26:44 -0800966 if shard_host_map and not utils.is_shard():
Prashanth Balasubramanian6edaaf92014-11-24 16:36:25 -0800967 raise ValueError('The following hosts are on shards, please '
968 'follow the link to the shards and create jobs '
969 'there instead. %s.' % shard_host_map)
Simran Basi73dae552013-02-25 14:57:46 -0800970 for host in hosts:
971 models.SpecialTask.schedule_special_task(host, task)
972 return list(sorted(host.hostname for host in hosts))
973
974
showard1ff7b2e2009-05-15 23:17:18 +0000975def reverify_hosts(**filter_data):
976 """\
977 Schedules a set of hosts for verify.
mbligh4e545a52009-12-19 05:30:39 +0000978
979 @returns A list of hostnames that a verify task was created for.
showard1ff7b2e2009-05-15 23:17:18 +0000980 """
Prashanth Balasubramanian40981232014-12-16 19:01:58 -0800981 hosts = models.Host.query_objects(filter_data)
982 shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts, rpc_hostnames=True)
983
984 # Filter out hosts on a shard from those on the master, forward
985 # rpcs to the shard with an additional hostname__in filter, and
986 # create a local SpecialTask for each remaining host.
Prashanth Balasubramanian8c98ac12014-12-23 11:26:44 -0800987 if shard_host_map and not utils.is_shard():
Prashanth Balasubramanian40981232014-12-16 19:01:58 -0800988 hosts = [h for h in hosts if h.shard is None]
989 for shard, hostnames in shard_host_map.iteritems():
990
991 # The main client of this module is the frontend website, and
992 # it invokes it with an 'id' or an 'id__in' filter. Regardless,
993 # the 'hostname' filter should narrow down the list of hosts on
994 # each shard even though we supply all the ids in filter_data.
995 # This method uses hostname instead of id because it fits better
996 # with the overall architecture of redirection functions in rpc_utils.
997 shard_filter = filter_data.copy()
998 shard_filter['hostname__in'] = hostnames
999 rpc_utils.run_rpc_on_multiple_hostnames(
1000 'reverify_hosts', [shard], **shard_filter)
1001
1002 # There is a race condition here if someone assigns a shard to one of these
1003 # hosts before we create the task. The host will stay on the master if:
1004 # 1. The host is not Ready
1005 # 2. The host is Ready but has a task
1006 # But if the host is Ready and doesn't have a task yet, it will get sent
1007 # to the shard as we're creating a task here.
1008
1009 # Given that we only rarely verify Ready hosts it isn't worth putting this
1010 # entire method in a transaction. The worst case scenario is that we have
 1011 # a verify running on a Ready host while the shard is using it; if the verify
1012 # fails no subsequent tasks will be created against the host on the master,
1013 # and verifies are safe enough that this is OK.
1014 return _call_special_tasks_on_hosts(models.SpecialTask.Task.VERIFY, hosts)
Simran Basi73dae552013-02-25 14:57:46 -08001015
1016
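# A minimal usage sketch (illustrative only): scheduling verify tasks for a
# few hosts by hostname. Hosts assigned to shards are forwarded there, as
# described above; the hostnames are hypothetical.
def _example_reverify_hosts_usage():
    return reverify_hosts(hostname__in=['host1.cros', 'host2.cros'])
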
1017def repair_hosts(**filter_data):
1018 """\
1019 Schedules a set of hosts for repair.
1020
1021 @returns A list of hostnames that a repair task was created for.
1022 """
1023 return _call_special_tasks_on_hosts(models.SpecialTask.Task.REPAIR,
1024 models.Host.query_objects(filter_data))
showard1ff7b2e2009-05-15 23:17:18 +00001025
1026
Jiaxi Luo15cbf372014-07-01 19:20:20 -07001027def get_jobs(not_yet_run=False, running=False, finished=False,
1028 suite=False, sub=False, standalone=False, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001029 """\
Jiaxi Luo15cbf372014-07-01 19:20:20 -07001030 Extra status filter args for get_jobs:
jadmanski0afbb632008-06-06 21:10:57 +00001031 -not_yet_run: Include only jobs that have not yet started running.
 1032 -running: Include only jobs that have started running but for which not
1033 all hosts have completed.
1034 -finished: Include only jobs for which all hosts have completed (or
1035 aborted).
1036 At most one of these three fields should be specified.
Jiaxi Luo15cbf372014-07-01 19:20:20 -07001037
1038 Extra type filter args for get_jobs:
1039 -suite: Include only jobs with child jobs.
1040 -sub: Include only jobs with a parent job.
 1041 -standalone: Include only jobs with no child or parent jobs.
1042 At most one of these three fields should be specified.
jadmanski0afbb632008-06-06 21:10:57 +00001043 """
Jiaxi Luo15cbf372014-07-01 19:20:20 -07001044 extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
1045 running,
1046 finished)
1047 filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
1048 suite,
1049 sub,
1050 standalone)
showard0957a842009-05-11 19:25:08 +00001051 job_dicts = []
1052 jobs = list(models.Job.query_objects(filter_data))
1053 models.Job.objects.populate_relationships(jobs, models.Label,
1054 'dependencies')
showardc1a98d12010-01-15 00:22:22 +00001055 models.Job.objects.populate_relationships(jobs, models.JobKeyval, 'keyvals')
showard0957a842009-05-11 19:25:08 +00001056 for job in jobs:
1057 job_dict = job.get_object_dict()
1058 job_dict['dependencies'] = ','.join(label.name
1059 for label in job.dependencies)
showardc1a98d12010-01-15 00:22:22 +00001060 job_dict['keyvals'] = dict((keyval.key, keyval.value)
1061 for keyval in job.keyvals)
Eric Lid23bc192011-02-09 14:38:57 -08001062 if job.parameterized_job:
1063 job_dict['image'] = get_parameterized_autoupdate_image_url(job)
showard0957a842009-05-11 19:25:08 +00001064 job_dicts.append(job_dict)
1065 return rpc_utils.prepare_for_serialization(job_dicts)
mblighe8819cd2008-02-15 16:48:40 +00001066
1067
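# A minimal usage sketch (illustrative only) combining a status filter with
# a type filter: suite jobs owned by a given user that are still running.
# The owner follows the module docstring's example and is hypothetical.
def _example_get_jobs_usage():
    return get_jobs(owner='showard', running=True, suite=True)
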
1068def get_num_jobs(not_yet_run=False, running=False, finished=False,
Jiaxi Luo15cbf372014-07-01 19:20:20 -07001069 suite=False, sub=False, standalone=False,
jadmanski0afbb632008-06-06 21:10:57 +00001070 **filter_data):
1071 """\
1072 See get_jobs() for documentation of extra filter parameters.
1073 """
Jiaxi Luo15cbf372014-07-01 19:20:20 -07001074 extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
1075 running,
1076 finished)
1077 filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
1078 suite,
1079 sub,
1080 standalone)
jadmanski0afbb632008-06-06 21:10:57 +00001081 return models.Job.query_count(filter_data)
mblighe8819cd2008-02-15 16:48:40 +00001082
1083
mblighe8819cd2008-02-15 16:48:40 +00001084def get_jobs_summary(**filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001085 """\
Jiaxi Luoaac54572014-06-04 13:57:02 -07001086 Like get_jobs(), but adds 'status_counts' and 'result_counts' fields.
1087
 1088 'status_counts' field is a dictionary mapping status strings to the number
1089 of hosts currently with that status, i.e. {'Queued' : 4, 'Running' : 2}.
1090
1091 'result_counts' field is piped to tko's rpc_interface and has the return
1092 format specified under get_group_counts.
jadmanski0afbb632008-06-06 21:10:57 +00001093 """
1094 jobs = get_jobs(**filter_data)
1095 ids = [job['id'] for job in jobs]
1096 all_status_counts = models.Job.objects.get_status_counts(ids)
1097 for job in jobs:
1098 job['status_counts'] = all_status_counts[job['id']]
Jiaxi Luoaac54572014-06-04 13:57:02 -07001099 job['result_counts'] = tko_rpc_interface.get_status_counts(
1100 ['afe_job_id', 'afe_job_id'],
1101 header_groups=[['afe_job_id'], ['afe_job_id']],
1102 **{'afe_job_id': job['id']})
jadmanski0afbb632008-06-06 21:10:57 +00001103 return rpc_utils.prepare_for_serialization(jobs)
mblighe8819cd2008-02-15 16:48:40 +00001104
1105
showarda965cef2009-05-15 23:17:41 +00001106def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
showarda8709c52008-07-03 19:44:54 +00001107 """\
1108 Retrieves all the information needed to clone a job.
1109 """
showarda8709c52008-07-03 19:44:54 +00001110 job = models.Job.objects.get(id=id)
showard29f7cd22009-04-29 21:16:24 +00001111 job_info = rpc_utils.get_job_info(job,
showarda965cef2009-05-15 23:17:41 +00001112 preserve_metahosts,
1113 queue_entry_filter_data)
showard945072f2008-09-03 20:34:59 +00001114
showardd9992fe2008-07-31 02:15:03 +00001115 host_dicts = []
showard29f7cd22009-04-29 21:16:24 +00001116 for host in job_info['hosts']:
1117 host_dict = get_hosts(id=host.id)[0]
1118 other_labels = host_dict['labels']
1119 if host_dict['platform']:
1120 other_labels.remove(host_dict['platform'])
1121 host_dict['other_labels'] = ', '.join(other_labels)
showardd9992fe2008-07-31 02:15:03 +00001122 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001123
showard29f7cd22009-04-29 21:16:24 +00001124 for host in job_info['one_time_hosts']:
1125 host_dict = dict(hostname=host.hostname,
1126 id=host.id,
1127 platform='(one-time host)',
1128 locked_text='')
1129 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001130
showard4d077562009-05-08 18:24:36 +00001131 # convert keys from Label objects to strings (names of labels)
showard29f7cd22009-04-29 21:16:24 +00001132 meta_host_counts = dict((meta_host.name, count) for meta_host, count
showard4d077562009-05-08 18:24:36 +00001133 in job_info['meta_host_counts'].iteritems())
showard29f7cd22009-04-29 21:16:24 +00001134
1135 info = dict(job=job.get_object_dict(),
1136 meta_host_counts=meta_host_counts,
1137 hosts=host_dicts)
1138 info['job']['dependencies'] = job_info['dependencies']
1139 if job_info['atomic_group']:
1140 info['atomic_group_name'] = (job_info['atomic_group']).name
1141 else:
1142 info['atomic_group_name'] = None
jamesren2275ef12010-04-12 18:25:06 +00001143 info['hostless'] = job_info['hostless']
jamesren76fcf192010-04-21 20:39:50 +00001144 info['drone_set'] = job.drone_set and job.drone_set.name
showarda8709c52008-07-03 19:44:54 +00001145
Eric Lid23bc192011-02-09 14:38:57 -08001146 if job.parameterized_job:
1147 info['job']['image'] = get_parameterized_autoupdate_image_url(job)
1148
showarda8709c52008-07-03 19:44:54 +00001149 return rpc_utils.prepare_for_serialization(info)
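
# Illustrative sketch (never called): the job id 42 is hypothetical. The
# returned dictionary is what the frontend uses to pre-populate the "clone
# job" form.
def _example_clone_info():
    info = get_info_for_clone(42, preserve_metahosts=True)
    # 'job' is the job row dict (including 'dependencies' and, for
    # parameterized jobs, 'image'); 'hosts' holds per-host dicts with an
    # 'other_labels' summary; 'meta_host_counts' maps label name -> count.
    return info['job'], info['hosts'], info['meta_host_counts']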
1150
1151
showard34dc5fa2008-04-24 20:58:40 +00001152# host queue entries
1153
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001154def get_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001155 """\
showardc92da832009-04-07 18:14:34 +00001156 @returns A sequence of nested dictionaries of host and job information.
jadmanski0afbb632008-06-06 21:10:57 +00001157 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001158 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1159 'started_on__lte',
1160 start_time,
1161 end_time,
1162 **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001163 return rpc_utils.prepare_rows_as_nested_dicts(
1164 models.HostQueueEntry.query_objects(filter_data),
1165 ('host', 'atomic_group', 'job'))
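
# Illustrative sketch (never called): restrict host queue entries to a time
# window. The hostname is hypothetical, datetime objects are assumed to be
# acceptable time bounds, and the Django-style 'host__hostname' lookup is
# assumed to be understood by query_objects().
def _example_hqes_for_window():
    start = datetime.datetime(2015, 6, 1)
    end = datetime.datetime(2015, 6, 2)
    return get_host_queue_entries(start_time=start, end_time=end,
                                  host__hostname='some-host')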
showard34dc5fa2008-04-24 20:58:40 +00001166
1167
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001168def get_num_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001169 """\
1170    Get the number of host queue entries matching the given filters.
1171 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001172 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1173 'started_on__lte',
1174 start_time,
1175 end_time,
1176 **filter_data)
jadmanski0afbb632008-06-06 21:10:57 +00001177 return models.HostQueueEntry.query_count(filter_data)
showard34dc5fa2008-04-24 20:58:40 +00001178
1179
showard1e935f12008-07-11 00:11:36 +00001180def get_hqe_percentage_complete(**filter_data):
1181 """
showardc92da832009-04-07 18:14:34 +00001182 Computes the fraction of host queue entries matching the given filter data
showard1e935f12008-07-11 00:11:36 +00001183 that are complete.
1184 """
1185 query = models.HostQueueEntry.query_objects(filter_data)
1186 complete_count = query.filter(complete=True).count()
1187 total_count = query.count()
1188 if total_count == 0:
1189 return 1
1190 return float(complete_count) / total_count
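
# Illustrative sketch (never called): progress of a single job, assuming the
# standard Django 'job__id' lookup is accepted by the filter.
def _example_job_progress(job_id):
    # 1 when nothing matches, otherwise complete entries divided by the total.
    return get_hqe_percentage_complete(job__id=job_id)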
1191
1192
showard1a5a4082009-07-28 20:01:37 +00001193# special tasks
1194
1195def get_special_tasks(**filter_data):
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001196 """Get special task entries from the local database.
1197
1198 Query the special tasks table for tasks matching the given
1199 `filter_data`, and return a list of the results. No attempt is
1200 made to forward the call to shards; the buck will stop here.
1201 The caller is expected to know the target shard for such reasons
1202 as:
1203 * The caller is a service (such as gs_offloader) configured
1204 to operate on behalf of one specific shard, and no other.
1205 * The caller has a host as a parameter, and knows that this is
1206 the shard assigned to that host.
1207
1208 @param filter_data Filter keywords to pass to the underlying
1209 database query.
1210
1211 """
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001212 return rpc_utils.prepare_rows_as_nested_dicts(
1213 models.SpecialTask.query_objects(filter_data),
1214 ('host', 'queue_entry'))
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001215
1216
1217def get_host_special_tasks(host_id, **filter_data):
1218 """Get special task entries for a given host.
1219
1220 Query the special tasks table for tasks that ran on the host
1221 given by `host_id` and matching the given `filter_data`.
1222 Return a list of the results. If the host is assigned to a
1223 shard, forward this call to that shard.
1224
1225 @param host_id Id in the database of the target host.
1226 @param filter_data Filter keywords to pass to the underlying
1227 database query.
1228
1229 """
MK Ryu0c1a37d2015-04-30 12:00:55 -07001230 # Retrieve host data even if the host is in an invalid state.
1231 host = models.Host.smart_get(host_id, False)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001232 if not host.shard:
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001233 return get_special_tasks(host_id=host_id, **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001234 else:
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001235 # The return values from AFE methods are post-processed
1236 # objects that aren't JSON-serializable. So, we have to
1237 # call AFE.run() to get the raw, serializable output from
1238 # the shard.
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001239 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1240 return shard_afe.run('get_special_tasks',
1241 host_id=host_id, **filter_data)
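
# Illustrative sketch (never called): the hostname is hypothetical, and the
# 'task' and 'is_complete' filter fields are assumed names of columns in the
# special tasks table.
def _example_host_repairs(hostname):
    # Routed to the owning shard automatically when the host has one.
    return get_host_special_tasks(hostname, task='Repair', is_complete=True)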
showard1a5a4082009-07-28 20:01:37 +00001242
1243
MK Ryu0c1a37d2015-04-30 12:00:55 -07001244def get_num_special_tasks(**kwargs):
1245 """Get the number of special task entries from the local database.
1246
1247 Query the special tasks table for tasks matching the given 'kwargs',
1248 and return the number of the results. No attempt is made to forward
1249 the call to shards; the buck will stop here.
1250
1251 @param kwargs Filter keywords to pass to the underlying database query.
1252
1253 """
1254 return models.SpecialTask.query_count(kwargs)
1255
1256
1257def get_host_num_special_tasks(host, **kwargs):
1258    """Get the number of special task entries for a given host.
1259
1260 Query the special tasks table for tasks that ran on the host
1261 given by 'host' and matching the given 'kwargs'.
1262    Return the number of results. If the host is assigned to a
1263 shard, forward this call to that shard.
1264
1265    @param host Database id or name of a host; more often a hostname.
1266 @param kwargs Filter keywords to pass to the underlying database query.
1267
1268 """
1269 # Retrieve host data even if the host is in an invalid state.
1270 host_model = models.Host.smart_get(host, False)
1271 if not host_model.shard:
1272 return get_num_special_tasks(host=host, **kwargs)
1273 else:
1274 shard_afe = frontend.AFE(server=host_model.shard.rpc_hostname())
1275 return shard_afe.run('get_num_special_tasks', host=host, **kwargs)
1276
1277
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001278def get_status_task(host_id, end_time):
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001279 """Get the "status task" for a host from the local shard.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001280
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001281 Returns a single special task representing the given host's
1282 "status task". The status task is a completed special task that
1283 identifies whether the corresponding host was working or broken
1284 when it completed. A successful task indicates a working host;
1285 a failed task indicates broken.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001286
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001287    This call will not be forwarded to a shard; the receiving server
1288 must be the shard that owns the host.
1289
1290 @param host_id Id in the database of the target host.
1291 @param end_time Time reference for the host's status.
1292
1293 @return A single task; its status (successful or not)
1294 corresponds to the status of the host (working or
1295 broken) at the given time. If no task is found, return
1296 `None`.
1297
1298 """
1299 tasklist = rpc_utils.prepare_rows_as_nested_dicts(
1300 status_history.get_status_task(host_id, end_time),
1301 ('host', 'queue_entry'))
1302 return tasklist[0] if tasklist else None
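
# Illustrative sketch (never called): decide whether a host was working at a
# given moment on this shard. A datetime is assumed to be an acceptable
# end_time, and the 'success' key is assumed from the docstring above.
def _example_host_was_working(host_id):
    task = get_status_task(host_id, datetime.datetime.now())
    if task is None:
        return None
    return bool(task['success'])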
1303
1304
1305def get_host_status_task(host_id, end_time):
1306 """Get the "status task" for a host from its owning shard.
1307
1308 Finds the given host's owning shard, and forwards to it a call
1309 to `get_status_task()` (see above).
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001310
1311 @param host_id Id in the database of the target host.
1312 @param end_time Time reference for the host's status.
1313
1314 @return A single task; its status (successful or not)
1315 corresponds to the status of the host (working or
1316 broken) at the given time. If no task is found, return
1317 `None`.
1318
1319 """
1320 host = models.Host.smart_get(host_id)
1321 if not host.shard:
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001322 return get_status_task(host_id, end_time)
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001323 else:
1324 # The return values from AFE methods are post-processed
1325 # objects that aren't JSON-serializable. So, we have to
1326 # call AFE.run() to get the raw, serializable output from
1327 # the shard.
1328 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1329 return shard_afe.run('get_status_task',
1330 host_id=host_id, end_time=end_time)
1331
1332
J. Richard Barnette8abbfd62015-06-23 12:46:54 -07001333def get_host_diagnosis_interval(host_id, end_time, success):
1334 """Find a "diagnosis interval" for a given host.
1335
1336 A "diagnosis interval" identifies a start and end time where
1337 the host went from "working" to "broken", or vice versa. The
1338 interval's starting time is the starting time of the last status
1339 task with the old status; the end time is the finish time of the
1340 first status task with the new status.
1341
1342 This routine finds the most recent diagnosis interval for the
1343 given host prior to `end_time`, with a starting status matching
1344 `success`. If `success` is true, the interval will start with a
1345 successful status task; if false the interval will start with a
1346 failed status task.
1347
1348 @param host_id Id in the database of the target host.
1349 @param end_time Time reference for the diagnosis interval.
1350 @param success Whether the diagnosis interval should start
1351 with a successful or failed status task.
1352
1353 @return A list of two strings. The first is the timestamp for
1354 the beginning of the interval; the second is the
1355 timestamp for the end. If the host has never changed
1356 state, the list is empty.
1357
1358 """
1359 host = models.Host.smart_get(host_id)
J. Richard Barnette78f281a2015-06-29 13:24:51 -07001360 if not host.shard or utils.is_shard():
J. Richard Barnette8abbfd62015-06-23 12:46:54 -07001361 return status_history.get_diagnosis_interval(
1362 host_id, end_time, success)
1363 else:
1364 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1365 return shard_afe.get_host_diagnosis_interval(
1366 host_id, end_time, success)
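
# Illustrative sketch (never called): find the most recent window in which
# the given host went from working to broken, relative to "now".
def _example_when_host_broke(host_id):
    interval = get_host_diagnosis_interval(
            host_id, datetime.datetime.now(), success=True)
    # Empty list if the host never changed state, otherwise
    # [start_timestamp, end_timestamp].
    return interval or None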
1367
1368
showardc0ac3a72009-07-08 21:14:45 +00001369# support for host detail view
1370
MK Ryu0c1a37d2015-04-30 12:00:55 -07001371def get_host_queue_entries_and_special_tasks(host, query_start=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001372 query_limit=None, start_time=None,
1373 end_time=None):
showardc0ac3a72009-07-08 21:14:45 +00001374 """
1375 @returns an interleaved list of HostQueueEntries and SpecialTasks,
1376 in approximate run order. each dict contains keys for type, host,
1377    in approximate run order. Each dict contains keys for type, host,
1378 """
1379 total_limit = None
1380 if query_limit is not None:
1381        total_limit = (query_start or 0) + query_limit
MK Ryu0c1a37d2015-04-30 12:00:55 -07001382 filter_data_common = {'host': host,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001383 'query_limit': total_limit,
1384 'sort_by': ['-id']}
showardc0ac3a72009-07-08 21:14:45 +00001385
MK Ryu0c1a37d2015-04-30 12:00:55 -07001386 filter_data_special_tasks = rpc_utils.inject_times_to_filter(
1387 'time_started__gte', 'time_started__lte', start_time, end_time,
1388 **filter_data_common)
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001389
MK Ryu0c1a37d2015-04-30 12:00:55 -07001390 queue_entries = get_host_queue_entries(
1391 start_time, end_time, **filter_data_common)
1392 special_tasks = get_host_special_tasks(host, **filter_data_special_tasks)
showardc0ac3a72009-07-08 21:14:45 +00001393
1394 interleaved_entries = rpc_utils.interleave_entries(queue_entries,
1395 special_tasks)
1396 if query_start is not None:
1397 interleaved_entries = interleaved_entries[query_start:]
1398 if query_limit is not None:
1399 interleaved_entries = interleaved_entries[:query_limit]
MK Ryu0c1a37d2015-04-30 12:00:55 -07001400 return rpc_utils.prepare_host_queue_entries_and_special_tasks(
1401 interleaved_entries, queue_entries)
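
# Illustrative sketch (never called): page through a host's combined history
# the way the host detail view does. A database host id is used here; the
# page size is arbitrary.
def _example_host_history_page(host_id, page=0, page_size=20):
    return get_host_queue_entries_and_special_tasks(
            host_id, query_start=page * page_size, query_limit=page_size)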
showardc0ac3a72009-07-08 21:14:45 +00001402
1403
MK Ryu0c1a37d2015-04-30 12:00:55 -07001404def get_num_host_queue_entries_and_special_tasks(host, start_time=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001405 end_time=None):
MK Ryu0c1a37d2015-04-30 12:00:55 -07001406 filter_data_common = {'host': host}
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001407
1408 filter_data_queue_entries, filter_data_special_tasks = (
1409 rpc_utils.inject_times_to_hqe_special_tasks_filters(
1410 filter_data_common, start_time, end_time))
1411
1412 return (models.HostQueueEntry.query_count(filter_data_queue_entries)
MK Ryu0c1a37d2015-04-30 12:00:55 -07001413 + get_host_num_special_tasks(**filter_data_special_tasks))
showardc0ac3a72009-07-08 21:14:45 +00001414
1415
showard29f7cd22009-04-29 21:16:24 +00001416# recurring run
1417
1418def get_recurring(**filter_data):
1419 return rpc_utils.prepare_rows_as_nested_dicts(
1420 models.RecurringRun.query_objects(filter_data),
1421 ('job', 'owner'))
1422
1423
1424def get_num_recurring(**filter_data):
1425 return models.RecurringRun.query_count(filter_data)
1426
1427
1428def delete_recurring_runs(**filter_data):
1429 to_delete = models.RecurringRun.query_objects(filter_data)
1430 to_delete.delete()
1431
1432
1433def create_recurring_run(job_id, start_date, loop_period, loop_count):
showard64a95952010-01-13 21:27:16 +00001434 owner = models.User.current_user().login
showard29f7cd22009-04-29 21:16:24 +00001435 job = models.Job.objects.get(id=job_id)
1436 return job.create_recurring_job(start_date=start_date,
1437 loop_period=loop_period,
1438 loop_count=loop_count,
1439 owner=owner)
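
# Illustrative sketch (never called): schedule an existing template job to
# recur. The period and count values are placeholders; their units and
# semantics are whatever Job.create_recurring_job() defines.
def _example_schedule_recurring(job_id):
    return create_recurring_run(job_id,
                                start_date=datetime.datetime.now(),
                                loop_period=3600,
                                loop_count=24)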
1440
1441
mblighe8819cd2008-02-15 16:48:40 +00001442# other
1443
showarde0b63622008-08-04 20:58:47 +00001444def echo(data=""):
1445 """\
1446 Returns a passed in string. For doing a basic test to see if RPC calls
1447 can successfully be made.
1448 """
1449 return data
1450
1451
showardb7a52fd2009-04-27 20:10:56 +00001452def get_motd():
1453 """\
1454 Returns the message of the day as a string.
1455 """
1456 return rpc_utils.get_motd()
1457
1458
mblighe8819cd2008-02-15 16:48:40 +00001459def get_static_data():
jadmanski0afbb632008-06-06 21:10:57 +00001460 """\
1461 Returns a dictionary containing a bunch of data that shouldn't change
1462 often and is otherwise inaccessible. This includes:
showardc92da832009-04-07 18:14:34 +00001463
1464 priorities: List of job priority choices.
1465 default_priority: Default priority value for new jobs.
1466 users: Sorted list of all users.
Jiaxi Luo31874592014-06-11 10:36:35 -07001467    labels: Sorted list of labels that do not start with 'cros-version'
1468            or 'fw-version'.
showardc92da832009-04-07 18:14:34 +00001469 atomic_groups: Sorted list of all atomic groups.
1470 tests: Sorted list of all tests.
1471 profilers: Sorted list of all profilers.
1472 current_user: Logged-in username.
1473 host_statuses: Sorted list of possible Host statuses.
1474 job_statuses: Sorted list of possible HostQueueEntry statuses.
Simran Basi7e605742013-11-12 13:43:36 -08001475    job_timeout_mins_default: The default job timeout length in minutes.
showarda1e74b32009-05-12 17:32:04 +00001476 parse_failed_repair_default: Default value for the parse_failed_repair job
Jiaxi Luo31874592014-06-11 10:36:35 -07001477 option.
showardc92da832009-04-07 18:14:34 +00001478 reboot_before_options: A list of valid RebootBefore string enums.
1479 reboot_after_options: A list of valid RebootAfter string enums.
1480 motd: Server's message of the day.
1481 status_dictionary: A mapping from one word job status names to a more
1482 informative description.
jadmanski0afbb632008-06-06 21:10:57 +00001483 """
showard21baa452008-10-21 00:08:39 +00001484
1485 job_fields = models.Job.get_field_dict()
jamesren76fcf192010-04-21 20:39:50 +00001486 default_drone_set_name = models.DroneSet.default_drone_set_name()
1487 drone_sets = ([default_drone_set_name] +
1488 sorted(drone_set.name for drone_set in
1489 models.DroneSet.objects.exclude(
1490 name=default_drone_set_name)))
showard21baa452008-10-21 00:08:39 +00001491
jadmanski0afbb632008-06-06 21:10:57 +00001492 result = {}
Alex Miller7d658cf2013-09-04 16:00:35 -07001493 result['priorities'] = priorities.Priority.choices()
1494 default_priority = priorities.Priority.DEFAULT
1495 result['default_priority'] = 'Default'
1496 result['max_schedulable_priority'] = priorities.Priority.DEFAULT
jadmanski0afbb632008-06-06 21:10:57 +00001497 result['users'] = get_users(sort_by=['login'])
Jiaxi Luo31874592014-06-11 10:36:35 -07001498
1499 label_exclude_filters = [{'name__startswith': 'cros-version'},
1500 {'name__startswith': 'fw-version'}]
1501 result['labels'] = get_labels(
1502 label_exclude_filters,
1503 sort_by=['-platform', 'name'])
1504
showardc92da832009-04-07 18:14:34 +00001505 result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
jadmanski0afbb632008-06-06 21:10:57 +00001506 result['tests'] = get_tests(sort_by=['name'])
showard2b9a88b2008-06-13 20:55:03 +00001507 result['profilers'] = get_profilers(sort_by=['name'])
showard0fc38302008-10-23 00:44:07 +00001508 result['current_user'] = rpc_utils.prepare_for_serialization(
showard64a95952010-01-13 21:27:16 +00001509 models.User.current_user().get_object_dict())
showard2b9a88b2008-06-13 20:55:03 +00001510 result['host_statuses'] = sorted(models.Host.Status.names)
mbligh5a198b92008-12-11 19:33:29 +00001511 result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
Simran Basi7e605742013-11-12 13:43:36 -08001512 result['job_timeout_mins_default'] = models.Job.DEFAULT_TIMEOUT_MINS
Simran Basi34217022012-11-06 13:43:15 -08001513 result['job_max_runtime_mins_default'] = (
1514 models.Job.DEFAULT_MAX_RUNTIME_MINS)
showarda1e74b32009-05-12 17:32:04 +00001515 result['parse_failed_repair_default'] = bool(
1516 models.Job.DEFAULT_PARSE_FAILED_REPAIR)
jamesrendd855242010-03-02 22:23:44 +00001517 result['reboot_before_options'] = model_attributes.RebootBefore.names
1518 result['reboot_after_options'] = model_attributes.RebootAfter.names
showard8fbae652009-01-20 23:23:10 +00001519 result['motd'] = rpc_utils.get_motd()
jamesren76fcf192010-04-21 20:39:50 +00001520 result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
1521 result['drone_sets'] = drone_sets
jamesren4a41e012010-07-16 22:33:48 +00001522 result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
showard8ac29b42008-07-17 17:01:55 +00001523
showardd3dc1992009-04-22 21:01:40 +00001524 result['status_dictionary'] = {"Aborted": "Aborted",
showard8ac29b42008-07-17 17:01:55 +00001525 "Verifying": "Verifying Host",
Alex Millerdfff2fd2013-05-28 13:05:06 -07001526 "Provisioning": "Provisioning Host",
showard8ac29b42008-07-17 17:01:55 +00001527 "Pending": "Waiting on other hosts",
1528 "Running": "Running autoserv",
1529 "Completed": "Autoserv completed",
1530 "Failed": "Failed to complete",
showardd823b362008-07-24 16:35:46 +00001531 "Queued": "Queued",
showard5deb6772008-11-04 21:54:33 +00001532 "Starting": "Next in host's queue",
1533 "Stopped": "Other host(s) failed verify",
showardd3dc1992009-04-22 21:01:40 +00001534 "Parsing": "Awaiting parse of final results",
showard29f7cd22009-04-29 21:16:24 +00001535 "Gathering": "Gathering log files",
showard8cc058f2009-09-08 16:26:33 +00001536 "Template": "Template job for recurring run",
mbligh4608b002010-01-05 18:22:35 +00001537 "Waiting": "Waiting for scheduler action",
Dan Shi07e09af2013-04-12 09:31:29 -07001538 "Archiving": "Archiving results",
1539 "Resetting": "Resetting hosts"}
Jiaxi Luo421608e2014-07-07 14:38:00 -07001540
1541 result['wmatrix_url'] = rpc_utils.get_wmatrix_url()
Simran Basi71206ef2014-08-13 13:51:18 -07001542 result['is_moblab'] = bool(utils.is_moblab())
Jiaxi Luo421608e2014-07-07 14:38:00 -07001543
jadmanski0afbb632008-06-06 21:10:57 +00001544 return result
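
# Illustrative sketch (never called): clients typically fetch this dictionary
# once at startup and read individual keys from it.
def _example_static_lookups():
    static = get_static_data()
    return (static['priorities'],
            static['motd'],
            static['status_dictionary'].get('Running'))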
showard29f7cd22009-04-29 21:16:24 +00001545
1546
1547def get_server_time():
1548 return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")