# pylint: disable-msg=C0111

"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing a uniquely identifying field (this option should
   seldom be used)

When specifying foreign key fields (i.e. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or the
name of the object.

All get* functions return lists of dictionaries. Each dictionary represents one
object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import sys
import datetime

from django.db.models import Count
import common
from autotest_lib.client.common_lib import priorities
from autotest_lib.client.common_lib.cros.graphite import autotest_stats
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.frontend.afe import models, model_logic, model_attributes
from autotest_lib.frontend.afe import site_rpc_interface
from autotest_lib.frontend.tko import models as tko_models
from autotest_lib.frontend.tko import rpc_interface as tko_rpc_interface
from autotest_lib.server import frontend
from autotest_lib.server import utils
from autotest_lib.server.cros.dynamic_suite import tools
from autotest_lib.site_utils import status_history


_timer = autotest_stats.Timer('rpc_interface')

def get_parameterized_autoupdate_image_url(job):
    """Get the parameterized autoupdate image url from a parameterized job."""
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    para_set = job.parameterized_job.parameterizedjobparameter_set
    job_test_para = para_set.get(test_parameter=image_parameter)
    return job_test_para.parameter_value


# labels

def modify_label(id, **data):
    """Modify a label.

    @param id: id or name of a label. More often a label name.
    @param data: New data for a label.
    """
    label_model = models.Label.smart_get(id)

    # Master forwards the RPC to shards
    if not utils.is_shard():
        rpc_utils.fanout_rpc(label_model.host_set.all(), 'modify_label', False,
                             id=id, **data)

    label_model.update_object(data)


def delete_label(id):
    """Delete a label.

    @param id: id or name of a label. More often a label name.
    """
    label_model = models.Label.smart_get(id)

    # Master forwards the RPC to shards
    if not utils.is_shard():
        rpc_utils.fanout_rpc(label_model.host_set.all(), 'delete_label', False,
                             id=id)

    label_model.delete()


def add_label(name, ignore_exception_if_exists=False, **kwargs):
    """Adds a new label of a given name.

    @param name: label name.
    @param ignore_exception_if_exists: If True and the exception was
        thrown due to the duplicated label name when adding a label,
        then suppress the exception. Default is False.
    @param kwargs: keyword args that store more info about a label
        other than the name.
    @return: int/long id of a new label.
    """
    # models.Label.add_object() throws model_logic.ValidationError
    # when it is given a label name that already exists.
    # However, ValidationError can be thrown with different errors,
    # and those errors should be thrown up to the call chain.
    try:
        label = models.Label.add_object(name=name, **kwargs)
    except:
        exc_info = sys.exc_info()
        if ignore_exception_if_exists:
            label = rpc_utils.get_label(name)
            # If the exception is raised not because of duplicated
            # "name", then raise the original exception.
            if label is None:
                raise exc_info[0], exc_info[1], exc_info[2]
        else:
            raise exc_info[0], exc_info[1], exc_info[2]
    return label.id
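
# A minimal usage sketch for add_label; the label name below is hypothetical.
# With ignore_exception_if_exists=True the call is effectively idempotent:
# it returns the id of the existing label instead of raising on a duplicate
# name.
#
#     label_id = add_label('board:lumpy', ignore_exception_if_exists=True)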


def add_label_to_hosts(id, hosts):
    """Adds a label of the given id to the given hosts only in local DB.

    @param id: id or name of a label. More often a label name.
    @param hosts: The hostnames of hosts that need the label.

    @raises models.Label.DoesNotExist: If the label with id doesn't exist.
    """
    label = models.Label.smart_get(id)
    host_objs = models.Host.smart_get_bulk(hosts)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)


@rpc_utils.route_rpc_to_master
def label_add_hosts(id, hosts):
    """Adds a label with the given id to the given hosts.

    This method should be run only on the master, not on shards.
    The given label will be created if it doesn't exist, provided the `id`
    supplied is a label name, not an int/long id.

    @param id: id or name of a label. More often a label name.
    @param hosts: A list of hostnames or ids. More often hostnames.

    @raises ValueError: If the id specified is an int/long (label id)
        while the label does not exist.
    """
    try:
        label = models.Label.smart_get(id)
    except models.Label.DoesNotExist:
        # This matches the type checks in smart_get, which is a hack
        # in and of itself. The aim here is to create any non-existent
        # label, which we cannot do if the 'id' specified isn't a label name.
        if isinstance(id, basestring):
            label = models.Label.smart_get(add_label(id))
        else:
            raise ValueError('Label id (%s) does not exist. Please specify '
                             'the argument, id, as a string (label name).'
                             % id)

    host_objs = models.Host.smart_get_bulk(hosts)
    # Make sure the label exists on the shard with the same id
    # as it is on the master.
    # It is possible that the label already exists on some shards, because
    # we add a new label only to the shards of the hosts that the label is
    # going to be attached to.
    # For example, we add a label L1 to a host in shard S1.
    # The master and S1 will have L1, but other shards won't.
    # Later, when we add the same label L1 to hosts in shards S1 and S2,
    # S1 already has the label but S2 doesn't.
    # S2 should get the new label without any problem.
    # We ignore the exception in such a case.
    rpc_utils.fanout_rpc(
            host_objs, 'add_label', name=label.name, id=label.id,
            include_hostnames=False, ignore_exception_if_exists=True)
    rpc_utils.fanout_rpc(host_objs, 'add_label_to_hosts', id=id)

    add_label_to_hosts(id, hosts)
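
# Illustrative call for label_add_hosts; the label and hostnames below are
# hypothetical. Passing a label name (string) lets the label be created on
# demand, while passing an int/long id requires the label to exist already.
#
#     label_add_hosts('pool:bvt',
#                     ['host1.cros.example', 'host2.cros.example'])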


def remove_label_from_hosts(id, hosts):
    """Removes a label of the given id from the given hosts only in local DB.

    @param id: id or name of a label.
    @param hosts: The hostnames of hosts from which to remove the label.
    """
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


@rpc_utils.route_rpc_to_master
def label_remove_hosts(id, hosts):
    """Removes a label of the given id from the given hosts.

    This method should be run only on the master, not on shards.

    @param id: id or name of a label.
    @param hosts: A list of hostnames or ids. More often hostnames.
    """
    host_objs = models.Host.smart_get_bulk(hosts)
    rpc_utils.fanout_rpc(host_objs, 'remove_label_from_hosts', id=id)

    remove_label_from_hosts(id, hosts)


def get_labels(exclude_filters=(), **filter_data):
    """\
    @param exclude_filters: A sequence of dictionaries of filters.

    @returns A sequence of nested dictionaries of label information.
    """
    labels = models.Label.query_objects(filter_data)
    for exclude_filter in exclude_filters:
        labels = labels.exclude(**exclude_filter)
    return rpc_utils.prepare_rows_as_nested_dicts(labels, ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, lock_reason='',
             protection=None):
    if locked and not lock_reason:
        raise model_logic.ValidationError(
            {'locked': 'Please provide a reason for locking when adding host.'})

    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, lock_reason=lock_reason,
                                  protection=protection).id


@rpc_utils.forward_single_host_rpc_to_shard
def modify_host(id, **data):
    """Modify local attributes of a host.

    If this is called on the master, but the host is assigned to a shard, this
    will also forward the call to the responsible shard. This means, for
    example, that if a host is locked using this function, the change will
    also propagate to the shard.

    @param id: id of the host to modify.
    @param **data: key=value pairs of values to set on the host.
    """
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)

    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """Modify local attributes of multiple hosts.

    If this is called on the master, but one of the hosts that match the
    filters is assigned to a shard, this will also forward the call to the
    responsible shard.

    The filters are always applied on the master, not on the shards. This means
    if the states of a host differ on the master and a shard, the state on the
    master will be used. I.e. this means:
    A host was synced to Shard 1. On Shard 1 the status of the host was set to
    'Repair Failed'.
    - A call to modify_hosts with host_filter_data={'status': 'Ready'} will
      update the host (both on the shard and on the master), because the state
      of the host as the master knows it is still 'Ready'.
    - A call to modify_hosts with host_filter_data={'status': 'Repair Failed'}
      will not update the host, because the filter doesn't apply on the master.

    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)

    affected_shard_hostnames = set()
    affected_host_ids = []

    # Check all hosts before changing data for exception safety.
    for host in hosts:
        rpc_utils.check_modify_host_locking(host, update_data)
        if host.shard:
            affected_shard_hostnames.add(host.shard.rpc_hostname())
            affected_host_ids.append(host.id)

    if not utils.is_shard():
        # Caution: Changing the filter from the original here. See docstring.
        rpc_utils.run_rpc_on_multiple_hostnames(
                'modify_hosts', affected_shard_hostnames,
                host_filter_data={'id__in': affected_host_ids},
                update_data=update_data)

    for host in hosts:
        host.update_object(update_data)
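
# Sketch of a modify_hosts call (the filter and lock reason are hypothetical).
# The filter is evaluated on the master; the update is then fanned out to any
# shards that own the matched hosts.
#
#     modify_hosts(host_filter_data={'status': 'Ready'},
#                  update_data={'locked': True,
#                               'lock_reason': 'needs manual check'})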


def add_labels_to_host(id, labels):
    """Adds labels to a given host only in local DB.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.add(*label_objs)


@rpc_utils.route_rpc_to_master
def host_add_labels(id, labels):
    """Adds labels to a given host.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.

    @raises ValidationError: If adding more than one platform label.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    platforms = [label.name for label in label_objs if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})

    host_obj = models.Host.smart_get(id)
    if len(platforms) == 1:
        models.Host.check_no_platform([host_obj])

    rpc_utils.fanout_rpc([host_obj], 'add_labels_to_host', False,
                         id=id, labels=labels)
    add_labels_to_host(id, labels)


def remove_labels_from_host(id, labels):
    """Removes labels from a given host only in local DB.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    label_objs = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*label_objs)


@rpc_utils.route_rpc_to_master
def host_remove_labels(id, labels):
    """Removes labels from a given host.

    @param id: id or hostname for a host.
    @param labels: ids or names for labels.
    """
    host_obj = models.Host.smart_get(id)
    rpc_utils.fanout_rpc([host_obj], 'remove_labels_from_host', False,
                         id=id, labels=labels)
    remove_labels_from_host(id, labels)


def get_host_attribute(attribute, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    hosts = rpc_utils.get_host_query((), False, False, True, host_filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_attr_dicts = []
    for host_obj in hosts:
        for attr_obj in host_obj.attribute_list:
            if attr_obj.attribute == attribute:
                host_attr_dicts.append(attr_obj.get_object_dict())
    return rpc_utils.prepare_for_serialization(host_attr_dicts)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param value: string, or None to delete an attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    assert host_filter_data  # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    # Master forwards this RPC to shards.
    if not utils.is_shard():
        rpc_utils.fanout_rpc(hosts, 'set_host_attribute', False,
                attribute=attribute, value=value, **host_filter_data)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)
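
# Illustrative set_host_attribute calls (hostname and attribute name are
# hypothetical). Passing value=None deletes the attribute instead of
# setting it.
#
#     set_host_attribute('storage_type', 'ssd',
#                        hostname='host1.cros.example')
#     set_host_attribute('storage_type', None,
#                        hostname='host1.cros.example')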


@rpc_utils.forward_single_host_rpc_to_shard
def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True, **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given. Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                                'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                                'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
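
# Example query against get_hosts (label names are hypothetical). Django-style
# filter keywords on the Host model can be passed through **filter_data
# alongside the explicit arguments.
#
#     get_hosts(multiple_labels=['board:lumpy', 'pool:bvt'], status='Ready')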


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Test.list_objects(filter_data))


@_timer.decorate
def get_tests_status_counts_by_job_name_label(job_name_prefix, label_name):
    """Gets the counts of all passed and failed tests from the matching jobs.

    @param job_name_prefix: Name prefix of the jobs to get the summary from,
            e.g., 'butterfly-release/R40-6457.21.0/bvt-cq/'.
    @param label_name: Label that must be set in the jobs, e.g.,
            'cros-version:butterfly-release/R40-6457.21.0'.

    @returns A summary of the counts of all the passed and failed tests.
    """
    job_ids = list(models.Job.objects.filter(
            name__startswith=job_name_prefix,
            dependency_labels__name=label_name).values_list(
            'pk', flat=True))
    summary = {'passed': 0, 'failed': 0}
    if not job_ids:
        return summary

    counts = (tko_models.TestView.objects.filter(
            afe_job_id__in=job_ids).exclude(
            test_name='SERVER_JOB').exclude(
            test_name__startswith='CLIENT_JOB').values(
            'status').annotate(
            count=Count('status')))
    for status in counts:
        if status['status'] == 'GOOD':
            summary['passed'] += status['count']
        else:
            summary['failed'] += status['count']
    return summary
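
# Illustrative call, reusing the example values from the docstring above; the
# returned counts are made up:
#
#     get_tests_status_counts_by_job_name_label(
#             'butterfly-release/R40-6457.21.0/bvt-cq/',
#             'cros-version:butterfly-release/R40-6457.21.0')
#     # -> {'passed': 123, 'failed': 4}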


# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(models.User.current_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them.
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests. If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel. That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today. TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
        file code that uploads the kernel config file to the client and
        tells the client of the new (local) path when compiling the kernel;
        the tests must be server side tests

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
        rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                profilers))
    cf_info['control_file'] = control_file.generate_control(
        tests=test_objects, kernels=kernel, platform=label,
        profilers=profiler_objects, is_server=cf_info['is_server'],
        client_control_file=client_control_file, profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    return cf_info


def create_parameterized_job(name, priority, test, parameters, kernel=None,
                             label=None, profilers=(), profiler_parameters=None,
                             use_container=False, profile_only=None,
                             upload_kernel_config=False, hosts=(),
                             meta_hosts=(), one_time_hosts=(),
                             atomic_group_name=None, synch_count=None,
                             is_template=False, timeout=None,
                             timeout_mins=None, max_runtime_mins=None,
                             run_verify=False, email_list='', dependencies=(),
                             reboot_before=None, reboot_after=None,
                             parse_failed_repair=None, hostless=False,
                             keyvals=None, drone_set=None, run_reset=True,
                             require_ssq=None):
    """
    Creates and enqueues a parameterized job.

    Most parameters are a combination of the parameters for
    generate_control_file() and create_job(), with the exception of:

    @param test name or ID of the test to run
    @param parameters a map of parameter name ->
                          tuple of (param value, param type)
    @param profiler_parameters a dictionary of parameters for the profilers:
                                   key: profiler name
                                   value: dict of param name -> tuple of
                                                                (param value,
                                                                 param type)
    """
    # Save the values of the passed arguments here. What we're going to do with
    # them is pass them all to rpc_utils.get_create_job_common_args(), which
    # will extract the subset of these arguments that apply for
    # rpc_utils.create_job_common(), which we then pass in to that function.
    args = locals()

    # Set up the parameterized job configs
    test_obj = models.Test.smart_get(test)
    control_type = test_obj.test_type

    try:
        label = models.Label.smart_get(label)
    except models.Label.DoesNotExist:
        label = None

    kernel_objs = models.Kernel.create_kernels(kernel)
    profiler_objs = [models.Profiler.smart_get(profiler)
                     for profiler in profilers]

    parameterized_job = models.ParameterizedJob.objects.create(
            test=test_obj, label=label, use_container=use_container,
            profile_only=profile_only,
            upload_kernel_config=upload_kernel_config)
    parameterized_job.kernels.add(*kernel_objs)

    for profiler in profiler_objs:
        parameterized_profiler = models.ParameterizedJobProfiler.objects.create(
                parameterized_job=parameterized_job,
                profiler=profiler)
        profiler_params = profiler_parameters.get(profiler.name, {})
        for name, (value, param_type) in profiler_params.iteritems():
            models.ParameterizedJobProfilerParameter.objects.create(
                    parameterized_job_profiler=parameterized_profiler,
                    parameter_name=name,
                    parameter_value=value,
                    parameter_type=param_type)

    try:
        for parameter in test_obj.testparameter_set.all():
            if parameter.name in parameters:
                param_value, param_type = parameters.pop(parameter.name)
                parameterized_job.parameterizedjobparameter_set.create(
                        test_parameter=parameter, parameter_value=param_value,
                        parameter_type=param_type)

        if parameters:
            raise Exception('Extra parameters remain: %r' % parameters)

        return rpc_utils.create_job_common(
                parameterized_job=parameterized_job.id,
                control_type=control_type,
                **rpc_utils.get_create_job_common_args(args))
    except:
        parameterized_job.delete()
        raise


def create_job_page_handler(name, priority, control_file, control_type,
                            image=None, hostless=False, **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job. Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param kwargs extra args that will be required by create_suite_job or
                  create_job.

    @returns The created Job id number.
    """
    control_file = rpc_utils.encode_ascii(control_file)
    if not control_file:
        raise model_logic.ValidationError({
                'control_file' : "Control file cannot be empty"})

    if image and hostless:
        return site_rpc_interface.create_suite_job(
                name=name, control_file=control_file, priority=priority,
                build=image, **kwargs)
    return create_job(name, priority, control_file, control_type, image=image,
                      hostless=hostless, **kwargs)


def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, timeout_mins=None, max_runtime_mins=None,
               run_verify=False, email_list='', dependencies=(),
               reboot_before=None, reboot_after=None, parse_failed_repair=None,
               hostless=False, keyvals=None, drone_set=None, image=None,
               parent_job_id=None, test_retry=0, run_reset=True,
               require_ssp=None, args=(), **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job. Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous. If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param timeout_mins Minutes after this call returns until the job times
        out.
    @param max_runtime_mins Minutes from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.
    @param hostless if true, create a hostless job
    @param keyvals dict of keyvals to associate with the job
    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.
    @param drone_set The name of the drone set to run this test on.
    @param image OS image to install before running job.
    @param parent_job_id id of a job considered to be parent of created job.
    @param test_retry Number of times to retry test if the test did not
        complete successfully. (optional, default: 0)
    @param run_reset Should the host be reset before running the test?
    @param require_ssp Set to True to require server-side packaging to run the
        test. If it's set to None, drone will still try to run
        the server side with server-side packaging. If the
        autotest-server package doesn't exist for the build or
        image is not set, drone will run the test without server-
        side packaging. Default is None.
    @param args A list of args to be injected into control file.
    @param kwargs extra keyword args. NOT USED.

    @returns The created Job id number.
    """
    if args:
        control_file = tools.inject_vars({'args': args}, control_file)

    if image is None:
        return rpc_utils.create_job_common(
                **rpc_utils.get_create_job_common_args(locals()))

    # When image is supplied use a known parameterized test already in the
    # database to pass the OS image path from the front end, through the
    # scheduler, and finally to autoserv as the --image parameter.

    # The test autoupdate_ParameterizedJob is in afe_autotests and used to
    # instantiate a Test object and from there a ParameterizedJob.
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    known_parameterized_job = models.ParameterizedJob.objects.create(
            test=known_test_obj)

    # autoupdate_ParameterizedJob has a single parameter, the image parameter,
    # stored in the table afe_test_parameters. We retrieve and set this
    # instance of the parameter to the OS image path.
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    known_parameterized_job.parameterizedjobparameter_set.create(
            test_parameter=image_parameter, parameter_value=image,
            parameter_type='string')

    # By passing a parameterized_job to create_job_common the job entry in
    # the afe_jobs table will have the field parameterized_job_id set.
    # The scheduler uses this id in the afe_parameterized_jobs table to
    # match this job to our known test, and then with the
    # afe_parameterized_job_parameters table to get the actual image path.
    return rpc_utils.create_job_common(
            parameterized_job=known_parameterized_job.id,
            **rpc_utils.get_create_job_common_args(locals()))
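
# Sketch of a typical create_job call (hostname and control file path are
# hypothetical). Per the docstring, priority is an integer where higher is
# more important and control_type is 'Client' or 'Server'.
#
#     create_job(name='example_job',
#                priority=50,
#                control_file=open('control.example').read(),
#                control_type='Client',
#                hosts=['host1.cros.example'])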


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.

    @return: A list of dictionaries, each contains information
             about an aborted HQE.
    """
    query = models.HostQueueEntry.query_objects(filter_data)

    # Don't allow aborts on:
    # 1. Jobs that have already completed (whether or not they were aborted)
    # 2. Jobs that have already been aborted (but may not have completed)
    query = query.filter(complete=False).filter(aborted=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    models.HostQueueEntry.abort_host_queue_entries(host_queue_entries)
    hqe_info = [{'HostQueueEntry': hqe.id, 'Job': hqe.job_id,
                 'Job name': hqe.job.name} for hqe in host_queue_entries]
    return hqe_info
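
# Illustrative abort call (the job id is hypothetical). The filter keywords
# are Django-style lookups against HostQueueEntry, so job__id selects all
# entries of one job:
#
#     aborted = abort_host_queue_entries(job__id=12345)
#     # returns a list of dicts describing the aborted entries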


def abort_special_tasks(**filter_data):
    """\
    Abort the special task, or tasks, specified in the filter.
    """
    query = models.SpecialTask.query_objects(filter_data)
    special_tasks = query.filter(is_active=True)
    for task in special_tasks:
        task.abort()


def _call_special_tasks_on_hosts(task, hosts):
    """\
    Schedules a set of hosts for a special task.

    @returns A list of hostnames that a special task was created for.
    """
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts)
    if shard_host_map and not utils.is_shard():
        raise ValueError('The following hosts are on shards, please '
                         'follow the link to the shards and create jobs '
                         'there instead. %s.' % shard_host_map)
    for host in hosts:
        models.SpecialTask.schedule_special_task(host, task)
    return list(sorted(host.hostname for host in hosts))


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.

    @returns A list of hostnames that a verify task was created for.
    """
    hosts = models.Host.query_objects(filter_data)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts, rpc_hostnames=True)

    # Filter out hosts on a shard from those on the master, forward
    # rpcs to the shard with an additional hostname__in filter, and
    # create a local SpecialTask for each remaining host.
    if shard_host_map and not utils.is_shard():
        hosts = [h for h in hosts if h.shard is None]
        for shard, hostnames in shard_host_map.iteritems():

            # The main client of this module is the frontend website, and
            # it invokes it with an 'id' or an 'id__in' filter. Regardless,
            # the 'hostname' filter should narrow down the list of hosts on
            # each shard even though we supply all the ids in filter_data.
            # This method uses hostname instead of id because it fits better
            # with the overall architecture of redirection functions in
            # rpc_utils.
            shard_filter = filter_data.copy()
            shard_filter['hostname__in'] = hostnames
            rpc_utils.run_rpc_on_multiple_hostnames(
                    'reverify_hosts', [shard], **shard_filter)

    # There is a race condition here if someone assigns a shard to one of these
    # hosts before we create the task. The host will stay on the master if:
    # 1. The host is not Ready
    # 2. The host is Ready but has a task
    # But if the host is Ready and doesn't have a task yet, it will get sent
    # to the shard as we're creating a task here.

    # Given that we only rarely verify Ready hosts, it isn't worth putting this
    # entire method in a transaction. The worst-case scenario is a verify
    # running on a Ready host while the shard is using it; if the verify fails,
    # no subsequent tasks will be created against the host on the master, and
    # verifies are safe enough that this is OK.
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.VERIFY, hosts)


def repair_hosts(**filter_data):
    """\
    Schedules a set of hosts for repair.

    @returns A list of hostnames that a repair task was created for.
    """
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.REPAIR,
                                        models.Host.query_objects(filter_data))


def get_jobs(not_yet_run=False, running=False, finished=False,
             suite=False, sub=False, standalone=False, **filter_data):
    """\
    Extra status filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.

    Extra type filter args for get_jobs:
    -suite: Include only jobs with child jobs.
    -sub: Include only jobs with a parent job.
    -standalone: Include only jobs with no child or parent jobs.
    At most one of these three fields should be specified.
    """
    extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
                                                    running,
                                                    finished)
    filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
                                                                 suite,
                                                                 sub,
                                                                 standalone)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval, 'keyvals')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dict['keyvals'] = dict((keyval.key, keyval.value)
                                   for keyval in job.keyvals)
        if job.parameterized_job:
            job_dict['image'] = get_parameterized_autoupdate_image_url(job)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)
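
# Example queries (the owner name is hypothetical). The extra boolean filters
# are mutually exclusive within each group, as described in the docstring:
#
#     get_jobs(owner='showard', finished=True)
#     get_jobs(suite=True, not_yet_run=True)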


def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 suite=False, sub=False, standalone=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
                                                    running,
                                                    finished)
    filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
                                                                 suite,
                                                                 sub,
                                                                 standalone)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds 'status_counts' and 'result_counts' fields.

    The 'status_counts' field is a dictionary mapping status strings to the
    number of hosts with that status, e.g. {'Queued': 4, 'Running': 2}.
1064
1065 'result_counts' field is piped to tko's rpc_interface and has the return
1066 format specified under get_group_counts.
jadmanski0afbb632008-06-06 21:10:57 +00001067 """
1068 jobs = get_jobs(**filter_data)
1069 ids = [job['id'] for job in jobs]
1070 all_status_counts = models.Job.objects.get_status_counts(ids)
1071 for job in jobs:
1072 job['status_counts'] = all_status_counts[job['id']]
Jiaxi Luoaac54572014-06-04 13:57:02 -07001073 job['result_counts'] = tko_rpc_interface.get_status_counts(
1074 ['afe_job_id', 'afe_job_id'],
1075 header_groups=[['afe_job_id'], ['afe_job_id']],
1076 **{'afe_job_id': job['id']})
jadmanski0afbb632008-06-06 21:10:57 +00001077 return rpc_utils.prepare_for_serialization(jobs)
mblighe8819cd2008-02-15 16:48:40 +00001078
1079
showarda965cef2009-05-15 23:17:41 +00001080def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
showarda8709c52008-07-03 19:44:54 +00001081 """\
1082 Retrieves all the information needed to clone a job.
1083 """
showarda8709c52008-07-03 19:44:54 +00001084 job = models.Job.objects.get(id=id)
showard29f7cd22009-04-29 21:16:24 +00001085 job_info = rpc_utils.get_job_info(job,
showarda965cef2009-05-15 23:17:41 +00001086 preserve_metahosts,
1087 queue_entry_filter_data)
showard945072f2008-09-03 20:34:59 +00001088
showardd9992fe2008-07-31 02:15:03 +00001089 host_dicts = []
showard29f7cd22009-04-29 21:16:24 +00001090 for host in job_info['hosts']:
1091 host_dict = get_hosts(id=host.id)[0]
1092 other_labels = host_dict['labels']
1093 if host_dict['platform']:
1094 other_labels.remove(host_dict['platform'])
1095 host_dict['other_labels'] = ', '.join(other_labels)
showardd9992fe2008-07-31 02:15:03 +00001096 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001097
showard29f7cd22009-04-29 21:16:24 +00001098 for host in job_info['one_time_hosts']:
1099 host_dict = dict(hostname=host.hostname,
1100 id=host.id,
1101 platform='(one-time host)',
1102 locked_text='')
1103 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001104
showard4d077562009-05-08 18:24:36 +00001105 # convert keys from Label objects to strings (names of labels)
showard29f7cd22009-04-29 21:16:24 +00001106 meta_host_counts = dict((meta_host.name, count) for meta_host, count
showard4d077562009-05-08 18:24:36 +00001107 in job_info['meta_host_counts'].iteritems())
showard29f7cd22009-04-29 21:16:24 +00001108
1109 info = dict(job=job.get_object_dict(),
1110 meta_host_counts=meta_host_counts,
1111 hosts=host_dicts)
1112 info['job']['dependencies'] = job_info['dependencies']
1113 if job_info['atomic_group']:
1114 info['atomic_group_name'] = (job_info['atomic_group']).name
1115 else:
1116 info['atomic_group_name'] = None
jamesren2275ef12010-04-12 18:25:06 +00001117 info['hostless'] = job_info['hostless']
jamesren76fcf192010-04-21 20:39:50 +00001118 info['drone_set'] = job.drone_set and job.drone_set.name
showarda8709c52008-07-03 19:44:54 +00001119
Eric Lid23bc192011-02-09 14:38:57 -08001120 if job.parameterized_job:
1121 info['job']['image'] = get_parameterized_autoupdate_image_url(job)
1122
showarda8709c52008-07-03 19:44:54 +00001123 return rpc_utils.prepare_for_serialization(info)
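
# Illustrative usage of get_info_for_clone(); the job id is hypothetical:
#
#     info = get_info_for_clone(1234, preserve_metahosts=True)
#     # info carries 'job', 'meta_host_counts', 'hosts', 'hostless',
#     # 'drone_set' and 'atomic_group_name'; each entry of info['hosts'] also
#     # includes an 'other_labels' string built from its non-platform labels.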
1124
1125
showard34dc5fa2008-04-24 20:58:40 +00001126# host queue entries
1127
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001128def get_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001129 """\
showardc92da832009-04-07 18:14:34 +00001130 @returns A sequence of nested dictionaries of host and job information.
jadmanski0afbb632008-06-06 21:10:57 +00001131 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001132 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1133 'started_on__lte',
1134 start_time,
1135 end_time,
1136 **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001137 return rpc_utils.prepare_rows_as_nested_dicts(
1138 models.HostQueueEntry.query_objects(filter_data),
1139 ('host', 'atomic_group', 'job'))
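
# Illustrative usage of get_host_queue_entries(); the hostname, the Django-style
# lookup keyword and the time strings are hypothetical, and the time format is
# whatever rpc_utils.inject_times_to_filter() accepts:
#
#     get_host_queue_entries(start_time='2014-07-01', end_time='2014-07-02',
#                            host__hostname='chromeos1-rack1-host1')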
showard34dc5fa2008-04-24 20:58:40 +00001140
1141
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001142def get_num_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001143 """\
1144    Get the number of host queue entries matching the given filter data.
1145 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001146 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1147 'started_on__lte',
1148 start_time,
1149 end_time,
1150 **filter_data)
jadmanski0afbb632008-06-06 21:10:57 +00001151 return models.HostQueueEntry.query_count(filter_data)
showard34dc5fa2008-04-24 20:58:40 +00001152
1153
showard1e935f12008-07-11 00:11:36 +00001154def get_hqe_percentage_complete(**filter_data):
1155 """
showardc92da832009-04-07 18:14:34 +00001156 Computes the fraction of host queue entries matching the given filter data
showard1e935f12008-07-11 00:11:36 +00001157 that are complete.
1158 """
1159 query = models.HostQueueEntry.query_objects(filter_data)
1160 complete_count = query.filter(complete=True).count()
1161 total_count = query.count()
1162 if total_count == 0:
1163 return 1
1164 return float(complete_count) / total_count
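
# Illustrative usage of get_hqe_percentage_complete(); the filter value is
# hypothetical:
#
#     get_hqe_percentage_complete(job__id=1234)
#     # -> e.g. 0.75 when 3 of the 4 matching entries are complete
#     #    (returns 1 when the filter matches nothing).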
1165
1166
showard1a5a4082009-07-28 20:01:37 +00001167# special tasks
1168
1169def get_special_tasks(**filter_data):
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001170 """Get special task entries from the local database.
1171
1172 Query the special tasks table for tasks matching the given
1173 `filter_data`, and return a list of the results. No attempt is
1174 made to forward the call to shards; the buck will stop here.
1175 The caller is expected to know the target shard for such reasons
1176 as:
1177 * The caller is a service (such as gs_offloader) configured
1178 to operate on behalf of one specific shard, and no other.
1179 * The caller has a host as a parameter, and knows that this is
1180 the shard assigned to that host.
1181
1182 @param filter_data Filter keywords to pass to the underlying
1183 database query.
1184
1185 """
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001186 return rpc_utils.prepare_rows_as_nested_dicts(
1187 models.SpecialTask.query_objects(filter_data),
1188 ('host', 'queue_entry'))
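
# Illustrative usage of get_special_tasks(); the filter keywords are assumed to
# follow the SpecialTask model fields, and the values are hypothetical:
#
#     get_special_tasks(task='Repair', is_complete=True, success=False)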
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001189
1190
1191def get_host_special_tasks(host_id, **filter_data):
1192 """Get special task entries for a given host.
1193
1194 Query the special tasks table for tasks that ran on the host
1195 given by `host_id` and matching the given `filter_data`.
1196 Return a list of the results. If the host is assigned to a
1197 shard, forward this call to that shard.
1198
1199 @param host_id Id in the database of the target host.
1200 @param filter_data Filter keywords to pass to the underlying
1201 database query.
1202
1203 """
MK Ryu0c1a37d2015-04-30 12:00:55 -07001204 # Retrieve host data even if the host is in an invalid state.
1205 host = models.Host.smart_get(host_id, False)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001206 if not host.shard:
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001207 return get_special_tasks(host_id=host_id, **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001208 else:
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001209 # The return values from AFE methods are post-processed
1210 # objects that aren't JSON-serializable. So, we have to
1211 # call AFE.run() to get the raw, serializable output from
1212 # the shard.
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001213 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1214 return shard_afe.run('get_special_tasks',
1215 host_id=host_id, **filter_data)
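
# Illustrative usage of get_host_special_tasks(); the host id and filter values
# are hypothetical:
#
#     get_host_special_tasks(37, task='Verify', is_complete=True)
#     # The result format matches get_special_tasks(), whether the rows come
#     # from the local database or from the host's owning shard.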
showard1a5a4082009-07-28 20:01:37 +00001216
1217
MK Ryu0c1a37d2015-04-30 12:00:55 -07001218def get_num_special_tasks(**kwargs):
1219 """Get the number of special task entries from the local database.
1220
1221 Query the special tasks table for tasks matching the given 'kwargs',
1222 and return the number of the results. No attempt is made to forward
1223 the call to shards; the buck will stop here.
1224
1225 @param kwargs Filter keywords to pass to the underlying database query.
1226
1227 """
1228 return models.SpecialTask.query_count(kwargs)
1229
1230
1231def get_host_num_special_tasks(host, **kwargs):
1232    """Get the number of special task entries for a given host.
1233
1234 Query the special tasks table for tasks that ran on the host
1235 given by 'host' and matching the given 'kwargs'.
1236 Return a list of the results. If the host is assigned to a
1237    Return the number of matching tasks. If the host is assigned to a
1238
1239    @param host Id or name of a host (more often a hostname).
1240 @param kwargs Filter keywords to pass to the underlying database query.
1241
1242 """
1243 # Retrieve host data even if the host is in an invalid state.
1244 host_model = models.Host.smart_get(host, False)
1245 if not host_model.shard:
1246 return get_num_special_tasks(host=host, **kwargs)
1247 else:
1248 shard_afe = frontend.AFE(server=host_model.shard.rpc_hostname())
1249 return shard_afe.run('get_num_special_tasks', host=host, **kwargs)
1250
1251
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001252def get_status_task(host_id, end_time):
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001253 """Get the "status task" for a host from the local shard.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001254
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001255 Returns a single special task representing the given host's
1256 "status task". The status task is a completed special task that
1257 identifies whether the corresponding host was working or broken
1258 when it completed. A successful task indicates a working host;
1259    a failed task indicates a broken host.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001260
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001261    This call will not be forwarded to a shard; the receiving server
1262 must be the shard that owns the host.
1263
1264 @param host_id Id in the database of the target host.
1265 @param end_time Time reference for the host's status.
1266
1267 @return A single task; its status (successful or not)
1268 corresponds to the status of the host (working or
1269 broken) at the given time. If no task is found, return
1270 `None`.
1271
1272 """
1273 tasklist = rpc_utils.prepare_rows_as_nested_dicts(
1274 status_history.get_status_task(host_id, end_time),
1275 ('host', 'queue_entry'))
1276 return tasklist[0] if tasklist else None
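
# Illustrative sketch of how a caller might use get_status_task(); the host id
# is hypothetical, and end_time must be in whatever form
# status_history.get_status_task() expects:
#
#     task = get_status_task(37, end_time)
#     host_was_working = bool(task and task['success'])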
1277
1278
1279def get_host_status_task(host_id, end_time):
1280 """Get the "status task" for a host from its owning shard.
1281
1282 Finds the given host's owning shard, and forwards to it a call
1283 to `get_status_task()` (see above).
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001284
1285 @param host_id Id in the database of the target host.
1286 @param end_time Time reference for the host's status.
1287
1288 @return A single task; its status (successful or not)
1289 corresponds to the status of the host (working or
1290 broken) at the given time. If no task is found, return
1291 `None`.
1292
1293 """
1294 host = models.Host.smart_get(host_id)
1295 if not host.shard:
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001296 return get_status_task(host_id, end_time)
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001297 else:
1298 # The return values from AFE methods are post-processed
1299 # objects that aren't JSON-serializable. So, we have to
1300 # call AFE.run() to get the raw, serializable output from
1301 # the shard.
1302 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1303 return shard_afe.run('get_status_task',
1304 host_id=host_id, end_time=end_time)
1305
1306
showardc0ac3a72009-07-08 21:14:45 +00001307# support for host detail view
1308
MK Ryu0c1a37d2015-04-30 12:00:55 -07001309def get_host_queue_entries_and_special_tasks(host, query_start=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001310 query_limit=None, start_time=None,
1311 end_time=None):
showardc0ac3a72009-07-08 21:14:45 +00001312 """
1313 @returns an interleaved list of HostQueueEntries and SpecialTasks,
1314    in approximate run order. Each dict contains keys for type, host,
1315 job, status, started_on, execution_path, and ID.
1316 """
1317 total_limit = None
1318 if query_limit is not None:
1319        total_limit = (query_start or 0) + query_limit  # query_start may be None
MK Ryu0c1a37d2015-04-30 12:00:55 -07001320 filter_data_common = {'host': host,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001321 'query_limit': total_limit,
1322 'sort_by': ['-id']}
showardc0ac3a72009-07-08 21:14:45 +00001323
MK Ryu0c1a37d2015-04-30 12:00:55 -07001324 filter_data_special_tasks = rpc_utils.inject_times_to_filter(
1325 'time_started__gte', 'time_started__lte', start_time, end_time,
1326 **filter_data_common)
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001327
MK Ryu0c1a37d2015-04-30 12:00:55 -07001328 queue_entries = get_host_queue_entries(
1329 start_time, end_time, **filter_data_common)
1330 special_tasks = get_host_special_tasks(host, **filter_data_special_tasks)
showardc0ac3a72009-07-08 21:14:45 +00001331
1332 interleaved_entries = rpc_utils.interleave_entries(queue_entries,
1333 special_tasks)
1334 if query_start is not None:
1335 interleaved_entries = interleaved_entries[query_start:]
1336 if query_limit is not None:
1337 interleaved_entries = interleaved_entries[:query_limit]
MK Ryu0c1a37d2015-04-30 12:00:55 -07001338 return rpc_utils.prepare_host_queue_entries_and_special_tasks(
1339 interleaved_entries, queue_entries)
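
# Illustrative usage of get_host_queue_entries_and_special_tasks(); the
# hostname, paging and time values are hypothetical:
#
#     get_host_queue_entries_and_special_tasks(
#             'chromeos1-rack1-host1', query_start=0, query_limit=20,
#             start_time='2014-07-01', end_time='2014-07-02')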
showardc0ac3a72009-07-08 21:14:45 +00001340
1341
MK Ryu0c1a37d2015-04-30 12:00:55 -07001342def get_num_host_queue_entries_and_special_tasks(host, start_time=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001343 end_time=None):
MK Ryu0c1a37d2015-04-30 12:00:55 -07001344 filter_data_common = {'host': host}
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001345
1346 filter_data_queue_entries, filter_data_special_tasks = (
1347 rpc_utils.inject_times_to_hqe_special_tasks_filters(
1348 filter_data_common, start_time, end_time))
1349
1350 return (models.HostQueueEntry.query_count(filter_data_queue_entries)
MK Ryu0c1a37d2015-04-30 12:00:55 -07001351 + get_host_num_special_tasks(**filter_data_special_tasks))
showardc0ac3a72009-07-08 21:14:45 +00001352
1353
showard29f7cd22009-04-29 21:16:24 +00001354# recurring run
1355
1356def get_recurring(**filter_data):
1357 return rpc_utils.prepare_rows_as_nested_dicts(
1358 models.RecurringRun.query_objects(filter_data),
1359 ('job', 'owner'))
1360
1361
1362def get_num_recurring(**filter_data):
1363 return models.RecurringRun.query_count(filter_data)
1364
1365
1366def delete_recurring_runs(**filter_data):
1367 to_delete = models.RecurringRun.query_objects(filter_data)
1368 to_delete.delete()
1369
1370
1371def create_recurring_run(job_id, start_date, loop_period, loop_count):
showard64a95952010-01-13 21:27:16 +00001372 owner = models.User.current_user().login
showard29f7cd22009-04-29 21:16:24 +00001373 job = models.Job.objects.get(id=job_id)
1374 return job.create_recurring_job(start_date=start_date,
1375 loop_period=loop_period,
1376 loop_count=loop_count,
1377 owner=owner)
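
# Illustrative usage of create_recurring_run(); the job id, start date and loop
# values are hypothetical, and the units of loop_period are assumed to be
# seconds:
#
#     create_recurring_run(job_id=1234,
#                          start_date=datetime.datetime(2015, 5, 1),
#                          loop_period=3600,
#                          loop_count=10)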
1378
1379
mblighe8819cd2008-02-15 16:48:40 +00001380# other
1381
showarde0b63622008-08-04 20:58:47 +00001382def echo(data=""):
1383 """\
1384    Returns the passed-in string, as a basic test that RPC calls
1385    can be made successfully.
1386 """
1387 return data
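
# Illustrative usage of echo():
#
#     echo('ping')    # -> 'ping'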
1388
1389
showardb7a52fd2009-04-27 20:10:56 +00001390def get_motd():
1391 """\
1392 Returns the message of the day as a string.
1393 """
1394 return rpc_utils.get_motd()
1395
1396
mblighe8819cd2008-02-15 16:48:40 +00001397def get_static_data():
jadmanski0afbb632008-06-06 21:10:57 +00001398 """\
1399 Returns a dictionary containing a bunch of data that shouldn't change
1400 often and is otherwise inaccessible. This includes:
showardc92da832009-04-07 18:14:34 +00001401
1402 priorities: List of job priority choices.
1403 default_priority: Default priority value for new jobs.
1404 users: Sorted list of all users.
Jiaxi Luo31874592014-06-11 10:36:35 -07001405    labels: Sorted list of labels that do not start with 'cros-version'
1406             or 'fw-version'.
showardc92da832009-04-07 18:14:34 +00001407 atomic_groups: Sorted list of all atomic groups.
1408 tests: Sorted list of all tests.
1409 profilers: Sorted list of all profilers.
1410 current_user: Logged-in username.
1411 host_statuses: Sorted list of possible Host statuses.
1412 job_statuses: Sorted list of possible HostQueueEntry statuses.
Simran Basi7e605742013-11-12 13:43:36 -08001413    job_timeout_mins_default: The default job timeout length in minutes.
showarda1e74b32009-05-12 17:32:04 +00001414 parse_failed_repair_default: Default value for the parse_failed_repair job
Jiaxi Luo31874592014-06-11 10:36:35 -07001415 option.
showardc92da832009-04-07 18:14:34 +00001416 reboot_before_options: A list of valid RebootBefore string enums.
1417 reboot_after_options: A list of valid RebootAfter string enums.
1418 motd: Server's message of the day.
1419 status_dictionary: A mapping from one word job status names to a more
1420 informative description.
jadmanski0afbb632008-06-06 21:10:57 +00001421 """
showard21baa452008-10-21 00:08:39 +00001422
1423 job_fields = models.Job.get_field_dict()
jamesren76fcf192010-04-21 20:39:50 +00001424 default_drone_set_name = models.DroneSet.default_drone_set_name()
1425 drone_sets = ([default_drone_set_name] +
1426 sorted(drone_set.name for drone_set in
1427 models.DroneSet.objects.exclude(
1428 name=default_drone_set_name)))
showard21baa452008-10-21 00:08:39 +00001429
jadmanski0afbb632008-06-06 21:10:57 +00001430 result = {}
Alex Miller7d658cf2013-09-04 16:00:35 -07001431 result['priorities'] = priorities.Priority.choices()
1432 default_priority = priorities.Priority.DEFAULT
1433 result['default_priority'] = 'Default'
1434 result['max_schedulable_priority'] = priorities.Priority.DEFAULT
jadmanski0afbb632008-06-06 21:10:57 +00001435 result['users'] = get_users(sort_by=['login'])
Jiaxi Luo31874592014-06-11 10:36:35 -07001436
1437 label_exclude_filters = [{'name__startswith': 'cros-version'},
1438 {'name__startswith': 'fw-version'}]
1439 result['labels'] = get_labels(
1440 label_exclude_filters,
1441 sort_by=['-platform', 'name'])
1442
showardc92da832009-04-07 18:14:34 +00001443 result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
jadmanski0afbb632008-06-06 21:10:57 +00001444 result['tests'] = get_tests(sort_by=['name'])
showard2b9a88b2008-06-13 20:55:03 +00001445 result['profilers'] = get_profilers(sort_by=['name'])
showard0fc38302008-10-23 00:44:07 +00001446 result['current_user'] = rpc_utils.prepare_for_serialization(
showard64a95952010-01-13 21:27:16 +00001447 models.User.current_user().get_object_dict())
showard2b9a88b2008-06-13 20:55:03 +00001448 result['host_statuses'] = sorted(models.Host.Status.names)
mbligh5a198b92008-12-11 19:33:29 +00001449 result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
Simran Basi7e605742013-11-12 13:43:36 -08001450 result['job_timeout_mins_default'] = models.Job.DEFAULT_TIMEOUT_MINS
Simran Basi34217022012-11-06 13:43:15 -08001451 result['job_max_runtime_mins_default'] = (
1452 models.Job.DEFAULT_MAX_RUNTIME_MINS)
showarda1e74b32009-05-12 17:32:04 +00001453 result['parse_failed_repair_default'] = bool(
1454 models.Job.DEFAULT_PARSE_FAILED_REPAIR)
jamesrendd855242010-03-02 22:23:44 +00001455 result['reboot_before_options'] = model_attributes.RebootBefore.names
1456 result['reboot_after_options'] = model_attributes.RebootAfter.names
showard8fbae652009-01-20 23:23:10 +00001457 result['motd'] = rpc_utils.get_motd()
jamesren76fcf192010-04-21 20:39:50 +00001458 result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
1459 result['drone_sets'] = drone_sets
jamesren4a41e012010-07-16 22:33:48 +00001460 result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
showard8ac29b42008-07-17 17:01:55 +00001461
showardd3dc1992009-04-22 21:01:40 +00001462 result['status_dictionary'] = {"Aborted": "Aborted",
showard8ac29b42008-07-17 17:01:55 +00001463 "Verifying": "Verifying Host",
Alex Millerdfff2fd2013-05-28 13:05:06 -07001464 "Provisioning": "Provisioning Host",
showard8ac29b42008-07-17 17:01:55 +00001465 "Pending": "Waiting on other hosts",
1466 "Running": "Running autoserv",
1467 "Completed": "Autoserv completed",
1468 "Failed": "Failed to complete",
showardd823b362008-07-24 16:35:46 +00001469 "Queued": "Queued",
showard5deb6772008-11-04 21:54:33 +00001470 "Starting": "Next in host's queue",
1471 "Stopped": "Other host(s) failed verify",
showardd3dc1992009-04-22 21:01:40 +00001472 "Parsing": "Awaiting parse of final results",
showard29f7cd22009-04-29 21:16:24 +00001473 "Gathering": "Gathering log files",
showard8cc058f2009-09-08 16:26:33 +00001474 "Template": "Template job for recurring run",
mbligh4608b002010-01-05 18:22:35 +00001475 "Waiting": "Waiting for scheduler action",
Dan Shi07e09af2013-04-12 09:31:29 -07001476 "Archiving": "Archiving results",
1477 "Resetting": "Resetting hosts"}
Jiaxi Luo421608e2014-07-07 14:38:00 -07001478
1479 result['wmatrix_url'] = rpc_utils.get_wmatrix_url()
Simran Basi71206ef2014-08-13 13:51:18 -07001480 result['is_moblab'] = bool(utils.is_moblab())
Jiaxi Luo421608e2014-07-07 14:38:00 -07001481
jadmanski0afbb632008-06-06 21:10:57 +00001482 return result
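
# Illustrative usage of get_static_data(); only a few of the returned keys are
# shown:
#
#     static = get_static_data()
#     static['priorities']         # job priority choices
#     static['current_user']       # dict describing the logged-in user
#     static['status_dictionary']  # one-word status -> description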
showard29f7cd22009-04-29 21:16:24 +00001483
1484
1485def get_server_time():
1486 return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
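
# Illustrative usage of get_server_time():
#
#     get_server_time()    # -> e.g. '2015-05-01 12:34'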