# pylint: disable-msg=C0111

"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing uniquely identifying field (this option should seldom
   be used)

When specifying foreign key fields (i.e. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or the
name of the object.

All get* functions return lists of dictionaries.  Each dictionary represents one
object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import sys
import datetime

from django.db.models import Count
import common
from autotest_lib.client.common_lib import priorities
from autotest_lib.client.common_lib.cros.graphite import autotest_stats
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.frontend.afe import models, model_logic, model_attributes
from autotest_lib.frontend.afe import site_rpc_interface
from autotest_lib.frontend.tko import models as tko_models
from autotest_lib.frontend.tko import rpc_interface as tko_rpc_interface
from autotest_lib.server import frontend
from autotest_lib.server import utils
from autotest_lib.server.cros.dynamic_suite import tools
from autotest_lib.site_utils import status_history


_timer = autotest_stats.Timer('rpc_interface')

def get_parameterized_autoupdate_image_url(job):
    """Get the parameterized autoupdate image url from a parameterized job."""
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    para_set = job.parameterized_job.parameterizedjobparameter_set
    job_test_para = para_set.get(test_parameter=image_parameter)
    return job_test_para.parameter_value


# labels

def modify_label(id, **data):
    models.Label.smart_get(id).update_object(data)


def delete_label(id):
    models.Label.smart_get(id).delete()


def add_label(name, ignore_exception_if_exists=False, **kwargs):
    """Adds a new label of a given name.

    @param name: label name.
    @param ignore_exception_if_exists: If True and the exception was
        thrown due to the duplicated label name when adding a label,
        then suppress the exception. Default is False.
    @param kwargs: keyword args that store more info about a label
        other than the name.
    @return: int/long id of a new label.
    """
    # models.Label.add_object() throws model_logic.ValidationError
    # when it is given a label name that already exists.
    # However, ValidationError can be thrown with different errors,
    # and those errors should be thrown up to the call chain.
    try:
        label = models.Label.add_object(name=name, **kwargs)
    except:
        exc_info = sys.exc_info()
        if ignore_exception_if_exists:
            label = rpc_utils.get_label(name)
            # If the exception is raised not because of duplicated
            # "name", then raise the original exception.
            if label is None:
                raise exc_info[0], exc_info[1], exc_info[2]
        else:
            raise exc_info[0], exc_info[1], exc_info[2]
    return label.id
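
# A hedged usage sketch for add_label() above; 'webcam' is an assumed example
# label name, not something defined in this module.
#
#     label_id = add_label('webcam')
#     # Calling add_label('webcam') again would raise
#     # model_logic.ValidationError because the name already exists;
#     # passing ignore_exception_if_exists=True returns the existing id:
#     label_id = add_label('webcam', ignore_exception_if_exists=True)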


def add_label_to_hosts(id, hosts):
    """Adds a label of the given id to the given hosts only in local DB.

    @param id: id or name of a label. More often a label name.
    @param hosts: The hostnames of hosts that need the label.

    @raises models.Label.DoesNotExist: If the label with id doesn't exist.
    """
    label = models.Label.smart_get(id)
    host_objs = models.Host.smart_get_bulk(hosts)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)


def label_add_hosts(id, hosts):
    """Adds a label with the given id to the given hosts.

    This method should be run only on the master, not on shards.
    The given label will be created if it doesn't exist, provided the `id`
    supplied is a label name, not an int/long id.

    @param id: id or name of a label. More often a label name.
    @param hosts: A list of hostnames or ids. More often hostnames.

    @raises ValueError: If the id specified is an int/long (label id)
                        while the label does not exist.
    """
    # This RPC call should be accepted only by the master.
    if utils.is_shard():
        rpc_utils.route_rpc_to_master('label_add_hosts', id=id, hosts=hosts)
        return

    try:
        label = models.Label.smart_get(id)
    except models.Label.DoesNotExist:
        # This matches the type checks in smart_get, which is a hack
        # in and of itself. The aim here is to create any non-existent
        # label, which we cannot do if the 'id' specified isn't a label name.
        if isinstance(id, basestring):
            label = models.Label.smart_get(add_label(id))
        else:
            raise ValueError('Label id (%s) does not exist. Please specify '
                             'the argument, id, as a string (label name).'
                             % id)

    host_objs = models.Host.smart_get_bulk(hosts)
    # Make sure the label exists on the shard with the same id
    # as it is on the master.
    # It is possible that the label is already on a shard, because
    # we add a new label only to the shards of hosts to which the label
    # is going to be attached.
    # For example, we add a label L1 to a host in shard S1.
    # The master and S1 will have L1, but other shards won't.
    # Later, when we add the same label L1 to hosts in shards S1 and S2,
    # S1 already has the label but S2 doesn't.
    # S2 should get the new label without any problem.
    # We ignore the exception in such a case.
    rpc_utils.fanout_rpc(
            host_objs, 'add_label', name=label.name, id=label.id,
            include_hostnames=False, ignore_exception_if_exists=True)
    rpc_utils.fanout_rpc(host_objs, 'add_label_to_hosts', id=id)

    add_label_to_hosts(id, hosts)
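
# A hedged usage sketch for label_add_hosts() above; the label name and
# hostnames are assumed example values. Because the id is given as a string,
# the label is created if it does not exist yet, and the change is fanned out
# to the shards of the affected hosts as described in the comments above.
#
#     label_add_hosts(id='pool:suites', hosts=['host1', 'host2'])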


def remove_label_from_hosts(id, hosts):
    """Removes a label of the given id from the given hosts only in local DB.

    @param id: id or name of a label.
    @param hosts: The hostnames of hosts from which to remove the label.
    """
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


def label_remove_hosts(id, hosts):
    """Removes a label of the given id from the given hosts.

    This method should be run only on the master, not on shards.

    @param id: id or name of a label.
    @param hosts: A list of hostnames or ids. More often hostnames.
    """
    # This RPC call should be accepted only by the master.
    if utils.is_shard():
        rpc_utils.route_rpc_to_master('label_remove_hosts', id=id, hosts=hosts)
        return

    host_objs = models.Host.smart_get_bulk(hosts)
    rpc_utils.fanout_rpc(host_objs, 'remove_label_from_hosts', id=id)

    remove_label_from_hosts(id, hosts)


def get_labels(exclude_filters=(), **filter_data):
    """\
    @param exclude_filters: A sequence of dictionaries of filters.

    @returns A sequence of nested dictionaries of label information.
    """
    labels = models.Label.query_objects(filter_data)
    for exclude_filter in exclude_filters:
        labels = labels.exclude(**exclude_filter)
    return rpc_utils.prepare_rows_as_nested_dicts(labels, ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, lock_reason='', protection=None):
    if locked and not lock_reason:
        raise model_logic.ValidationError(
            {'locked': 'Please provide a reason for locking when adding host.'})

    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, lock_reason=lock_reason,
                                  protection=protection).id


@rpc_utils.forward_single_host_rpc_to_shard
def modify_host(id, **data):
    """Modify local attributes of a host.

    If this is called on the master, but the host is assigned to a shard,
    the call will also be forwarded to the responsible shard. This means,
    e.g., that if a host is locked using this function, the change will also
    propagate to shards.

    @param id: id of the host to modify.
    @param **data: key=value pairs of values to set on the host.
    """
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)

    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """Modify local attributes of multiple hosts.

    If this is called on the master, but one of the hosts that match the
    filters is assigned to a shard, the call will also be forwarded to the
    responsible shard.

    The filters are always applied on the master, not on the shards. This
    means that if the state of a host differs on the master and a shard, the
    state on the master will be used. For example, suppose a host was synced
    to Shard 1, and on Shard 1 the status of the host was set to
    'Repair Failed'.
    - A call to modify_hosts with host_filter_data={'status': 'Ready'} will
      update the host (both on the shard and on the master), because the state
      of the host as the master knows it is still 'Ready'.
    - A call to modify_hosts with host_filter_data={'status': 'Repair Failed'}
      will not update the host, because the filter doesn't apply on the master.

    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)

    affected_shard_hostnames = set()
    affected_host_ids = []

    # Check all hosts before changing data for exception safety.
    for host in hosts:
        rpc_utils.check_modify_host_locking(host, update_data)
        if host.shard:
            affected_shard_hostnames.add(host.shard.rpc_hostname())
            affected_host_ids.append(host.id)

    if not utils.is_shard():
        # Caution: Changing the filter from the original here. See docstring.
        rpc_utils.run_rpc_on_multiple_hostnames(
                'modify_hosts', affected_shard_hostnames,
                host_filter_data={'id__in': affected_host_ids},
                update_data=update_data)

    for host in hosts:
        host.update_object(update_data)
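
# A hedged usage sketch for modify_hosts() above; the filter and update values
# are assumed examples (in particular, 'lock_reason' is assumed to be a valid
# host field here). The filter is evaluated on the master and the update is
# forwarded to any shard that owns a matching host.
#
#     modify_hosts(host_filter_data={'status': 'Ready'},
#                  update_data={'locked': True, 'lock_reason': 'maintenance'})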


def host_add_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    host = models.Host.smart_get(id)

    platforms = [label.name for label in labels if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})
    if len(platforms) == 1:
        models.Host.check_no_platform([host])
    host.labels.add(*labels)


def host_remove_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*labels)


def get_host_attribute(attribute, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    hosts = rpc_utils.get_host_query((), False, False, True, host_filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_attr_dicts = []
    for host_obj in hosts:
        for attr_obj in host_obj.attribute_list:
            if attr_obj.attribute == attribute:
                host_attr_dicts.append(attr_obj.get_object_dict())
    return rpc_utils.prepare_for_serialization(host_attr_dicts)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute: string name of attribute
    @param value: string, or None to delete an attribute
    @param host_filter_data: filter data to apply to Hosts to choose hosts to
                             act upon
    """
    assert host_filter_data # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    # Master forwards this RPC to shards.
    if not utils.is_shard():
        rpc_utils.fanout_rpc(hosts, 'set_host_attribute', False,
                attribute=attribute, value=value, **host_filter_data)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)
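
# A hedged usage sketch for set_host_attribute() above; the attribute name and
# hostname are assumed example values.
#
#     set_host_attribute('job_repo_url', 'http://devserver/static/build',
#                        hostname='host1')
#     # Passing value=None deletes the attribute again:
#     set_host_attribute('job_repo_url', None, hostname='host1')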


@rpc_utils.forward_single_host_rpc_to_shard
def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True, **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given.  Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                                'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                                'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Test.list_objects(filter_data))


@_timer.decorate
def get_tests_status_counts_by_job_name_label(job_name_prefix, label_name):
    """Gets the counts of all passed and failed tests from the matching jobs.

    @param job_name_prefix: Name prefix of the jobs to get the summary from,
            e.g., 'butterfly-release/R40-6457.21.0/bvt-cq/'.
    @param label_name: Label that must be set in the jobs, e.g.,
            'cros-version:butterfly-release/R40-6457.21.0'.

    @returns A summary of the counts of all the passed and failed tests.
    """
    job_ids = list(models.Job.objects.filter(
            name__startswith=job_name_prefix,
            dependency_labels__name=label_name).values_list(
            'pk', flat=True))
    summary = {'passed': 0, 'failed': 0}
    if not job_ids:
        return summary

    counts = (tko_models.TestView.objects.filter(
            afe_job_id__in=job_ids).exclude(
            test_name='SERVER_JOB').exclude(
            test_name__startswith='CLIENT_JOB').values(
            'status').annotate(
            count=Count('status')))
    for status in counts:
        if status['status'] == 'GOOD':
            summary['passed'] += status['count']
        else:
            summary['failed'] += status['count']
    return summary


# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(models.User.current_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them.
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests.  If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel.  That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today.  TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
        file code that uploads the kernel config file to the client and
        tells the client of the new (local) path when compiling the kernel;
        the tests must be server side tests.

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
        rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                profilers))
    cf_info['control_file'] = control_file.generate_control(
        tests=test_objects, kernels=kernel, platform=label,
        profilers=profiler_objects, is_server=cf_info['is_server'],
        client_control_file=client_control_file, profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    return cf_info
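
# A hedged usage sketch for generate_control_file() above; 'sleeptest' and
# 'dbench' are assumed test names.
#
#     cf_info = generate_control_file(tests=['sleeptest', 'dbench'])
#     control_text = cf_info['control_file']   # generated control file text
#     is_server = cf_info['is_server']         # False for client-side tests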


def create_parameterized_job(name, priority, test, parameters, kernel=None,
                             label=None, profilers=(), profiler_parameters=None,
                             use_container=False, profile_only=None,
                             upload_kernel_config=False, hosts=(),
                             meta_hosts=(), one_time_hosts=(),
                             atomic_group_name=None, synch_count=None,
                             is_template=False, timeout=None,
                             timeout_mins=None, max_runtime_mins=None,
                             run_verify=False, email_list='', dependencies=(),
                             reboot_before=None, reboot_after=None,
                             parse_failed_repair=None, hostless=False,
                             keyvals=None, drone_set=None, run_reset=True,
                             require_ssp=None):
    """
    Creates and enqueues a parameterized job.

    Most parameters are a combination of the parameters for
    generate_control_file() and create_job(), with the exception of:

    @param test name or ID of the test to run
    @param parameters a map of parameter name ->
                      tuple of (param value, param type)
    @param profiler_parameters a dictionary of parameters for the profilers:
                               key: profiler name
                               value: dict of param name -> tuple of
                                                            (param value,
                                                             param type)
    """
    # Save the values of the passed arguments here. What we're going to do with
    # them is pass them all to rpc_utils.get_create_job_common_args(), which
    # will extract the subset of these arguments that apply for
    # rpc_utils.create_job_common(), which we then pass in to that function.
    args = locals()

    # Set up the parameterized job configs
    test_obj = models.Test.smart_get(test)
    control_type = test_obj.test_type

    try:
        label = models.Label.smart_get(label)
    except models.Label.DoesNotExist:
        label = None

    kernel_objs = models.Kernel.create_kernels(kernel)
    profiler_objs = [models.Profiler.smart_get(profiler)
                     for profiler in profilers]

    parameterized_job = models.ParameterizedJob.objects.create(
            test=test_obj, label=label, use_container=use_container,
            profile_only=profile_only,
            upload_kernel_config=upload_kernel_config)
    parameterized_job.kernels.add(*kernel_objs)

    for profiler in profiler_objs:
        parameterized_profiler = models.ParameterizedJobProfiler.objects.create(
                parameterized_job=parameterized_job,
                profiler=profiler)
        profiler_params = profiler_parameters.get(profiler.name, {})
        for name, (value, param_type) in profiler_params.iteritems():
            models.ParameterizedJobProfilerParameter.objects.create(
                    parameterized_job_profiler=parameterized_profiler,
                    parameter_name=name,
                    parameter_value=value,
                    parameter_type=param_type)

    try:
        for parameter in test_obj.testparameter_set.all():
            if parameter.name in parameters:
                param_value, param_type = parameters.pop(parameter.name)
                parameterized_job.parameterizedjobparameter_set.create(
                        test_parameter=parameter, parameter_value=param_value,
                        parameter_type=param_type)

        if parameters:
            raise Exception('Extra parameters remain: %r' % parameters)

        return rpc_utils.create_job_common(
                parameterized_job=parameterized_job.id,
                control_type=control_type,
                **rpc_utils.get_create_job_common_args(args))
    except:
        parameterized_job.delete()
        raise


def create_job_page_handler(name, priority, control_file, control_type,
                            image=None, hostless=False, **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job.  Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param kwargs extra args that will be required by create_suite_job or
                  create_job.

    @returns The created Job id number.
    """
    control_file = rpc_utils.encode_ascii(control_file)
    if not control_file:
        raise model_logic.ValidationError({
                'control_file' : "Control file cannot be empty"})

    if image and hostless:
        return site_rpc_interface.create_suite_job(
                name=name, control_file=control_file, priority=priority,
                build=image, **kwargs)
    return create_job(name, priority, control_file, control_type, image=image,
                      hostless=hostless, **kwargs)


def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, timeout_mins=None, max_runtime_mins=None,
               run_verify=False, email_list='', dependencies=(),
               reboot_before=None, reboot_after=None, parse_failed_repair=None,
               hostless=False, keyvals=None, drone_set=None, image=None,
               parent_job_id=None, test_retry=0, run_reset=True,
               require_ssp=None, args=(), **kwargs):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Integer priority of this job.  Higher is more important.
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous.  If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param timeout_mins Minutes after this call returns until the job times
        out.
    @param max_runtime_mins Minutes from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.
    @param hostless if true, create a hostless job
    @param keyvals dict of keyvals to associate with the job
    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.
    @param drone_set The name of the drone set to run this test on.
    @param image OS image to install before running job.
    @param parent_job_id id of a job considered to be parent of created job.
    @param test_retry Number of times to retry test if the test did not
        complete successfully. (optional, default: 0)
    @param run_reset Should the host be reset before running the test?
    @param require_ssp Set to True to require server-side packaging to run the
                       test. If it's set to None, drone will still try to run
                       the server side with server-side packaging. If the
                       autotest-server package doesn't exist for the build or
                       image is not set, drone will run the test without server-
                       side packaging. Default is None.
    @param args A list of args to be injected into control file.
    @param kwargs extra keyword args. NOT USED.

    @returns The created Job id number.
    """
    if args:
        control_file = tools.inject_vars({'args': args}, control_file)

    if image is None:
        return rpc_utils.create_job_common(
                **rpc_utils.get_create_job_common_args(locals()))

    # When image is supplied use a known parameterized test already in the
    # database to pass the OS image path from the front end, through the
    # scheduler, and finally to autoserv as the --image parameter.

    # The test autoupdate_ParameterizedJob is in afe_autotests and used to
    # instantiate a Test object and from there a ParameterizedJob.
    known_test_obj = models.Test.smart_get('autoupdate_ParameterizedJob')
    known_parameterized_job = models.ParameterizedJob.objects.create(
            test=known_test_obj)

    # autoupdate_ParameterizedJob has a single parameter, the image parameter,
    # stored in the table afe_test_parameters. We retrieve and set this
    # instance of the parameter to the OS image path.
    image_parameter = known_test_obj.testparameter_set.get(test=known_test_obj,
                                                           name='image')
    known_parameterized_job.parameterizedjobparameter_set.create(
            test_parameter=image_parameter, parameter_value=image,
            parameter_type='string')

    # By passing a parameterized_job to create_job_common the job entry in
    # the afe_jobs table will have the field parameterized_job_id set.
    # The scheduler uses this id in the afe_parameterized_jobs table to
    # match this job to our known test, and then with the
    # afe_parameterized_job_parameters table to get the actual image path.
    return rpc_utils.create_job_common(
            parameterized_job=known_parameterized_job.id,
            **rpc_utils.get_create_job_common_args(locals()))
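
# A hedged usage sketch for create_job() above; the job name, priority value,
# control file and hostnames are assumed examples.
#
#     control_text = generate_control_file(tests=['sleeptest'])['control_file']
#     job_id = create_job(name='example_sleeptest',
#                         priority=10,  # integer; higher is more important
#                         control_file=control_text,
#                         control_type='Client',
#                         hosts=['host1', 'host2'],
#                         synch_count=1)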


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.

    @return: A list of dictionaries, each containing information
             about an aborted HQE.
    """
    query = models.HostQueueEntry.query_objects(filter_data)

    # Don't allow aborts on:
    # 1. Jobs that have already completed (whether or not they were aborted)
    # 2. Jobs that have already been aborted (but may not have completed)
    query = query.filter(complete=False).filter(aborted=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    models.HostQueueEntry.abort_host_queue_entries(host_queue_entries)
    hqe_info = [{'HostQueueEntry': hqe.id, 'Job': hqe.job_id,
                 'Job name': hqe.job.name} for hqe in host_queue_entries]
    return hqe_info
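
# A hedged usage sketch for abort_host_queue_entries() above; the job id is an
# assumed example, and 'job__id' is the usual Django-style filter keyword for
# the entry's parent job.
#
#     aborted = abort_host_queue_entries(job__id=42)
#     # Each returned dict describes one aborted HQE, e.g. its
#     # 'HostQueueEntry' id, 'Job' id and 'Job name'.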


def abort_special_tasks(**filter_data):
    """\
    Abort the special task, or tasks, specified in the filter.
    """
    query = models.SpecialTask.query_objects(filter_data)
    special_tasks = query.filter(is_active=True)
    for task in special_tasks:
        task.abort()


def _call_special_tasks_on_hosts(task, hosts):
    """\
    Schedules a set of hosts for a special task.

    @returns A list of hostnames that a special task was created for.
    """
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts)
    if shard_host_map and not utils.is_shard():
        raise ValueError('The following hosts are on shards, please '
                         'follow the link to the shards and create jobs '
                         'there instead. %s.' % shard_host_map)
    for host in hosts:
        models.SpecialTask.schedule_special_task(host, task)
    return list(sorted(host.hostname for host in hosts))


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.

    @returns A list of hostnames that a verify task was created for.
    """
    hosts = models.Host.query_objects(filter_data)
    shard_host_map = rpc_utils.bucket_hosts_by_shard(hosts, rpc_hostnames=True)

    # Filter out hosts on a shard from those on the master, forward
    # rpcs to the shard with an additional hostname__in filter, and
    # create a local SpecialTask for each remaining host.
    if shard_host_map and not utils.is_shard():
        hosts = [h for h in hosts if h.shard is None]
        for shard, hostnames in shard_host_map.iteritems():

            # The main client of this module is the frontend website, and
            # it invokes it with an 'id' or an 'id__in' filter. Regardless,
            # the 'hostname' filter should narrow down the list of hosts on
            # each shard even though we supply all the ids in filter_data.
            # This method uses hostname instead of id because it fits better
            # with the overall architecture of redirection functions in
            # rpc_utils.
            shard_filter = filter_data.copy()
            shard_filter['hostname__in'] = hostnames
            rpc_utils.run_rpc_on_multiple_hostnames(
                    'reverify_hosts', [shard], **shard_filter)

    # There is a race condition here if someone assigns a shard to one of these
    # hosts before we create the task. The host will stay on the master if:
    # 1. The host is not Ready
    # 2. The host is Ready but has a task
    # But if the host is Ready and doesn't have a task yet, it will get sent
    # to the shard as we're creating a task here.

    # Given that we only rarely verify Ready hosts it isn't worth putting this
    # entire method in a transaction. The worst case scenario is that we have
    # a verify running on a Ready host while the shard is using it; if the
    # verify fails, no subsequent tasks will be created against the host on
    # the master, and verifies are safe enough that this is OK.
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.VERIFY, hosts)


def repair_hosts(**filter_data):
    """\
    Schedules a set of hosts for repair.

    @returns A list of hostnames that a repair task was created for.
    """
    return _call_special_tasks_on_hosts(models.SpecialTask.Task.REPAIR,
                                        models.Host.query_objects(filter_data))


def get_jobs(not_yet_run=False, running=False, finished=False,
             suite=False, sub=False, standalone=False, **filter_data):
    """\
    Extra status filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.

    Extra type filter args for get_jobs:
    -suite: Include only jobs with child jobs.
    -sub: Include only jobs with a parent job.
    -standalone: Include only jobs with no child or parent jobs.
    At most one of these three fields should be specified.
    """
    extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
                                                    running,
                                                    finished)
    filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
                                                                 suite,
                                                                 sub,
                                                                 standalone)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                               'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval, 'keyvals')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dict['keyvals'] = dict((keyval.key, keyval.value)
                                   for keyval in job.keyvals)
        if job.parameterized_job:
            job_dict['image'] = get_parameterized_autoupdate_image_url(job)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)
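
# A hedged usage sketch for get_jobs() above; the owner is an assumed example.
# 'not_yet_run' and 'suite' are the extra filter args documented in the
# docstring; other keywords pass through as ordinary filters.
#
#     jobs = get_jobs(not_yet_run=True, suite=True, owner='showard')
#     for job in jobs:
#         print job['id'], job['name'], job['dependencies']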
mblighe8819cd2008-02-15 16:48:40 +0000986
987
988def get_num_jobs(not_yet_run=False, running=False, finished=False,
Jiaxi Luo15cbf372014-07-01 19:20:20 -0700989 suite=False, sub=False, standalone=False,
jadmanski0afbb632008-06-06 21:10:57 +0000990 **filter_data):
991 """\
992 See get_jobs() for documentation of extra filter parameters.
993 """
Jiaxi Luo15cbf372014-07-01 19:20:20 -0700994 extra_args = rpc_utils.extra_job_status_filters(not_yet_run,
995 running,
996 finished)
997 filter_data['extra_args'] = rpc_utils.extra_job_type_filters(extra_args,
998 suite,
999 sub,
1000 standalone)
jadmanski0afbb632008-06-06 21:10:57 +00001001 return models.Job.query_count(filter_data)
mblighe8819cd2008-02-15 16:48:40 +00001002
1003
mblighe8819cd2008-02-15 16:48:40 +00001004def get_jobs_summary(**filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001005 """\
Jiaxi Luoaac54572014-06-04 13:57:02 -07001006 Like get_jobs(), but adds 'status_counts' and 'result_counts' field.
1007
1008 'status_counts' filed is a dictionary mapping status strings to the number
1009 of hosts currently with that status, i.e. {'Queued' : 4, 'Running' : 2}.
1010
1011 'result_counts' field is piped to tko's rpc_interface and has the return
1012 format specified under get_group_counts.
jadmanski0afbb632008-06-06 21:10:57 +00001013 """
1014 jobs = get_jobs(**filter_data)
1015 ids = [job['id'] for job in jobs]
1016 all_status_counts = models.Job.objects.get_status_counts(ids)
1017 for job in jobs:
1018 job['status_counts'] = all_status_counts[job['id']]
Jiaxi Luoaac54572014-06-04 13:57:02 -07001019 job['result_counts'] = tko_rpc_interface.get_status_counts(
1020 ['afe_job_id', 'afe_job_id'],
1021 header_groups=[['afe_job_id'], ['afe_job_id']],
1022 **{'afe_job_id': job['id']})
jadmanski0afbb632008-06-06 21:10:57 +00001023 return rpc_utils.prepare_for_serialization(jobs)
mblighe8819cd2008-02-15 16:48:40 +00001024
1025
showarda965cef2009-05-15 23:17:41 +00001026def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
showarda8709c52008-07-03 19:44:54 +00001027 """\
1028 Retrieves all the information needed to clone a job.
1029 """
showarda8709c52008-07-03 19:44:54 +00001030 job = models.Job.objects.get(id=id)
showard29f7cd22009-04-29 21:16:24 +00001031 job_info = rpc_utils.get_job_info(job,
showarda965cef2009-05-15 23:17:41 +00001032 preserve_metahosts,
1033 queue_entry_filter_data)
showard945072f2008-09-03 20:34:59 +00001034
showardd9992fe2008-07-31 02:15:03 +00001035 host_dicts = []
showard29f7cd22009-04-29 21:16:24 +00001036 for host in job_info['hosts']:
1037 host_dict = get_hosts(id=host.id)[0]
1038 other_labels = host_dict['labels']
1039 if host_dict['platform']:
1040 other_labels.remove(host_dict['platform'])
1041 host_dict['other_labels'] = ', '.join(other_labels)
showardd9992fe2008-07-31 02:15:03 +00001042 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001043
showard29f7cd22009-04-29 21:16:24 +00001044 for host in job_info['one_time_hosts']:
1045 host_dict = dict(hostname=host.hostname,
1046 id=host.id,
1047 platform='(one-time host)',
1048 locked_text='')
1049 host_dicts.append(host_dict)
showarda8709c52008-07-03 19:44:54 +00001050
showard4d077562009-05-08 18:24:36 +00001051 # convert keys from Label objects to strings (names of labels)
showard29f7cd22009-04-29 21:16:24 +00001052 meta_host_counts = dict((meta_host.name, count) for meta_host, count
showard4d077562009-05-08 18:24:36 +00001053 in job_info['meta_host_counts'].iteritems())
showard29f7cd22009-04-29 21:16:24 +00001054
1055 info = dict(job=job.get_object_dict(),
1056 meta_host_counts=meta_host_counts,
1057 hosts=host_dicts)
1058 info['job']['dependencies'] = job_info['dependencies']
1059 if job_info['atomic_group']:
1060 info['atomic_group_name'] = (job_info['atomic_group']).name
1061 else:
1062 info['atomic_group_name'] = None
jamesren2275ef12010-04-12 18:25:06 +00001063 info['hostless'] = job_info['hostless']
jamesren76fcf192010-04-21 20:39:50 +00001064 info['drone_set'] = job.drone_set and job.drone_set.name
showarda8709c52008-07-03 19:44:54 +00001065
Eric Lid23bc192011-02-09 14:38:57 -08001066 if job.parameterized_job:
1067 info['job']['image'] = get_parameterized_autoupdate_image_url(job)
1068
showarda8709c52008-07-03 19:44:54 +00001069 return rpc_utils.prepare_for_serialization(info)
1070
1071
showard34dc5fa2008-04-24 20:58:40 +00001072# host queue entries
1073
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001074def get_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001075 """\
showardc92da832009-04-07 18:14:34 +00001076 @returns A sequence of nested dictionaries of host and job information.
jadmanski0afbb632008-06-06 21:10:57 +00001077 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001078 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1079 'started_on__lte',
1080 start_time,
1081 end_time,
1082 **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001083 return rpc_utils.prepare_rows_as_nested_dicts(
1084 models.HostQueueEntry.query_objects(filter_data),
1085 ('host', 'atomic_group', 'job'))
showard34dc5fa2008-04-24 20:58:40 +00001086
1087
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001088def get_num_host_queue_entries(start_time=None, end_time=None, **filter_data):
jadmanski0afbb632008-06-06 21:10:57 +00001089 """\
1090 Get the number of host queue entries associated with this job.
1091 """
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001092 filter_data = rpc_utils.inject_times_to_filter('started_on__gte',
1093 'started_on__lte',
1094 start_time,
1095 end_time,
1096 **filter_data)
jadmanski0afbb632008-06-06 21:10:57 +00001097 return models.HostQueueEntry.query_count(filter_data)
showard34dc5fa2008-04-24 20:58:40 +00001098
1099
showard1e935f12008-07-11 00:11:36 +00001100def get_hqe_percentage_complete(**filter_data):
1101 """
showardc92da832009-04-07 18:14:34 +00001102 Computes the fraction of host queue entries matching the given filter data
showard1e935f12008-07-11 00:11:36 +00001103 that are complete.
1104 """
1105 query = models.HostQueueEntry.query_objects(filter_data)
1106 complete_count = query.filter(complete=True).count()
1107 total_count = query.count()
1108 if total_count == 0:
1109 return 1
1110 return float(complete_count) / total_count
1111
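# Usage sketch (hypothetical filter; the call returns a float in [0, 1]):
#
#     completion = get_hqe_percentage_complete(job__id=42)
#     print 'Entries complete: %.0f%%' % (completion * 100)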
1112
showard1a5a4082009-07-28 20:01:37 +00001113# special tasks
1114
1115def get_special_tasks(**filter_data):
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001116 """Get special task entries from the local database.
1117
1118 Query the special tasks table for tasks matching the given
1119 `filter_data`, and return a list of the results. No attempt is
1120 made to forward the call to shards; the buck will stop here.
1121 The caller is expected to know the target shard for such reasons
1122 as:
1123 * The caller is a service (such as gs_offloader) configured
1124 to operate on behalf of one specific shard, and no other.
1125 * The caller has a host as a parameter, and knows that this is
1126 the shard assigned to that host.
1127
1128 @param filter_data Filter keywords to pass to the underlying
1129 database query.
1130
1131 """
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001132 return rpc_utils.prepare_rows_as_nested_dicts(
1133 models.SpecialTask.query_objects(filter_data),
1134 ('host', 'queue_entry'))
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001135
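# Usage sketch (illustrative; field names such as `task` and `is_complete`
# are assumed SpecialTask filters, and the caller is expected to already be
# talking to the right shard, as noted above):
#
#     get_special_tasks(task='Repair', is_complete=True, sort_by=['-id'])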
1136
1137def get_host_special_tasks(host_id, **filter_data):
1138 """Get special task entries for a given host.
1139
1140 Query the special tasks table for tasks that ran on the host
1141 given by `host_id` and matching the given `filter_data`.
1142 Return a list of the results. If the host is assigned to a
1143 shard, forward this call to that shard.
1144
1145 @param host_id Id in the database of the target host.
1146 @param filter_data Filter keywords to pass to the underlying
1147 database query.
1148
1149 """
MK Ryu0c1a37d2015-04-30 12:00:55 -07001150 # Retrieve host data even if the host is in an invalid state.
1151 host = models.Host.smart_get(host_id, False)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001152 if not host.shard:
J. Richard Barnettefdfcd662015-04-13 17:20:29 -07001153 return get_special_tasks(host_id=host_id, **filter_data)
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001154 else:
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001155 # The return values from AFE methods are post-processed
1156 # objects that aren't JSON-serializable. So, we have to
1157 # call AFE.run() to get the raw, serializable output from
1158 # the shard.
J. Richard Barnetteb5164d62015-04-13 12:59:31 -07001159 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1160 return shard_afe.run('get_special_tasks',
1161 host_id=host_id, **filter_data)
showard1a5a4082009-07-28 20:01:37 +00001162
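# Usage sketch (hypothetical host id; the call forwards to the host's shard
# automatically when one is assigned):
#
#     get_host_special_tasks(123, is_complete=True,
#                            sort_by=['-time_started'])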
1163
MK Ryu0c1a37d2015-04-30 12:00:55 -07001164def get_num_special_tasks(**kwargs):
1165 """Get the number of special task entries from the local database.
1166
1167 Query the special tasks table for tasks matching the given 'kwargs',
1168 and return the number of the results. No attempt is made to forward
1169 the call to shards; the buck will stop here.
1170
1171 @param kwargs Filter keywords to pass to the underlying database query.
1172
1173 """
1174 return models.SpecialTask.query_count(kwargs)
1175
1176
1177def get_host_num_special_tasks(host, **kwargs):
1178 Get the number of special task entries for a given host.
1179
1180 Query the special tasks table for tasks that ran on the host
1181 given by 'host' and matching the given 'kwargs'.
1182 Return the number of matching entries. If the host is assigned to a
1183 shard, forward this call to that shard.
1184
1185 @param host Id or name of a host; typically a hostname.
1186 @param kwargs Filter keywords to pass to the underlying database query.
1187
1188 """
1189 # Retrieve host data even if the host is in an invalid state.
1190 host_model = models.Host.smart_get(host, False)
1191 if not host_model.shard:
1192 return get_num_special_tasks(host=host, **kwargs)
1193 else:
1194 shard_afe = frontend.AFE(server=host_model.shard.rpc_hostname())
1195 return shard_afe.run('get_num_special_tasks', host=host, **kwargs)
1196
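# Usage sketch (hypothetical hostname; returns an integer count, forwarded to
# the owning shard when necessary):
#
#     get_host_num_special_tasks('chromeos1-row1-host1', is_complete=True)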
1197
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001198def get_status_task(host_id, end_time):
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001199 """Get the "status task" for a host from the local shard.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001200
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001201 Returns a single special task representing the given host's
1202 "status task". The status task is a completed special task that
1203 identifies whether the corresponding host was working or broken
1204 when it completed. A successful task indicates a working host;
1205 a failed task indicates a broken host.
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001206
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001207 This call will not be forwarded to a shard; the receiving server
1208 must be the shard that owns the host.
1209
1210 @param host_id Id in the database of the target host.
1211 @param end_time Time reference for the host's status.
1212
1213 @return A single task; its status (successful or not)
1214 corresponds to the status of the host (working or
1215 broken) at the given time. If no task is found, return
1216 `None`.
1217
1218 """
1219 tasklist = rpc_utils.prepare_rows_as_nested_dicts(
1220 status_history.get_status_task(host_id, end_time),
1221 ('host', 'queue_entry'))
1222 return tasklist[0] if tasklist else None
1223
1224
1225def get_host_status_task(host_id, end_time):
1226 """Get the "status task" for a host from its owning shard.
1227
1228 Finds the given host's owning shard, and forwards to it a call
1229 to `get_status_task()` (see above).
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001230
1231 @param host_id Id in the database of the target host.
1232 @param end_time Time reference for the host's status.
1233
1234 @return A single task; its status (successful or not)
1235 corresponds to the status of the host (working or
1236 broken) at the given time. If no task is found, return
1237 `None`.
1238
1239 """
1240 host = models.Host.smart_get(host_id)
1241 if not host.shard:
J. Richard Barnette4d7e6e62015-05-01 10:47:34 -07001242 return get_status_task(host_id, end_time)
J. Richard Barnette39255fa2015-04-14 17:23:41 -07001243 else:
1244 # The return values from AFE methods are post-processed
1245 # objects that aren't JSON-serializable. So, we have to
1246 # call AFE.run() to get the raw, serializable output from
1247 # the shard.
1248 shard_afe = frontend.AFE(server=host.shard.rpc_hostname())
1249 return shard_afe.run('get_status_task',
1250 host_id=host_id, end_time=end_time)
1251
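# Usage sketch (hypothetical values; assumes the serialized task exposes the
# SpecialTask `success` field):
#
#     task = get_host_status_task(123, end_time)
#     if task is None:
#         status = 'unknown'     # no completed task by `end_time`
#     elif task['success']:
#         status = 'working'
#     else:
#         status = 'broken'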
1252
showardc0ac3a72009-07-08 21:14:45 +00001253# support for host detail view
1254
MK Ryu0c1a37d2015-04-30 12:00:55 -07001255def get_host_queue_entries_and_special_tasks(host, query_start=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001256 query_limit=None, start_time=None,
1257 end_time=None):
showardc0ac3a72009-07-08 21:14:45 +00001258 """
1259 @returns an interleaved list of HostQueueEntries and SpecialTasks,
1260 in approximate run order. Each dict contains keys for type, host,
1261 job, status, started_on, execution_path, and ID.
1262 """
1263 total_limit = None
1264 if query_limit is not None:
1265 total_limit = (query_start or 0) + query_limit
MK Ryu0c1a37d2015-04-30 12:00:55 -07001266 filter_data_common = {'host': host,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001267 'query_limit': total_limit,
1268 'sort_by': ['-id']}
showardc0ac3a72009-07-08 21:14:45 +00001269
MK Ryu0c1a37d2015-04-30 12:00:55 -07001270 filter_data_special_tasks = rpc_utils.inject_times_to_filter(
1271 'time_started__gte', 'time_started__lte', start_time, end_time,
1272 **filter_data_common)
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001273
MK Ryu0c1a37d2015-04-30 12:00:55 -07001274 queue_entries = get_host_queue_entries(
1275 start_time, end_time, **filter_data_common)
1276 special_tasks = get_host_special_tasks(host, **filter_data_special_tasks)
showardc0ac3a72009-07-08 21:14:45 +00001277
1278 interleaved_entries = rpc_utils.interleave_entries(queue_entries,
1279 special_tasks)
1280 if query_start is not None:
1281 interleaved_entries = interleaved_entries[query_start:]
1282 if query_limit is not None:
1283 interleaved_entries = interleaved_entries[:query_limit]
MK Ryu0c1a37d2015-04-30 12:00:55 -07001284 return rpc_utils.prepare_host_queue_entries_and_special_tasks(
1285 interleaved_entries, queue_entries)
showardc0ac3a72009-07-08 21:14:45 +00001286
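# Usage sketch (hypothetical hostname; shows simple pagination of the
# interleaved history):
#
#     # First page of 20 entries, then the next 20.
#     get_host_queue_entries_and_special_tasks('host1', query_start=0,
#                                              query_limit=20)
#     get_host_queue_entries_and_special_tasks('host1', query_start=20,
#                                              query_limit=20)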
1287
MK Ryu0c1a37d2015-04-30 12:00:55 -07001288def get_num_host_queue_entries_and_special_tasks(host, start_time=None,
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001289                                                 end_time=None):
    """Return the combined number of HQEs and special tasks for a host,
    optionally restricted to entries started between the given times.
    """
MK Ryu0c1a37d2015-04-30 12:00:55 -07001290 filter_data_common = {'host': host}
Jiaxi Luo57bc1952014-07-22 15:27:30 -07001291
1292 filter_data_queue_entries, filter_data_special_tasks = (
1293 rpc_utils.inject_times_to_hqe_special_tasks_filters(
1294 filter_data_common, start_time, end_time))
1295
1296 return (models.HostQueueEntry.query_count(filter_data_queue_entries)
MK Ryu0c1a37d2015-04-30 12:00:55 -07001297 + get_host_num_special_tasks(**filter_data_special_tasks))
showardc0ac3a72009-07-08 21:14:45 +00001298
1299
showard29f7cd22009-04-29 21:16:24 +00001300# recurring run
1301
1302def get_recurring(**filter_data):
1303 return rpc_utils.prepare_rows_as_nested_dicts(
1304 models.RecurringRun.query_objects(filter_data),
1305 ('job', 'owner'))
1306
1307
1308def get_num_recurring(**filter_data):
1309 return models.RecurringRun.query_count(filter_data)
1310
1311
1312def delete_recurring_runs(**filter_data):
1313 to_delete = models.RecurringRun.query_objects(filter_data)
1314 to_delete.delete()
1315
1316
1317def create_recurring_run(job_id, start_date, loop_period, loop_count):
    """Create a recurring run of the given job, owned by the current user."""
showard64a95952010-01-13 21:27:16 +00001318 owner = models.User.current_user().login
showard29f7cd22009-04-29 21:16:24 +00001319 job = models.Job.objects.get(id=job_id)
1320 return job.create_recurring_job(start_date=start_date,
1321 loop_period=loop_period,
1322 loop_count=loop_count,
1323 owner=owner)
1324
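# Usage sketch (hypothetical values; the units of `loop_period` are an
# assumption here; see models.Job.create_recurring_job for the real contract):
#
#     create_recurring_run(job_id=42,
#                          start_date=datetime.datetime(2015, 6, 1),
#                          loop_period=24 * 60 * 60,    # assumed seconds
#                          loop_count=7)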
1325
mblighe8819cd2008-02-15 16:48:40 +00001326# other
1327
showarde0b63622008-08-04 20:58:47 +00001328def echo(data=""):
1329 """\
1330 Returns the string passed in. Used as a basic check that RPC calls
1331 can be made successfully.
1332 """
1333 return data
1334
1335
showardb7a52fd2009-04-27 20:10:56 +00001336def get_motd():
1337 """\
1338 Returns the message of the day as a string.
1339 """
1340 return rpc_utils.get_motd()
1341
1342
mblighe8819cd2008-02-15 16:48:40 +00001343def get_static_data():
jadmanski0afbb632008-06-06 21:10:57 +00001344 """\
1345 Returns a dictionary of data that rarely changes and is not otherwise
1346 accessible through the RPC interface. This includes:
showardc92da832009-04-07 18:14:34 +00001347
1348 priorities: List of job priority choices.
1349 default_priority: Default priority value for new jobs.
1350 users: Sorted list of all users.
Jiaxi Luo31874592014-06-11 10:36:35 -07001351 labels: Sorted list of labels whose names do not start with
1352 'cros-version' or 'fw-version'.
showardc92da832009-04-07 18:14:34 +00001353 atomic_groups: Sorted list of all atomic groups.
1354 tests: Sorted list of all tests.
1355 profilers: Sorted list of all profilers.
1356 current_user: Logged-in username.
1357 host_statuses: Sorted list of possible Host statuses.
1358 job_statuses: Sorted list of possible HostQueueEntry statuses.
Simran Basi7e605742013-11-12 13:43:36 -08001359 job_timeout_mins_default: The default job timeout length in minutes.
showarda1e74b32009-05-12 17:32:04 +00001360 parse_failed_repair_default: Default value for the parse_failed_repair job
Jiaxi Luo31874592014-06-11 10:36:35 -07001361 option.
showardc92da832009-04-07 18:14:34 +00001362 reboot_before_options: A list of valid RebootBefore string enums.
1363 reboot_after_options: A list of valid RebootAfter string enums.
1364 motd: Server's message of the day.
1365 status_dictionary: A mapping from one word job status names to a more
1366 informative description.
jadmanski0afbb632008-06-06 21:10:57 +00001367 """
showard21baa452008-10-21 00:08:39 +00001368
1369 job_fields = models.Job.get_field_dict()
jamesren76fcf192010-04-21 20:39:50 +00001370 default_drone_set_name = models.DroneSet.default_drone_set_name()
1371 drone_sets = ([default_drone_set_name] +
1372 sorted(drone_set.name for drone_set in
1373 models.DroneSet.objects.exclude(
1374 name=default_drone_set_name)))
showard21baa452008-10-21 00:08:39 +00001375
jadmanski0afbb632008-06-06 21:10:57 +00001376 result = {}
Alex Miller7d658cf2013-09-04 16:00:35 -07001377 result['priorities'] = priorities.Priority.choices()
1378 default_priority = priorities.Priority.DEFAULT
1379 result['default_priority'] = 'Default'
1380 result['max_schedulable_priority'] = priorities.Priority.DEFAULT
jadmanski0afbb632008-06-06 21:10:57 +00001381 result['users'] = get_users(sort_by=['login'])
Jiaxi Luo31874592014-06-11 10:36:35 -07001382
1383 label_exclude_filters = [{'name__startswith': 'cros-version'},
1384 {'name__startswith': 'fw-version'}]
1385 result['labels'] = get_labels(
1386 label_exclude_filters,
1387 sort_by=['-platform', 'name'])
1388
showardc92da832009-04-07 18:14:34 +00001389 result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
jadmanski0afbb632008-06-06 21:10:57 +00001390 result['tests'] = get_tests(sort_by=['name'])
showard2b9a88b2008-06-13 20:55:03 +00001391 result['profilers'] = get_profilers(sort_by=['name'])
showard0fc38302008-10-23 00:44:07 +00001392 result['current_user'] = rpc_utils.prepare_for_serialization(
showard64a95952010-01-13 21:27:16 +00001393 models.User.current_user().get_object_dict())
showard2b9a88b2008-06-13 20:55:03 +00001394 result['host_statuses'] = sorted(models.Host.Status.names)
mbligh5a198b92008-12-11 19:33:29 +00001395 result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
Simran Basi7e605742013-11-12 13:43:36 -08001396 result['job_timeout_mins_default'] = models.Job.DEFAULT_TIMEOUT_MINS
Simran Basi34217022012-11-06 13:43:15 -08001397 result['job_max_runtime_mins_default'] = (
1398 models.Job.DEFAULT_MAX_RUNTIME_MINS)
showarda1e74b32009-05-12 17:32:04 +00001399 result['parse_failed_repair_default'] = bool(
1400 models.Job.DEFAULT_PARSE_FAILED_REPAIR)
jamesrendd855242010-03-02 22:23:44 +00001401 result['reboot_before_options'] = model_attributes.RebootBefore.names
1402 result['reboot_after_options'] = model_attributes.RebootAfter.names
showard8fbae652009-01-20 23:23:10 +00001403 result['motd'] = rpc_utils.get_motd()
jamesren76fcf192010-04-21 20:39:50 +00001404 result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
1405 result['drone_sets'] = drone_sets
jamesren4a41e012010-07-16 22:33:48 +00001406 result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
showard8ac29b42008-07-17 17:01:55 +00001407
showardd3dc1992009-04-22 21:01:40 +00001408 result['status_dictionary'] = {"Aborted": "Aborted",
showard8ac29b42008-07-17 17:01:55 +00001409 "Verifying": "Verifying Host",
Alex Millerdfff2fd2013-05-28 13:05:06 -07001410 "Provisioning": "Provisioning Host",
showard8ac29b42008-07-17 17:01:55 +00001411 "Pending": "Waiting on other hosts",
1412 "Running": "Running autoserv",
1413 "Completed": "Autoserv completed",
1414 "Failed": "Failed to complete",
showardd823b362008-07-24 16:35:46 +00001415 "Queued": "Queued",
showard5deb6772008-11-04 21:54:33 +00001416 "Starting": "Next in host's queue",
1417 "Stopped": "Other host(s) failed verify",
showardd3dc1992009-04-22 21:01:40 +00001418 "Parsing": "Awaiting parse of final results",
showard29f7cd22009-04-29 21:16:24 +00001419 "Gathering": "Gathering log files",
showard8cc058f2009-09-08 16:26:33 +00001420 "Template": "Template job for recurring run",
mbligh4608b002010-01-05 18:22:35 +00001421 "Waiting": "Waiting for scheduler action",
Dan Shi07e09af2013-04-12 09:31:29 -07001422 "Archiving": "Archiving results",
1423 "Resetting": "Resetting hosts"}
Jiaxi Luo421608e2014-07-07 14:38:00 -07001424
1425 result['wmatrix_url'] = rpc_utils.get_wmatrix_url()
Simran Basi71206ef2014-08-13 13:51:18 -07001426 result['is_moblab'] = bool(utils.is_moblab())
Jiaxi Luo421608e2014-07-07 14:38:00 -07001427
jadmanski0afbb632008-06-06 21:10:57 +00001428 return result
showard29f7cd22009-04-29 21:16:24 +00001429
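# Usage sketch (keys shown are a subset of those documented in the docstring
# above):
#
#     static = get_static_data()
#     static['priorities']          # job priority choices
#     static['motd']                # server's message of the day
#     static['status_dictionary']   # human-readable HQE status descriptions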
1430
1431def get_server_time():
    """Return the current server time as a 'YYYY-MM-DD HH:MM' string."""
1432 return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")