"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing uniquely identifying fields (this option should
   seldom be used)

When specifying foreign key fields (i.e. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or the
name of the object.

All get* functions return lists of dictionaries. Each dictionary represents one
object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import datetime
import common
from autotest_lib.frontend import thread_local
from autotest_lib.frontend.afe import models, model_logic
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.client.common_lib import global_config


# labels

def add_label(name, kernel_config=None, platform=None, only_if_needed=None):
    return models.Label.add_object(
            name=name, kernel_config=kernel_config, platform=platform,
            only_if_needed=only_if_needed).id


def modify_label(id, **data):
    models.Label.smart_get(id).update_object(data)


def delete_label(id):
    models.Label.smart_get(id).delete()


def label_add_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    label = models.Label.smart_get(id)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)


def label_remove_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


def get_labels(**filter_data):
    """\
    @returns A sequence of nested dictionaries of label information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.Label.query_objects(filter_data),
            ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, protection=None):
    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, protection=protection).id


def modify_host(id, **data):
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)
    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """
    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)
    for host in hosts:
        host.update_object(update_data)
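
# Example (illustrative sketch; the status value and filter keys below are
# hypothetical): modify_hosts() applies one update dict to every host matched
# by the filter, e.g. locking all hosts currently in a given status:
#
#     modify_hosts(host_filter_data={'status': 'Repair Failed'},
#                  update_data={'locked': True})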


def host_add_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    host = models.Host.smart_get(id)

    platforms = [label.name for label in labels if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
                {'labels': 'Adding more than one platform label: %s' %
                 ', '.join(platforms)})
    if len(platforms) == 1:
        models.Host.check_no_platform([host])
    host.labels.add(*labels)


def host_remove_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*labels)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute string name of attribute
    @param value string, or None to delete an attribute
    @param host_filter_data filter data to apply to Hosts to choose hosts to
            act upon
    """
    assert host_filter_data # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)
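
# Example (illustrative sketch; the attribute name, value, hostnames and the
# Django-style '__in' lookup are assumptions for the example): host_filter_data
# is required, so callers always name the hosts they are touching, and passing
# value=None deletes the attribute:
#
#     set_host_attribute('rack', '17', hostname__in=['host1', 'host2'])
#     set_host_attribute('rack', None, hostname='host1')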


def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given. Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
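
# Example (illustrative sketch; label and filter values are hypothetical):
# each returned dict carries the extra 'labels', 'platform', 'atomic_group',
# 'acls' and 'attributes' keys built above:
#
#     for host in get_hosts(multiple_labels=['regression'], locked=False):
#         print host['hostname'], host['platform'], host['labels']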


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Test.list_objects(filter_data))


# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(thread_local.get_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)
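
# Example (illustrative sketch; the field values shown are hypothetical): each
# serialized group gains 'users' and 'hosts' lists, so one entry looks roughly
# like:
#
#     {'id': 1, 'name': 'Everyone', 'description': '',
#      'users': ['mbligh', 'showard'], 'hosts': ['host1', 'host2']}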


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests.  If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel.  That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today.  TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
            rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                    profilers))
    cf_info['control_file'] = control_file.generate_control(
            tests=test_objects, kernels=kernel, platform=label,
            profilers=profiler_objects, is_server=cf_info['is_server'],
            client_control_file=client_control_file, profile_only=profile_only)
    return cf_info
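
# Example (illustrative sketch; the test name, profiler name and the keys of
# the kernel info dict are assumptions for the example, not a documented
# schema):
#
#     cf_info = generate_control_file(tests=['sleeptest'],
#                                     kernel=[{'version': '2.6.30'}],
#                                     profilers=['oprofile'])
#     control_file_text = cf_info['control_file']
#     is_server = cf_info['is_server']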


def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, max_runtime_hrs=None, run_verify=True,
               email_list='', dependencies=(), reboot_before=None,
               reboot_after=None, parse_failed_repair=None):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Low, Medium, High, Urgent
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous.  If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param max_runtime_hrs Hours from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.

    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.

    @returns The created Job id number.
    """
    user = thread_local.get_user()
    owner = user.login
    # input validation
    if not (hosts or meta_hosts or one_time_hosts or atomic_group_name):
        raise model_logic.ValidationError({
            'arguments' : "You must pass at least one of 'hosts', "
                          "'meta_hosts', 'one_time_hosts', "
                          "or 'atomic_group_name'"
            })

    labels_by_name = dict((label.name, label)
                          for label in models.Label.objects.all())
    atomic_groups_by_name = dict((ag.name, ag)
                                 for ag in models.AtomicGroup.objects.all())

    # Schedule on an atomic group automagically if one of the labels given
    # is an atomic group label and no explicit atomic_group_name was supplied.
    if not atomic_group_name:
        for label_name in meta_hosts or []:
            label = labels_by_name.get(label_name)
            if label and label.atomic_group:
                atomic_group_name = label.atomic_group.name
                break

    # convert hostnames & meta hosts to host/label objects
    host_objects = models.Host.smart_get_bulk(hosts)
    metahost_objects = []
    for label_name in meta_hosts or []:
        if label_name in labels_by_name:
            label = labels_by_name[label_name]
            metahost_objects.append(label)
        elif label_name in atomic_groups_by_name:
            # If given a metahost name that isn't a Label, check to
            # see if the user was specifying an Atomic Group instead.
            atomic_group = atomic_groups_by_name[label_name]
            if atomic_group_name and atomic_group_name != atomic_group.name:
                raise model_logic.ValidationError({
                    'meta_hosts': (
                        'Label "%s" not found.  If assumed to be an '
                        'atomic group it would conflict with the '
                        'supplied atomic group "%s".' % (
                            label_name, atomic_group_name))})
            atomic_group_name = atomic_group.name
        else:
            raise model_logic.ValidationError(
                {'meta_hosts' : 'Label "%s" not found' % label})

    # Create and sanity check an AtomicGroup object if requested.
    if atomic_group_name:
        if one_time_hosts:
            raise model_logic.ValidationError(
                {'one_time_hosts':
                 'One time hosts cannot be used with an Atomic Group.'})
        atomic_group = models.AtomicGroup.smart_get(atomic_group_name)
        if synch_count and synch_count > atomic_group.max_number_of_machines:
            raise model_logic.ValidationError(
                {'atomic_group_name' :
                 'You have requested a synch_count (%d) greater than the '
                 'maximum machines in the requested Atomic Group (%d).' %
                 (synch_count, atomic_group.max_number_of_machines)})
    else:
        atomic_group = None

    for host in one_time_hosts or []:
        this_host = models.Host.create_one_time_host(host)
        host_objects.append(this_host)

    if reboot_before is None:
        reboot_before = user.get_reboot_before_display()
    if reboot_after is None:
        reboot_after = user.get_reboot_after_display()

    options = dict(name=name,
                   priority=priority,
                   control_file=control_file,
                   control_type=control_type,
                   is_template=is_template,
                   timeout=timeout,
                   max_runtime_hrs=max_runtime_hrs,
                   synch_count=synch_count,
                   run_verify=run_verify,
                   email_list=email_list,
                   dependencies=dependencies,
                   reboot_before=reboot_before,
                   reboot_after=reboot_after,
                   parse_failed_repair=parse_failed_repair)
    return rpc_utils.create_new_job(owner=owner,
                                    options=options,
                                    host_objects=host_objects,
                                    metahost_objects=metahost_objects,
                                    atomic_group=atomic_group)
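
# Example (illustrative sketch, continuing the generate_control_file() sketch
# above; hostnames and label names are hypothetical, priority and control_type
# values follow the docstring): an asynchronous client-side job on two explicit
# hosts, and a metahost job that lets the scheduler pick one host from a label:
#
#     create_job(name='sleeptest', priority='Medium',
#                control_file=control_file_text, control_type='Client',
#                hosts=['host1', 'host2'], synch_count=1)
#     create_job(name='sleeptest-metahost', priority='Low',
#                control_file=control_file_text, control_type='Client',
#                meta_hosts=['netbook'], dependencies=['bluetooth'])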


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    query = query.filter(complete=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    user = thread_local.get_user()
    for queue_entry in host_queue_entries:
        queue_entry.abort(user)
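
# Example (illustrative sketch; the job ID and hostname are hypothetical): the
# filter selects which incomplete entries to abort, e.g. everything belonging
# to one job, or everything scheduled on one host:
#
#     abort_host_queue_entries(job__id=42)
#     abort_host_queue_entries(host__hostname='host1')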


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.
    """
    hosts = models.Host.query_objects(filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    models.SpecialTask.schedule_special_task(hosts,
                                             models.SpecialTask.Task.VERIFY)


def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """\
    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)
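
# Example (illustrative sketch; the owner value is hypothetical): the three
# boolean filters combine with ordinary field filters, so a user's jobs that
# have not started yet could be fetched with:
#
#     get_jobs(owner='showard', not_yet_run=True)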


def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds a 'status_counts' field, which is a dictionary
    mapping status strings to the number of hosts currently with that
    status, i.e. {'Queued' : 4, 'Running' : 2}.
    """
    jobs = get_jobs(**filter_data)
    ids = [job['id'] for job in jobs]
    all_status_counts = models.Job.objects.get_status_counts(ids)
    for job in jobs:
        job['status_counts'] = all_status_counts[job['id']]
    return rpc_utils.prepare_for_serialization(jobs)


def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """\
    Retrieves all the information needed to clone a job.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job,
                                      preserve_metahosts,
                                      queue_entry_filter_data)

    host_dicts = []
    for host in job_info['hosts']:
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dicts.append(host_dict)

    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)

    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count) for meta_host, count
                            in job_info['meta_host_counts'].iteritems())

    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None

    return rpc_utils.prepare_for_serialization(info)


# host queue entries

def get_host_queue_entries(**filter_data):
    """\
    @returns A sequence of nested dictionaries of host and job information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.HostQueueEntry.query_objects(filter_data),
            ('host', 'atomic_group', 'job'))


def get_num_host_queue_entries(**filter_data):
    """\
    Get the number of host queue entries associated with this job.
    """
    return models.HostQueueEntry.query_count(filter_data)


def get_hqe_percentage_complete(**filter_data):
    """
    Computes the fraction of host queue entries matching the given filter data
    that are complete.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    complete_count = query.filter(complete=True).count()
    total_count = query.count()
    if total_count == 0:
        return 1
    return float(complete_count) / total_count
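
# Example (illustrative sketch; the job ID is hypothetical): with 3 of 4
# matching entries complete this returns 0.75, and with no matching entries at
# all it returns 1:
#
#     get_hqe_percentage_complete(job__id=42)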


# special tasks

def get_special_tasks(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.SpecialTask.query_objects(filter_data),
            ('host', 'queue_entry'))


# support for host detail view

def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    @returns an interleaved list of HostQueueEntries and SpecialTasks,
            in approximate run order.  Each dict contains keys for type, host,
            job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        total_limit = query_start + query_limit
    filter_data = {'host__hostname': hostname,
                   'query_limit': total_limit,
                   'sort_by': ['-id']}

    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))

    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
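
# Example (illustrative sketch; the hostname is hypothetical): query_start and
# query_limit paginate the interleaved history, so the second page of 20
# entries for a host is:
#
#     get_host_queue_entries_and_special_tasks('host1', query_start=20,
#                                              query_limit=20)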


def get_num_host_queue_entries_and_special_tasks(hostname):
    filter_data = {'host__hostname': hostname}
    return (models.HostQueueEntry.query_count(filter_data)
            + models.SpecialTask.query_count(filter_data))


# recurring run

def get_recurring(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.RecurringRun.query_objects(filter_data),
            ('job', 'owner'))


def get_num_recurring(**filter_data):
    return models.RecurringRun.query_count(filter_data)


def delete_recurring_runs(**filter_data):
    to_delete = models.RecurringRun.query_objects(filter_data)
    to_delete.delete()


def create_recurring_run(job_id, start_date, loop_period, loop_count):
    owner = thread_local.get_user().login
    job = models.Job.objects.get(id=job_id)
    return job.create_recurring_job(start_date=start_date,
                                    loop_period=loop_period,
                                    loop_count=loop_count,
                                    owner=owner)


# other

def echo(data=""):
    """\
    Returns a passed in string. For doing a basic test to see if RPC calls
    can successfully be made.
    """
    return data


def get_motd():
    """\
    Returns the message of the day as a string.
    """
    return rpc_utils.get_motd()


def get_static_data():
    """\
    Returns a dictionary containing a bunch of data that shouldn't change
    often and is otherwise inaccessible.  This includes:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair job
        option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one word job status names to a more
        informative description.
    """

    job_fields = models.Job.get_field_dict()

    result = {}
    result['priorities'] = models.Job.Priority.choices()
    default_priority = job_fields['priority'].default
    default_string = models.Job.Priority.get_string(default_priority)
    result['default_priority'] = default_string
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
            thread_local.get_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
            models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = models.RebootBefore.names
    result['reboot_after_options'] = models.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()

    result['status_dictionary'] = {"Aborted": "Aborted",
                                   "Verifying": "Verifying Host",
                                   "Pending": "Waiting on other hosts",
                                   "Running": "Running autoserv",
                                   "Completed": "Autoserv completed",
                                   "Failed": "Failed to complete",
                                   "Queued": "Queued",
                                   "Starting": "Next in host's queue",
                                   "Stopped": "Other host(s) failed verify",
                                   "Parsing": "Awaiting parse of final results",
                                   "Gathering": "Gathering log files",
                                   "Template": "Template job for recurring run",
                                   "Waiting": "Waiting for scheduler action"}
    return result


def get_server_time():
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")