"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing uniquely identifying fields (this option should
   seldom be used)

When specifying foreign key fields (i.e. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or the
name of the object.

All get* functions return lists of dictionaries.  Each dictionary represents one
object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import datetime
import common
from autotest_lib.frontend.afe import models, model_logic
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.client.common_lib import global_config


# labels

def add_label(name, kernel_config=None, platform=None, only_if_needed=None):
    return models.Label.add_object(
            name=name, kernel_config=kernel_config, platform=platform,
            only_if_needed=only_if_needed).id


def modify_label(id, **data):
    models.Label.smart_get(id).update_object(data)


def delete_label(id):
    models.Label.smart_get(id).delete()


def label_add_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    label = models.Label.smart_get(id)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)
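
# Illustrative usage only (hypothetical label name and hostnames; the label may
# be given by name or by database ID, as described in the module docstring):
#     label_add_hosts('regression', ['host1.example.com', 'host2.example.com'])
#     label_remove_hosts('regression', ['host2.example.com'])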


def label_remove_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


def get_labels(**filter_data):
    """\
    @returns A sequence of nested dictionaries of label information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.Label.query_objects(filter_data),
            ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, protection=None):
    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, protection=protection).id


def modify_host(id, **data):
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)
    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """
    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)
    for host in hosts:
        host.update_object(update_data)
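
# Illustrative usage only (hypothetical filter; any Django-style lookup
# accepted by Host.query_objects should work here):
#     modify_hosts(host_filter_data={'hostname__startswith': 'lab1-'},
#                  update_data={'locked': True})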


def host_add_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    host = models.Host.smart_get(id)

    platforms = [label.name for label in labels if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})
    if len(platforms) == 1:
        models.Host.check_no_platform([host])
    host.labels.add(*labels)


def host_remove_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*labels)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute string name of attribute
    @param value string, or None to delete an attribute
    @param host_filter_data filter data to apply to Hosts to choose hosts to
            act upon
    """
    assert host_filter_data # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)
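
# Illustrative usage only (hypothetical attribute name and hostname; passing
# None as the value deletes the attribute again):
#     set_host_attribute('ssh_port', '2222', hostname='host1.example.com')
#     set_host_attribute('ssh_port', None, hostname='host1.example.com')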


def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True, **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given.  Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                                'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                                'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
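
# Illustrative usage only (hypothetical label names and host status): list
# hosts carrying both labels, skipping hosts reserved via "only_if_needed"
# labels:
#     get_hosts(multiple_labels=['bvt', 'x86_64'],
#               exclude_only_if_needed_labels=True, status='Ready')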


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Test.list_objects(filter_data))


# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
        models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(models.User.current_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()
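
# Illustrative usage only (hypothetical group name and hostname; mirrors the
# acl_group_add_users example in the module docstring):
#     acl_group_add_hosts('lab-acl', ['host1.example.com'])
#     acl_group_remove_hosts('lab-acl', ['host1.example.com'])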


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests.  If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel.  That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today.  TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file.  Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
        file code that uploads the kernel config file to the client and
        tells the client of the new (local) path when compiling the kernel;
        the tests must be server side tests

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
        rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                profilers))
    cf_info['control_file'] = control_file.generate_control(
        tests=test_objects, kernels=kernel, platform=label,
        profilers=profiler_objects, is_server=cf_info['is_server'],
        client_control_file=client_control_file, profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    return cf_info
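
# Illustrative usage only (hypothetical test and profiler names; the kernel
# dictionary keys follow the "kernel info dictionaries" mentioned in the
# docstring above and are shown purely as an assumed example):
#     info = generate_control_file(tests=['sleeptest', 'dbench'],
#                                  kernel=[{'version': '2.6.30'}],
#                                  profilers=['oprofile'])
#     info['control_file']  # the generated control file text
#     info['is_server']     # whether it must run as a server-side job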


def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, max_runtime_hrs=None, run_verify=True,
               email_list='', dependencies=(), reboot_before=None,
               reboot_after=None, parse_failed_repair=None, hostless=False):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Low, Medium, High, Urgent
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous.  If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param max_runtime_hrs Hours from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.
    @param hostless if true, create a hostless job

    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.

    @returns The created Job id number.
    """
    user = models.User.current_user()
    owner = user.login
    # input validation
    if not (hosts or meta_hosts or one_time_hosts or atomic_group_name
            or hostless):
        raise model_logic.ValidationError({
            'arguments' : "You must pass at least one of 'hosts', "
                          "'meta_hosts', 'one_time_hosts', "
                          "'atomic_group_name', or 'hostless'"
            })

    if hostless:
        if hosts or meta_hosts or one_time_hosts or atomic_group_name:
            raise model_logic.ValidationError({
                'hostless': 'Hostless jobs cannot include any hosts!'})
        server_type = models.Job.ControlType.get_string(
            models.Job.ControlType.SERVER)
        if control_type != server_type:
            raise model_logic.ValidationError({
                'control_type': 'Hostless jobs cannot use client-side '
                                'control files'})

    labels_by_name = dict((label.name, label)
                          for label in models.Label.objects.all())
    atomic_groups_by_name = dict((ag.name, ag)
                                 for ag in models.AtomicGroup.objects.all())

    # Schedule on an atomic group automagically if one of the labels given
    # is an atomic group label and no explicit atomic_group_name was supplied.
    if not atomic_group_name:
        for label_name in meta_hosts or []:
            label = labels_by_name.get(label_name)
            if label and label.atomic_group:
                atomic_group_name = label.atomic_group.name
                break

    # convert hostnames & meta hosts to host/label objects
    host_objects = models.Host.smart_get_bulk(hosts)
    metahost_objects = []
    for label_name in meta_hosts or []:
        if label_name in labels_by_name:
            label = labels_by_name[label_name]
            metahost_objects.append(label)
        elif label_name in atomic_groups_by_name:
            # If given a metahost name that isn't a Label, check to
            # see if the user was specifying an Atomic Group instead.
            atomic_group = atomic_groups_by_name[label_name]
            if atomic_group_name and atomic_group_name != atomic_group.name:
                raise model_logic.ValidationError({
                    'meta_hosts': (
                        'Label "%s" not found.  If assumed to be an '
                        'atomic group it would conflict with the '
                        'supplied atomic group "%s".' % (
                            label_name, atomic_group_name))})
            atomic_group_name = atomic_group.name
        else:
            raise model_logic.ValidationError(
                {'meta_hosts' : 'Label "%s" not found' % label_name})

    # Create and sanity check an AtomicGroup object if requested.
    if atomic_group_name:
        if one_time_hosts:
            raise model_logic.ValidationError(
                {'one_time_hosts':
                 'One time hosts cannot be used with an Atomic Group.'})
        atomic_group = models.AtomicGroup.smart_get(atomic_group_name)
        if synch_count and synch_count > atomic_group.max_number_of_machines:
            raise model_logic.ValidationError(
                {'atomic_group_name' :
                 'You have requested a synch_count (%d) greater than the '
                 'maximum machines in the requested Atomic Group (%d).' %
                 (synch_count, atomic_group.max_number_of_machines)})
    else:
        atomic_group = None

    for host in one_time_hosts or []:
        this_host = models.Host.create_one_time_host(host)
        host_objects.append(this_host)

    if reboot_before is None:
        reboot_before = user.get_reboot_before_display()
    if reboot_after is None:
        reboot_after = user.get_reboot_after_display()

    options = dict(name=name,
                   priority=priority,
                   control_file=control_file,
                   control_type=control_type,
                   is_template=is_template,
                   timeout=timeout,
                   max_runtime_hrs=max_runtime_hrs,
                   synch_count=synch_count,
                   run_verify=run_verify,
                   email_list=email_list,
                   dependencies=dependencies,
                   reboot_before=reboot_before,
                   reboot_after=reboot_after,
                   parse_failed_repair=parse_failed_repair)
    return rpc_utils.create_new_job(owner=owner,
                                    options=options,
                                    host_objects=host_objects,
                                    metahost_objects=metahost_objects,
                                    atomic_group=atomic_group)
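
# Illustrative usage only (hypothetical job name, hosts, label, and email):
# create an asynchronous client-side job on two specific hosts plus one host
# picked from the 'bvt' label:
#     create_job(name='sleeptest-nightly', priority='Medium',
#                control_file=generate_control_file(
#                        tests=['sleeptest'])['control_file'],
#                control_type='Client', synch_count=1,
#                hosts=['host1.example.com', 'host2.example.com'],
#                meta_hosts=['bvt'], email_list='someone@example.com')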


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    query = query.filter(complete=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    for queue_entry in host_queue_entries:
        queue_entry.abort()
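
# Illustrative usage only (hypothetical job ID; any HostQueueEntry filter
# accepted by query_objects should work):
#     abort_host_queue_entries(job__id=42)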


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.

    @returns A list of hostnames that a verify task was created for.
    """
    hosts = models.Host.query_objects(filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    for host in hosts:
        models.SpecialTask.schedule_special_task(host,
                                                 models.SpecialTask.Task.VERIFY)
    return list(sorted(host.hostname for host in hosts))
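
# Illustrative usage only (hypothetical hostname prefix): queue a verify task
# for every host whose name starts with 'lab1-':
#     reverify_hosts(hostname__startswith='lab1-')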


def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """\
    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)


def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds a 'status_counts' field, which is a dictionary
    mapping status strings to the number of hosts currently with that
    status, i.e. {'Queued' : 4, 'Running' : 2}.
    """
    jobs = get_jobs(**filter_data)
    ids = [job['id'] for job in jobs]
    all_status_counts = models.Job.objects.get_status_counts(ids)
    for job in jobs:
        job['status_counts'] = all_status_counts[job['id']]
    return rpc_utils.prepare_for_serialization(jobs)
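
# Illustrative usage only (hypothetical owner): show per-job host status counts
# for one user's currently running jobs:
#     for job in get_jobs_summary(owner='showard', running=True):
#         print job['id'], job['status_counts']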


def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """\
    Retrieves all the information needed to clone a job.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job,
                                      preserve_metahosts,
                                      queue_entry_filter_data)

    host_dicts = []
    for host in job_info['hosts']:
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dicts.append(host_dict)

    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)

    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count) for meta_host, count
                            in job_info['meta_host_counts'].iteritems())

    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None

    return rpc_utils.prepare_for_serialization(info)


# host queue entries

def get_host_queue_entries(**filter_data):
    """\
    @returns A sequence of nested dictionaries of host and job information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.HostQueueEntry.query_objects(filter_data),
            ('host', 'atomic_group', 'job'))


def get_num_host_queue_entries(**filter_data):
    """\
    Get the number of host queue entries associated with this job.
    """
    return models.HostQueueEntry.query_count(filter_data)


def get_hqe_percentage_complete(**filter_data):
    """
    Computes the fraction of host queue entries matching the given filter data
    that are complete.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    complete_count = query.filter(complete=True).count()
    total_count = query.count()
    if total_count == 0:
        return 1
    return float(complete_count) / total_count
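
# Illustrative usage only (hypothetical job ID): fraction of job 42's queue
# entries that have completed, as a float between 0 and 1:
#     get_hqe_percentage_complete(job__id=42)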


# special tasks

def get_special_tasks(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.SpecialTask.query_objects(filter_data),
            ('host', 'queue_entry'))


# support for host detail view

def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    @returns an interleaved list of HostQueueEntries and SpecialTasks,
            in approximate run order.  each dict contains keys for type, host,
            job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        total_limit = query_start + query_limit
    filter_data = {'host__hostname': hostname,
                   'query_limit': total_limit,
                   'sort_by': ['-id']}

    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))

    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
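
# Illustrative usage only (hypothetical hostname): first page of a host's
# combined job/task history, 20 entries per page (pass both query_start and
# query_limit together, as the code above adds them):
#     get_host_queue_entries_and_special_tasks('host1.example.com',
#                                              query_start=0, query_limit=20)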


def get_num_host_queue_entries_and_special_tasks(hostname):
    filter_data = {'host__hostname': hostname}
    return (models.HostQueueEntry.query_count(filter_data)
            + models.SpecialTask.query_count(filter_data))


# recurring run

def get_recurring(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.RecurringRun.query_objects(filter_data),
            ('job', 'owner'))


def get_num_recurring(**filter_data):
    return models.RecurringRun.query_count(filter_data)


def delete_recurring_runs(**filter_data):
    to_delete = models.RecurringRun.query_objects(filter_data)
    to_delete.delete()


def create_recurring_run(job_id, start_date, loop_period, loop_count):
    owner = models.User.current_user().login
    job = models.Job.objects.get(id=job_id)
    return job.create_recurring_job(start_date=start_date,
                                    loop_period=loop_period,
                                    loop_count=loop_count,
                                    owner=owner)


# other

def echo(data=""):
    """\
    Returns a passed in string. For doing a basic test to see if RPC calls
    can successfully be made.
    """
    return data


def get_motd():
    """\
    Returns the message of the day as a string.
    """
    return rpc_utils.get_motd()


def get_static_data():
    """\
    Returns a dictionary containing a bunch of data that shouldn't change
    often and is otherwise inaccessible.  This includes:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair job
        option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one word job status names to a more
        informative description.
    """

    job_fields = models.Job.get_field_dict()

    result = {}
    result['priorities'] = models.Job.Priority.choices()
    default_priority = job_fields['priority'].default
    default_string = models.Job.Priority.get_string(default_priority)
    result['default_priority'] = default_string
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        models.User.current_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
        models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = models.RebootBefore.names
    result['reboot_after_options'] = models.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()

    result['status_dictionary'] = {"Aborted": "Aborted",
                                   "Verifying": "Verifying Host",
                                   "Pending": "Waiting on other hosts",
                                   "Running": "Running autoserv",
                                   "Completed": "Autoserv completed",
                                   "Failed": "Failed to complete",
                                   "Queued": "Queued",
                                   "Starting": "Next in host's queue",
                                   "Stopped": "Other host(s) failed verify",
                                   "Parsing": "Awaiting parse of final results",
                                   "Gathering": "Gathering log files",
                                   "Template": "Template job for recurring run",
                                   "Waiting": "Waiting for scheduler action",
                                   "Archiving": "Archiving results"}
    return result


def get_server_time():
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")