"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing uniquely identifying field (this option should seldom
   be used)

When specifying foreign key fields (i.e. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or the
name of the object.

All get* functions return lists of dictionaries. Each dictionary represents one
object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import datetime
import common
from autotest_lib.frontend.afe import models, model_logic, model_attributes
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.client.common_lib import global_config


# labels

def add_label(name, kernel_config=None, platform=None, only_if_needed=None):
    return models.Label.add_object(
            name=name, kernel_config=kernel_config, platform=platform,
            only_if_needed=only_if_needed).id


def modify_label(id, **data):
    models.Label.smart_get(id).update_object(data)


def delete_label(id):
    models.Label.smart_get(id).delete()


def label_add_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    label = models.Label.smart_get(id)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)


def label_remove_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


def get_labels(**filter_data):
    """\
    @returns A sequence of nested dictionaries of label information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.Label.query_objects(filter_data),
            ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, protection=None):
    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, protection=protection).id


def modify_host(id, **data):
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)
    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """
    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)
    for host in hosts:
        host.update_object(update_data)
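
# A brief usage sketch for modify_hosts(); the label name below is hypothetical
# and host_filter_data takes Django-style filters on the Host model:
#
#     modify_hosts(host_filter_data={'labels__name': 'regression-pool'},
#                  update_data={'locked': True})
#     # locks every host currently carrying the 'regression-pool' label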


def host_add_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    host = models.Host.smart_get(id)

    platforms = [label.name for label in labels if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})
    if len(platforms) == 1:
        models.Host.check_no_platform([host])
    host.labels.add(*labels)


def host_remove_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*labels)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute string name of attribute
    @param value string, or None to delete an attribute
    @param host_filter_data filter data to apply to Hosts to choose hosts to act
    upon
    """
    assert host_filter_data # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)
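
# Illustrative calls (the attribute name and hostname are hypothetical):
#
#     set_host_attribute('serial_console', '/dev/ttyS0', hostname='myhost1')
#     set_host_attribute('serial_console', None, hostname='myhost1')  # delete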


def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True, **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given. Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
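
# Example query (the label names are hypothetical; extra keyword arguments are
# passed through as filters on the Host model, e.g. locked or hostname):
#
#     get_hosts(multiple_labels=['x86_64', 'bluetooth'],
#               exclude_only_if_needed_labels=True, locked=False)
#     # => [{'hostname': ..., 'labels': [...], 'platform': ..., 'acls': [...],
#     #      'attributes': {...}, ...}, ...]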


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Test.list_objects(filter_data))


# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(models.User.current_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests. If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel. That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today. TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
        file code that uploads the kernel config file to the client and
        tells the client of the new (local) path when compiling the kernel;
        the tests must be server side tests

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
        rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                profilers))
    cf_info['control_file'] = control_file.generate_control(
        tests=test_objects, kernels=kernel, platform=label,
        profilers=profiler_objects, is_server=cf_info['is_server'],
        client_control_file=client_control_file, profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    return cf_info
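
# Sketch of a typical call; the test and profiler names and the kernel version
# below are hypothetical and must already exist in this AFE instance:
#
#     info = generate_control_file(tests=['sleeptest'],
#                                  kernel=[{'version': '2.6.30'}],
#                                  profilers=['oprofile'])
#     # info['control_file'] holds the generated text; info['is_server'],
#     # info['synch_count'] and info['dependencies'] feed create_job() below.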


def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, max_runtime_hrs=None, run_verify=True,
               email_list='', dependencies=(), reboot_before=None,
               reboot_after=None, parse_failed_repair=None, hostless=False,
               keyvals=None, drone_set=None):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Low, Medium, High, Urgent
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous. If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param max_runtime_hrs Hours from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.
    @param hostless if true, create a hostless job
    @param keyvals dict of keyvals to associate with the job

    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.
    @param drone_set The name of the drone set to run this test on.


    @returns The created Job id number.
    """
    user = models.User.current_user()
    owner = user.login
    # input validation
    if not (hosts or meta_hosts or one_time_hosts or atomic_group_name
            or hostless):
        raise model_logic.ValidationError({
            'arguments' : "You must pass at least one of 'hosts', "
                          "'meta_hosts', 'one_time_hosts', "
                          "'atomic_group_name', or 'hostless'"
            })

    if hostless:
        if hosts or meta_hosts or one_time_hosts or atomic_group_name:
            raise model_logic.ValidationError({
                    'hostless': 'Hostless jobs cannot include any hosts!'})
        server_type = models.Job.ControlType.get_string(
                models.Job.ControlType.SERVER)
        if control_type != server_type:
            raise model_logic.ValidationError({
                    'control_type': 'Hostless jobs cannot use client-side '
                                    'control files'})

    labels_by_name = dict((label.name, label)
                          for label in models.Label.objects.all())
    atomic_groups_by_name = dict((ag.name, ag)
                                 for ag in models.AtomicGroup.objects.all())

    # Schedule on an atomic group automagically if one of the labels given
    # is an atomic group label and no explicit atomic_group_name was supplied.
    if not atomic_group_name:
        for label_name in meta_hosts or []:
            label = labels_by_name.get(label_name)
            if label and label.atomic_group:
                atomic_group_name = label.atomic_group.name
                break

    # convert hostnames & meta hosts to host/label objects
    host_objects = models.Host.smart_get_bulk(hosts)
    metahost_objects = []
    for label_name in meta_hosts or []:
        if label_name in labels_by_name:
            label = labels_by_name[label_name]
            metahost_objects.append(label)
        elif label_name in atomic_groups_by_name:
            # If given a metahost name that isn't a Label, check to
            # see if the user was specifying an Atomic Group instead.
            atomic_group = atomic_groups_by_name[label_name]
            if atomic_group_name and atomic_group_name != atomic_group.name:
                raise model_logic.ValidationError({
                        'meta_hosts': (
                                'Label "%s" not found. If assumed to be an '
                                'atomic group it would conflict with the '
                                'supplied atomic group "%s".' % (
                                        label_name, atomic_group_name))})
            atomic_group_name = atomic_group.name
        else:
            raise model_logic.ValidationError(
                {'meta_hosts' : 'Label "%s" not found' % label_name})

    # Create and sanity check an AtomicGroup object if requested.
    if atomic_group_name:
        if one_time_hosts:
            raise model_logic.ValidationError(
                    {'one_time_hosts':
                     'One time hosts cannot be used with an Atomic Group.'})
        atomic_group = models.AtomicGroup.smart_get(atomic_group_name)
        if synch_count and synch_count > atomic_group.max_number_of_machines:
            raise model_logic.ValidationError(
                    {'atomic_group_name' :
                     'You have requested a synch_count (%d) greater than the '
                     'maximum machines in the requested Atomic Group (%d).' %
                     (synch_count, atomic_group.max_number_of_machines)})
    else:
        atomic_group = None

    for host in one_time_hosts or []:
        this_host = models.Host.create_one_time_host(host)
        host_objects.append(this_host)

    options = dict(name=name,
                   priority=priority,
                   control_file=control_file,
                   control_type=control_type,
                   is_template=is_template,
                   timeout=timeout,
                   max_runtime_hrs=max_runtime_hrs,
                   synch_count=synch_count,
                   run_verify=run_verify,
                   email_list=email_list,
                   dependencies=dependencies,
                   reboot_before=reboot_before,
                   reboot_after=reboot_after,
                   parse_failed_repair=parse_failed_repair,
                   keyvals=keyvals,
                   drone_set=drone_set)
    return rpc_utils.create_new_job(owner=owner,
                                    options=options,
                                    host_objects=host_objects,
                                    metahost_objects=metahost_objects,
                                    atomic_group=atomic_group)
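
# Two illustrative calls (all names are hypothetical; control_text would
# normally come from generate_control_file() above):
#
#     create_job(name='sleeptest-pair', priority='Low',
#                control_file=control_text, control_type='Client',
#                hosts=['host1', 'host2'], synch_count=1)
#     create_job(name='nightly-suite', priority='Medium',
#                control_file=control_text, control_type='Server',
#                hostless=True)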


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    query = query.filter(complete=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    for queue_entry in host_queue_entries:
        queue_entry.abort()
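
# filter_data takes the same filters as get_host_queue_entries(); for example
# (the job id and hostname are hypothetical):
#
#     abort_host_queue_entries(job__id=42)                 # one whole job
#     abort_host_queue_entries(host__hostname='myhost1')   # one host's entries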


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.

    @returns A list of hostnames that a verify task was created for.
    """
    hosts = models.Host.query_objects(filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    for host in hosts:
        models.SpecialTask.schedule_special_task(host,
                                                 models.SpecialTask.Task.VERIFY)
    return list(sorted(host.hostname for host in hosts))


def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """\
    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                               'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval, 'keyvals')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dict['keyvals'] = dict((keyval.key, keyval.value)
                                   for keyval in job.keyvals)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)


def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds a 'status_counts' field, which is a dictionary
    mapping status strings to the number of hosts currently with that
    status, i.e. {'Queued' : 4, 'Running' : 2}.
    """
    jobs = get_jobs(**filter_data)
    ids = [job['id'] for job in jobs]
    all_status_counts = models.Job.objects.get_status_counts(ids)
    for job in jobs:
        job['status_counts'] = all_status_counts[job['id']]
    return rpc_utils.prepare_for_serialization(jobs)


def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """\
    Retrieves all the information needed to clone a job.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job,
                                      preserve_metahosts,
                                      queue_entry_filter_data)

    host_dicts = []
    for host in job_info['hosts']:
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dicts.append(host_dict)

    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)

    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count) for meta_host, count
                            in job_info['meta_host_counts'].iteritems())

    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None
    info['hostless'] = job_info['hostless']
    info['drone_set'] = job.drone_set and job.drone_set.name

    return rpc_utils.prepare_for_serialization(info)


# host queue entries

def get_host_queue_entries(**filter_data):
    """\
    @returns A sequence of nested dictionaries of host and job information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.HostQueueEntry.query_objects(filter_data),
            ('host', 'atomic_group', 'job'))
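
# Example (the job id is hypothetical); each returned dict nests 'host', 'job'
# and 'atomic_group' sub-dictionaries:
#
#     get_host_queue_entries(job__id=42, sort_by=['-id'])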


def get_num_host_queue_entries(**filter_data):
    """\
    Get the number of host queue entries associated with this job.
    """
    return models.HostQueueEntry.query_count(filter_data)


def get_hqe_percentage_complete(**filter_data):
    """
    Computes the fraction of host queue entries matching the given filter data
    that are complete.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    complete_count = query.filter(complete=True).count()
    total_count = query.count()
    if total_count == 0:
        return 1
    return float(complete_count) / total_count


# special tasks

def get_special_tasks(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.SpecialTask.query_objects(filter_data),
            ('host', 'queue_entry'))


# support for host detail view

def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    @returns an interleaved list of HostQueueEntries and SpecialTasks,
            in approximate run order. each dict contains keys for type, host,
            job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        total_limit = query_start + query_limit
    filter_data = {'host__hostname': hostname,
                   'query_limit': total_limit,
                   'sort_by': ['-id']}

    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))

    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)


def get_num_host_queue_entries_and_special_tasks(hostname):
    filter_data = {'host__hostname': hostname}
    return (models.HostQueueEntry.query_count(filter_data)
            + models.SpecialTask.query_count(filter_data))


# recurring run

def get_recurring(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.RecurringRun.query_objects(filter_data),
            ('job', 'owner'))


def get_num_recurring(**filter_data):
    return models.RecurringRun.query_count(filter_data)


def delete_recurring_runs(**filter_data):
    to_delete = models.RecurringRun.query_objects(filter_data)
    to_delete.delete()


def create_recurring_run(job_id, start_date, loop_period, loop_count):
    owner = models.User.current_user().login
    job = models.Job.objects.get(id=job_id)
    return job.create_recurring_job(start_date=start_date,
                                    loop_period=loop_period,
                                    loop_count=loop_count,
                                    owner=owner)


# other

def echo(data=""):
    """\
    Returns a passed in string. For doing a basic test to see if RPC calls
    can successfully be made.
    """
    return data


def get_motd():
    """\
    Returns the message of the day as a string.
    """
    return rpc_utils.get_motd()


def get_static_data():
    """\
    Returns a dictionary containing a bunch of data that shouldn't change
    often and is otherwise inaccessible. This includes:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair job
        option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one word job status names to a more
        informative description.
    """

    job_fields = models.Job.get_field_dict()
    default_drone_set_name = models.DroneSet.default_drone_set_name()
    drone_sets = ([default_drone_set_name] +
                  sorted(drone_set.name for drone_set in
                         models.DroneSet.objects.exclude(
                                 name=default_drone_set_name)))

    result = {}
    result['priorities'] = models.Job.Priority.choices()
    default_priority = job_fields['priority'].default
    default_string = models.Job.Priority.get_string(default_priority)
    result['default_priority'] = default_string
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        models.User.current_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
        models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = model_attributes.RebootBefore.names
    result['reboot_after_options'] = model_attributes.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()
    result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
    result['drone_sets'] = drone_sets

    result['status_dictionary'] = {"Aborted": "Aborted",
                                   "Verifying": "Verifying Host",
                                   "Pending": "Waiting on other hosts",
                                   "Running": "Running autoserv",
                                   "Completed": "Autoserv completed",
                                   "Failed": "Failed to complete",
                                   "Queued": "Queued",
                                   "Starting": "Next in host's queue",
                                   "Stopped": "Other host(s) failed verify",
                                   "Parsing": "Awaiting parse of final results",
                                   "Gathering": "Gathering log files",
                                   "Template": "Template job for recurring run",
                                   "Waiting": "Waiting for scheduler action",
                                   "Archiving": "Archiving results"}
    return result


def get_server_time():
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")