"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing uniquely identifying fields (this option should
   seldom be used)

When specifying foreign key fields (e.g. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or
the name of the object.

All get* functions return lists of dictionaries. Each dictionary represents
one object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import datetime
import common
from autotest_lib.frontend import thread_local
from autotest_lib.frontend.afe import models, model_logic
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.client.common_lib import global_config


# labels

def add_label(name, kernel_config=None, platform=None, only_if_needed=None):
    return models.Label.add_object(
            name=name, kernel_config=kernel_config, platform=platform,
            only_if_needed=only_if_needed).id


def modify_label(id, **data):
    models.Label.smart_get(id).update_object(data)


def delete_label(id):
    models.Label.smart_get(id).delete()


def label_add_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    label = models.Label.smart_get(id)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)
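
# Illustrative usage sketch; the label name and hostnames below are
# hypothetical (hosts may also be given by database row ID):
#   label_add_hosts('my_label',
#                   ['host1.example.com', 'host2.example.com'])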


def label_remove_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


def get_labels(**filter_data):
    """\
    @returns A sequence of nested dictionaries of label information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.Label.query_objects(filter_data),
            ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, protection=None):
    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, protection=protection).id


def modify_host(id, **data):
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)
    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """
    @param host_filter_data: Filters out which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)
    for host in hosts:
        host.update_object(update_data)
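
# Illustrative usage sketch; the filter and update values below are
# hypothetical. This would lock every host currently in 'Repair Failed':
#   modify_hosts(host_filter_data={'status': 'Repair Failed'},
#                update_data={'locked': True})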


def host_add_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    host = models.Host.smart_get(id)

    platforms = [label.name for label in labels if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})
    if len(platforms) == 1:
        models.Host.check_no_platform([host])
    host.labels.add(*labels)


def host_remove_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*labels)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute string name of attribute
    @param value string, or None to delete an attribute
    @param host_filter_data filter data to apply to Hosts to choose hosts to
            act upon
    """
    assert host_filter_data # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)
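
# Illustrative usage sketch; the attribute name and hostname below are
# hypothetical. Passing None as the value deletes the attribute:
#   set_host_attribute('serial_console', '/dev/ttyS0',
#                      hostname='host1.example.com')
#   set_host_attribute('serial_console', None, hostname='host1.example.com')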


def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given. Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                                'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                                'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                                'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (
                rpc_utils.find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
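
# Illustrative usage sketch; the label names below are hypothetical. Returns
# unlocked hosts that carry both labels, skipping "only_if_needed" hosts:
#   get_hosts(multiple_labels=['bvt', 'x86_64'],
#             exclude_only_if_needed_labels=True, locked=False)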


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Test.list_objects(filter_data))


# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(thread_local.get_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them.
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests.  If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel.  That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today.  TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
        file code that uploads the kernel config file to the client and
        tells the client of the new (local) path when compiling the kernel;
        the tests must be server side tests.

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
            rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                    profilers))
    cf_info['control_file'] = control_file.generate_control(
            tests=test_objects, kernels=kernel, platform=label,
            profilers=profiler_objects, is_server=cf_info['is_server'],
            client_control_file=client_control_file, profile_only=profile_only,
            upload_kernel_config=upload_kernel_config)
    return cf_info
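
# Illustrative usage sketch; the test name is hypothetical and the exact keys
# expected in each kernel info dictionary are defined by
# rpc_utils.prepare_generate_control_file(), not shown here:
#   info = generate_control_file(tests=['sleeptest'],
#                                kernel=[{'version': '2.6.30'}])
#   info['control_file']  # the generated control file text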


def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, max_runtime_hrs=None, run_verify=True,
               email_list='', dependencies=(), reboot_before=None,
               reboot_after=None, parse_failed_repair=None):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Low, Medium, High, Urgent
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous.  If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param max_runtime_hrs Hours from job starting time until job times out.
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.

    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each
        entry one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job
        on.

    @returns The created Job id number.
    """
    user = thread_local.get_user()
    owner = user.login
    # input validation
    if not (hosts or meta_hosts or one_time_hosts or atomic_group_name):
        raise model_logic.ValidationError({
            'arguments' : "You must pass at least one of 'hosts', "
                          "'meta_hosts', 'one_time_hosts', "
                          "or 'atomic_group_name'"
            })

    labels_by_name = dict((label.name, label)
                          for label in models.Label.objects.all())
    atomic_groups_by_name = dict((ag.name, ag)
                                 for ag in models.AtomicGroup.objects.all())

    # Schedule on an atomic group automagically if one of the labels given
    # is an atomic group label and no explicit atomic_group_name was supplied.
    if not atomic_group_name:
        for label_name in meta_hosts or []:
            label = labels_by_name.get(label_name)
            if label and label.atomic_group:
                atomic_group_name = label.atomic_group.name
                break

    # convert hostnames & meta hosts to host/label objects
    host_objects = models.Host.smart_get_bulk(hosts)
    metahost_objects = []
    for label_name in meta_hosts or []:
        if label_name in labels_by_name:
            label = labels_by_name[label_name]
            metahost_objects.append(label)
        elif label_name in atomic_groups_by_name:
            # If given a metahost name that isn't a Label, check to
            # see if the user was specifying an Atomic Group instead.
            atomic_group = atomic_groups_by_name[label_name]
            if atomic_group_name and atomic_group_name != atomic_group.name:
                raise model_logic.ValidationError({
                    'meta_hosts': (
                        'Label "%s" not found.  If assumed to be an '
                        'atomic group it would conflict with the '
                        'supplied atomic group "%s".' % (
                            label_name, atomic_group_name))})
            atomic_group_name = atomic_group.name
        else:
            raise model_logic.ValidationError(
                {'meta_hosts' : 'Label "%s" not found' % label_name})

    # Create and sanity check an AtomicGroup object if requested.
    if atomic_group_name:
        if one_time_hosts:
            raise model_logic.ValidationError(
                {'one_time_hosts':
                 'One time hosts cannot be used with an Atomic Group.'})
        atomic_group = models.AtomicGroup.smart_get(atomic_group_name)
        if synch_count and synch_count > atomic_group.max_number_of_machines:
            raise model_logic.ValidationError(
                {'atomic_group_name' :
                 'You have requested a synch_count (%d) greater than the '
                 'maximum machines in the requested Atomic Group (%d).' %
                 (synch_count, atomic_group.max_number_of_machines)})
    else:
        atomic_group = None

    for host in one_time_hosts or []:
        this_host = models.Host.create_one_time_host(host)
        host_objects.append(this_host)

    if reboot_before is None:
        reboot_before = user.get_reboot_before_display()
    if reboot_after is None:
        reboot_after = user.get_reboot_after_display()

    options = dict(name=name,
                   priority=priority,
                   control_file=control_file,
                   control_type=control_type,
                   is_template=is_template,
                   timeout=timeout,
                   max_runtime_hrs=max_runtime_hrs,
                   synch_count=synch_count,
                   run_verify=run_verify,
                   email_list=email_list,
                   dependencies=dependencies,
                   reboot_before=reboot_before,
                   reboot_after=reboot_after,
                   parse_failed_repair=parse_failed_repair)
    return rpc_utils.create_new_job(owner=owner,
                                    options=options,
                                    host_objects=host_objects,
                                    metahost_objects=metahost_objects,
                                    atomic_group=atomic_group)
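
# Illustrative usage sketch; every argument value below is hypothetical:
#   create_job(name='sleeptest-smoke',
#              priority='Medium',
#              control_file=generate_control_file(
#                      tests=['sleeptest'])['control_file'],
#              control_type='Client',
#              hosts=['host1.example.com'])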


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    query = query.filter(complete=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    user = thread_local.get_user()
    for queue_entry in host_queue_entries:
        queue_entry.abort(user)
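
# Illustrative usage sketch; the job ID is hypothetical. Filter keys use
# Django-style double-underscore lookups, so this would abort every
# incomplete entry belonging to job 42:
#   abort_host_queue_entries(job__id=42)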


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.
    """
    hosts = models.Host.query_objects(filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    models.SpecialTask.schedule_special_task(hosts,
                                             models.SpecialTask.Task.VERIFY)


def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """\
    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                               'dependencies')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)
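
# Illustrative usage sketch; the owner value is hypothetical:
#   get_jobs(owner='showard', not_yet_run=True)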


def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds a 'status_counts' field, which is a dictionary
    mapping status strings to the number of hosts currently with that
    status, i.e. {'Queued' : 4, 'Running' : 2}.
    """
    jobs = get_jobs(**filter_data)
    ids = [job['id'] for job in jobs]
    all_status_counts = models.Job.objects.get_status_counts(ids)
    for job in jobs:
        job['status_counts'] = all_status_counts[job['id']]
    return rpc_utils.prepare_for_serialization(jobs)


def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """\
    Retrieves all the information needed to clone a job.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job,
                                      preserve_metahosts,
                                      queue_entry_filter_data)

    host_dicts = []
    for host in job_info['hosts']:
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dicts.append(host_dict)

    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)

    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count) for meta_host, count
                            in job_info['meta_host_counts'].iteritems())

    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None

    return rpc_utils.prepare_for_serialization(info)


# host queue entries

def get_host_queue_entries(**filter_data):
    """\
    @returns A sequence of nested dictionaries of host and job information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.HostQueueEntry.query_objects(filter_data),
            ('host', 'atomic_group', 'job'))


def get_num_host_queue_entries(**filter_data):
    """\
    Get the number of host queue entries associated with this job.
    """
    return models.HostQueueEntry.query_count(filter_data)


def get_hqe_percentage_complete(**filter_data):
    """
    Computes the fraction of host queue entries matching the given filter data
    that are complete.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    complete_count = query.filter(complete=True).count()
    total_count = query.count()
    if total_count == 0:
        return 1
    return float(complete_count) / total_count
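
# Illustrative usage sketch; the job ID is hypothetical:
#   get_hqe_percentage_complete(job__id=42)  # e.g. 0.75 when 3 of 4 are done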


# special tasks

def get_special_tasks(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.SpecialTask.query_objects(filter_data),
            ('host', 'queue_entry'))


# support for host detail view

def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    @returns an interleaved list of HostQueueEntries and SpecialTasks,
            in approximate run order.  each dict contains keys for type, host,
            job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        total_limit = query_start + query_limit
    filter_data = {'host__hostname': hostname,
                   'query_limit': total_limit,
                   'sort_by': ['-id']}

    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))

    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
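
# Illustrative usage sketch; the hostname is hypothetical. Fetches the second
# page of 20 entries for a host's detail view:
#   get_host_queue_entries_and_special_tasks('host1.example.com',
#                                            query_start=20, query_limit=20)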


def get_num_host_queue_entries_and_special_tasks(hostname):
    filter_data = {'host__hostname': hostname}
    return (models.HostQueueEntry.query_count(filter_data)
            + models.SpecialTask.query_count(filter_data))


# recurring run

def get_recurring(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.RecurringRun.query_objects(filter_data),
            ('job', 'owner'))


def get_num_recurring(**filter_data):
    return models.RecurringRun.query_count(filter_data)


def delete_recurring_runs(**filter_data):
    to_delete = models.RecurringRun.query_objects(filter_data)
    to_delete.delete()


def create_recurring_run(job_id, start_date, loop_period, loop_count):
    owner = thread_local.get_user().login
    job = models.Job.objects.get(id=job_id)
    return job.create_recurring_job(start_date=start_date,
                                    loop_period=loop_period,
                                    loop_count=loop_count,
                                    owner=owner)


# other

def echo(data=""):
    """\
    Returns a passed in string. For doing a basic test to see if RPC calls
    can successfully be made.
    """
    return data


def get_motd():
    """\
    Returns the message of the day as a string.
    """
    return rpc_utils.get_motd()


def get_static_data():
    """\
    Returns a dictionary containing a bunch of data that shouldn't change
    often and is otherwise inaccessible. This includes:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair job
        option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one word job status names to a more
        informative description.
    """

    job_fields = models.Job.get_field_dict()

    result = {}
    result['priorities'] = models.Job.Priority.choices()
    default_priority = job_fields['priority'].default
    default_string = models.Job.Priority.get_string(default_priority)
    result['default_priority'] = default_string
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
            thread_local.get_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
            models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = models.RebootBefore.names
    result['reboot_after_options'] = models.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()

    result['status_dictionary'] = {"Aborted": "Aborted",
                                   "Verifying": "Verifying Host",
                                   "Pending": "Waiting on other hosts",
                                   "Running": "Running autoserv",
                                   "Completed": "Autoserv completed",
                                   "Failed": "Failed to complete",
                                   "Queued": "Queued",
                                   "Starting": "Next in host's queue",
                                   "Stopped": "Other host(s) failed verify",
                                   "Parsing": "Awaiting parse of final results",
                                   "Gathering": "Gathering log files",
                                   "Template": "Template job for recurring run",
                                   "Waiting": "Waiting for scheduler action"}
    return result


def get_server_time():
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")