"""\
Functions to expose over the RPC interface.

For all modify* and delete* functions that ask for an 'id' parameter to
identify the object to operate on, the id may be either
 * the database row ID
 * the name of the object (label name, hostname, user login, etc.)
 * a dictionary containing uniquely identifying fields (this option should
   seldom be used)

When specifying foreign key fields (e.g. adding hosts to a label, or adding
users to an ACL group), the given value may be either the database row ID or
the name of the object.

All get* functions return lists of dictionaries. Each dictionary represents one
object and maps field names to values.

Some examples:
modify_host(2, hostname='myhost') # modify hostname of host with database ID 2
modify_host('ipaj2', hostname='myhost') # modify hostname of host 'ipaj2'
modify_test('sleeptest', test_type='Client', params=', seconds=60')
delete_acl_group(1) # delete by ID
delete_acl_group('Everyone') # delete by name
acl_group_add_users('Everyone', ['mbligh', 'showard'])
get_jobs(owner='showard', status='Queued')

See doctests/001_rpc_test.txt for (lots) more examples.
"""

__author__ = 'showard@google.com (Steve Howard)'

import datetime
import common
from autotest_lib.frontend import thread_local
from autotest_lib.frontend.afe import models, model_logic
from autotest_lib.frontend.afe import control_file, rpc_utils
from autotest_lib.client.common_lib import global_config


# labels

def add_label(name, kernel_config=None, platform=None, only_if_needed=None):
    return models.Label.add_object(
            name=name, kernel_config=kernel_config, platform=platform,
            only_if_needed=only_if_needed).id


def modify_label(id, **data):
    models.Label.smart_get(id).update_object(data)


def delete_label(id):
    models.Label.smart_get(id).delete()


def label_add_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    label = models.Label.smart_get(id)
    if label.platform:
        models.Host.check_no_platform(host_objs)
    label.host_set.add(*host_objs)


def label_remove_hosts(id, hosts):
    host_objs = models.Host.smart_get_bulk(hosts)
    models.Label.smart_get(id).host_set.remove(*host_objs)


def get_labels(**filter_data):
    """\
    @returns A sequence of nested dictionaries of label information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.Label.query_objects(filter_data),
            ('atomic_group',))


# atomic groups

def add_atomic_group(name, max_number_of_machines=None, description=None):
    return models.AtomicGroup.add_object(
            name=name, max_number_of_machines=max_number_of_machines,
            description=description).id


def modify_atomic_group(id, **data):
    models.AtomicGroup.smart_get(id).update_object(data)


def delete_atomic_group(id):
    models.AtomicGroup.smart_get(id).delete()


def atomic_group_add_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.add(*label_objs)


def atomic_group_remove_labels(id, labels):
    label_objs = models.Label.smart_get_bulk(labels)
    models.AtomicGroup.smart_get(id).label_set.remove(*label_objs)


def get_atomic_groups(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.AtomicGroup.list_objects(filter_data))


# hosts

def add_host(hostname, status=None, locked=None, protection=None):
    return models.Host.add_object(hostname=hostname, status=status,
                                  locked=locked, protection=protection).id


def modify_host(id, **data):
    rpc_utils.check_modify_host(data)
    host = models.Host.smart_get(id)
    rpc_utils.check_modify_host_locking(host, data)
    host.update_object(data)


def modify_hosts(host_filter_data, update_data):
    """
    @param host_filter_data: Filter data selecting which hosts to modify.
    @param update_data: A dictionary with the changes to make to the hosts.
    """
    rpc_utils.check_modify_host(update_data)
    hosts = models.Host.query_objects(host_filter_data)
    for host in hosts:
        host.update_object(update_data)
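
# Example (illustrative only, shown as a comment so nothing runs at import
# time; the label name is hypothetical and the filter keys follow the
# Django-style lookups accepted by query_objects): lock every host carrying a
# given label.
#
#   modify_hosts(host_filter_data={'labels__name': 'needs_repair'},
#                update_data={'locked': True})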


def host_add_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    host = models.Host.smart_get(id)

    platforms = [label.name for label in labels if label.platform]
    if len(platforms) > 1:
        raise model_logic.ValidationError(
            {'labels': 'Adding more than one platform label: %s' %
                       ', '.join(platforms)})
    if len(platforms) == 1:
        models.Host.check_no_platform([host])
    host.labels.add(*labels)


def host_remove_labels(id, labels):
    labels = models.Label.smart_get_bulk(labels)
    models.Host.smart_get(id).labels.remove(*labels)


def set_host_attribute(attribute, value, **host_filter_data):
    """
    @param attribute string name of attribute
    @param value string, or None to delete an attribute
    @param host_filter_data filter data to apply to Hosts to choose hosts to
           act upon
    """
    assert host_filter_data # disallow accidental actions on all hosts
    hosts = models.Host.query_objects(host_filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)

    for host in hosts:
        host.set_or_delete_attribute(attribute, value)
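
# Example (illustrative only; the attribute name and hostname are
# hypothetical): set an attribute on one host, then remove it again by
# passing None.
#
#   set_host_attribute('console_server', 'conserver1.example.com',
#                      hostname='host1.example.com')
#   set_host_attribute('console_server', None, hostname='host1.example.com')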


def delete_host(id):
    models.Host.smart_get(id).delete()


def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given.  Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (rpc_utils.
                find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
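
# Example (illustrative only; the label names and status value are
# hypothetical): fetch Ready hosts carrying both labels, skipping atomic
# group machines.
#
#   hosts = get_hosts(multiple_labels=['bvt', 'x86_64'],
#                     exclude_atomic_group_hosts=True, status='Ready')
#   hostnames = [host['hostname'] for host in hosts]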


def get_num_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
                  exclude_atomic_group_hosts=False, valid_only=True,
                  **filter_data):
    """
    Same parameters as get_hosts().

    @returns The number of matching hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    return hosts.count()


# tests

def add_test(name, test_type, path, author=None, dependencies=None,
             experimental=True, run_verify=None, test_class=None,
             test_time=None, test_category=None, description=None,
             sync_count=1):
    return models.Test.add_object(name=name, test_type=test_type, path=path,
                                  author=author, dependencies=dependencies,
                                  experimental=experimental,
                                  run_verify=run_verify, test_time=test_time,
                                  test_category=test_category,
                                  sync_count=sync_count,
                                  test_class=test_class,
                                  description=description).id


def modify_test(id, **data):
    models.Test.smart_get(id).update_object(data)


def delete_test(id):
    models.Test.smart_get(id).delete()


def get_tests(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Test.list_objects(filter_data))


# profilers

def add_profiler(name, description=None):
    return models.Profiler.add_object(name=name, description=description).id


def modify_profiler(id, **data):
    models.Profiler.smart_get(id).update_object(data)


def delete_profiler(id):
    models.Profiler.smart_get(id).delete()


def get_profilers(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.Profiler.list_objects(filter_data))


# users

def add_user(login, access_level=None):
    return models.User.add_object(login=login, access_level=access_level).id


def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)


def delete_user(id):
    models.User.smart_get(id).delete()


def get_users(**filter_data):
    return rpc_utils.prepare_for_serialization(
            models.User.list_objects(filter_data))


# acl groups

def add_acl_group(name, description=None):
    group = models.AclGroup.add_object(name=name, description=description)
    group.users.add(thread_local.get_user())
    return group.id


def modify_acl_group(id, **data):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    group.update_object(data)
    group.add_current_user_if_empty()


def acl_group_add_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.add(*users)


def acl_group_remove_users(id, users):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    users = models.User.smart_get_bulk(users)
    group.users.remove(*users)
    group.add_current_user_if_empty()


def acl_group_add_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.add(*hosts)
    group.on_host_membership_change()


def acl_group_remove_hosts(id, hosts):
    group = models.AclGroup.smart_get(id)
    group.check_for_acl_violation_acl_group()
    hosts = models.Host.smart_get_bulk(hosts)
    group.hosts.remove(*hosts)
    group.on_host_membership_change()


def delete_acl_group(id):
    models.AclGroup.smart_get(id).delete()


def get_acl_groups(**filter_data):
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [host.hostname
                              for host in acl_group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)


# jobs

def generate_control_file(tests=(), kernel=None, label=None, profilers=(),
                          client_control_file='', use_container=False,
                          profile_only=None, upload_kernel_config=False):
    """
    Generates a client-side control file to load a kernel and run tests.

    @param tests List of tests to run.
    @param kernel A list of kernel info dictionaries configuring which kernels
        to boot for this job and other options for them.
    @param label Name of label to grab kernel config from.
    @param profilers List of profilers to activate during the job.
    @param client_control_file The contents of a client-side control file to
        run at the end of all tests.  If this is supplied, all tests must be
        client side.
        TODO: in the future we should support server control files directly
        to wrap with a kernel.  That'll require changing the parameter
        name and adding a boolean to indicate if it is a client or server
        control file.
    @param use_container unused argument today.  TODO: Enable containers
        on the host during a client side test.
    @param profile_only A boolean that indicates what default profile_only
        mode to use in the control file. Passing None will generate a
        control file that does not explicitly set the default mode at all.
    @param upload_kernel_config: if enabled it will generate server control
            file code that uploads the kernel config file to the client and
            tells the client of the new (local) path when compiling the kernel;
            the tests must be server side tests

    @returns a dict with the following keys:
        control_file: str, The control file text.
        is_server: bool, is the control file a server-side control file?
        synch_count: How many machines the job uses per autoserv execution.
            synch_count == 1 means the job is asynchronous.
        dependencies: A list of the names of labels on which the job depends.
    """
    if not tests and not client_control_file:
        return dict(control_file='', is_server=False, synch_count=1,
                    dependencies=[])

    cf_info, test_objects, profiler_objects, label = (
        rpc_utils.prepare_generate_control_file(tests, kernel, label,
                                                profilers))
    cf_info['control_file'] = control_file.generate_control(
        tests=test_objects, kernels=kernel, platform=label,
        profilers=profiler_objects, is_server=cf_info['is_server'],
        client_control_file=client_control_file, profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    return cf_info
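
# Example (illustrative only; the test and profiler names are hypothetical and
# must exist in the database): build a control file for two client-side tests
# with a profiler, then inspect the returned metadata before creating a job.
#
#   cf_info = generate_control_file(tests=['sleeptest', 'dbench'],
#                                   profilers=['oprofile'])
#   control_text = cf_info['control_file']
#   is_server = cf_info['is_server']    # False for a pure client-side job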


def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, max_runtime_hrs=None, run_verify=True,
               email_list='', dependencies=(), reboot_before=None,
               reboot_after=None, parse_failed_repair=None, hostless=False):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Low, Medium, High, Urgent
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
        synch_count == 1 means the job is asynchronous.  If an atomic group is
        given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param max_runtime_hrs Hours from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
        this job will be parsed as part of the job.
    @param hostless if true, create a hostless job

    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
        one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.


    @returns The created Job id number.
    """
    user = thread_local.get_user()
    owner = user.login
    # input validation
    if not (hosts or meta_hosts or one_time_hosts or atomic_group_name
            or hostless):
        raise model_logic.ValidationError({
            'arguments' : "You must pass at least one of 'hosts', "
                          "'meta_hosts', 'one_time_hosts', "
                          "'atomic_group_name', or 'hostless'"
            })

    if hostless:
        if hosts or meta_hosts or one_time_hosts or atomic_group_name:
            raise model_logic.ValidationError({
                    'hostless': 'Hostless jobs cannot include any hosts!'})
        server_type = models.Job.ControlType.get_string(
                models.Job.ControlType.SERVER)
        if control_type != server_type:
            raise model_logic.ValidationError({
                    'control_type': 'Hostless jobs cannot use client-side '
                                    'control files'})

    labels_by_name = dict((label.name, label)
                          for label in models.Label.objects.all())
    atomic_groups_by_name = dict((ag.name, ag)
                                 for ag in models.AtomicGroup.objects.all())

    # Schedule on an atomic group automagically if one of the labels given
    # is an atomic group label and no explicit atomic_group_name was supplied.
    if not atomic_group_name:
        for label_name in meta_hosts or []:
            label = labels_by_name.get(label_name)
            if label and label.atomic_group:
                atomic_group_name = label.atomic_group.name
                break

    # convert hostnames & meta hosts to host/label objects
    host_objects = models.Host.smart_get_bulk(hosts)
    metahost_objects = []
    for label_name in meta_hosts or []:
        if label_name in labels_by_name:
            label = labels_by_name[label_name]
            metahost_objects.append(label)
        elif label_name in atomic_groups_by_name:
            # If given a metahost name that isn't a Label, check to
            # see if the user was specifying an Atomic Group instead.
            atomic_group = atomic_groups_by_name[label_name]
            if atomic_group_name and atomic_group_name != atomic_group.name:
                raise model_logic.ValidationError({
                        'meta_hosts': (
                                'Label "%s" not found.  If assumed to be an '
                                'atomic group it would conflict with the '
                                'supplied atomic group "%s".' % (
                                        label_name, atomic_group_name))})
            atomic_group_name = atomic_group.name
        else:
            raise model_logic.ValidationError(
                {'meta_hosts' : 'Label "%s" not found' % label_name})

    # Create and sanity check an AtomicGroup object if requested.
    if atomic_group_name:
        if one_time_hosts:
            raise model_logic.ValidationError(
                    {'one_time_hosts':
                     'One time hosts cannot be used with an Atomic Group.'})
        atomic_group = models.AtomicGroup.smart_get(atomic_group_name)
        if synch_count and synch_count > atomic_group.max_number_of_machines:
            raise model_logic.ValidationError(
                    {'atomic_group_name' :
                     'You have requested a synch_count (%d) greater than the '
                     'maximum machines in the requested Atomic Group (%d).' %
                     (synch_count, atomic_group.max_number_of_machines)})
    else:
        atomic_group = None

    for host in one_time_hosts or []:
        this_host = models.Host.create_one_time_host(host)
        host_objects.append(this_host)

    if reboot_before is None:
        reboot_before = user.get_reboot_before_display()
    if reboot_after is None:
        reboot_after = user.get_reboot_after_display()

    options = dict(name=name,
                   priority=priority,
                   control_file=control_file,
                   control_type=control_type,
                   is_template=is_template,
                   timeout=timeout,
                   max_runtime_hrs=max_runtime_hrs,
                   synch_count=synch_count,
                   run_verify=run_verify,
                   email_list=email_list,
                   dependencies=dependencies,
                   reboot_before=reboot_before,
                   reboot_after=reboot_after,
                   parse_failed_repair=parse_failed_repair)
    return rpc_utils.create_new_job(owner=owner,
                                    options=options,
                                    host_objects=host_objects,
                                    metahost_objects=metahost_objects,
                                    atomic_group=atomic_group)
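
# Example (illustrative only; the host and label names are hypothetical, and
# control_text would come from generate_control_file() above): enqueue an
# asynchronous client-side job on one explicit host plus one host chosen from
# a label.
#
#   job_id = create_job(name='sleeptest-smoke',
#                       priority='Medium',
#                       control_file=control_text,
#                       control_type='Client',
#                       hosts=['host1.example.com'],
#                       meta_hosts=['bvt'],
#                       synch_count=1)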


def abort_host_queue_entries(**filter_data):
    """\
    Abort a set of host queue entries.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    query = query.filter(complete=False)
    models.AclGroup.check_abort_permissions(query)
    host_queue_entries = list(query.select_related())
    rpc_utils.check_abort_synchronous_jobs(host_queue_entries)

    user = thread_local.get_user()
    for queue_entry in host_queue_entries:
        queue_entry.abort(user)
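
# Example (illustrative only; the job ID is hypothetical): abort every
# still-incomplete entry belonging to job 123.
#
#   abort_host_queue_entries(job__id=123)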


def reverify_hosts(**filter_data):
    """\
    Schedules a set of hosts for verify.
    """
    hosts = models.Host.query_objects(filter_data)
    models.AclGroup.check_for_acl_violation_hosts(hosts)
    models.SpecialTask.schedule_special_task(hosts,
                                             models.SpecialTask.Task.VERIFY)
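
# Example (illustrative only; the hostnames are hypothetical): queue a verify
# special task for two specific machines.
#
#   reverify_hosts(hostname__in=['host1.example.com', 'host2.example.com'])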


def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """\
    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
        all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
        aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)
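
# Example (illustrative only; the owner login is hypothetical): list the jobs
# a user still has waiting to start.
#
#   pending = get_jobs(owner='showard', not_yet_run=True)
#   names = [job['name'] for job in pending]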


def get_num_jobs(not_yet_run=False, running=False, finished=False,
                 **filter_data):
    """\
    See get_jobs() for documentation of extra filter parameters.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    return models.Job.query_count(filter_data)


def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds a 'status_counts' field, which is a dictionary
    mapping status strings to the number of hosts currently with that
    status, i.e. {'Queued' : 4, 'Running' : 2}.
    """
    jobs = get_jobs(**filter_data)
    ids = [job['id'] for job in jobs]
    all_status_counts = models.Job.objects.get_status_counts(ids)
    for job in jobs:
        job['status_counts'] = all_status_counts[job['id']]
    return rpc_utils.prepare_for_serialization(jobs)


def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """\
    Retrieves all the information needed to clone a job.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job,
                                      preserve_metahosts,
                                      queue_entry_filter_data)

    host_dicts = []
    for host in job_info['hosts']:
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dicts.append(host_dict)

    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)

    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count) for meta_host, count
                            in job_info['meta_host_counts'].iteritems())

    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None

    return rpc_utils.prepare_for_serialization(info)


# host queue entries

def get_host_queue_entries(**filter_data):
    """\
    @returns A sequence of nested dictionaries of host and job information.
    """
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.HostQueueEntry.query_objects(filter_data),
            ('host', 'atomic_group', 'job'))


def get_num_host_queue_entries(**filter_data):
    """\
    Get the number of host queue entries associated with this job.
    """
    return models.HostQueueEntry.query_count(filter_data)


def get_hqe_percentage_complete(**filter_data):
    """
    Computes the fraction of host queue entries matching the given filter data
    that are complete.
    """
    query = models.HostQueueEntry.query_objects(filter_data)
    complete_count = query.filter(complete=True).count()
    total_count = query.count()
    if total_count == 0:
        return 1
    return float(complete_count) / total_count
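
# Example (illustrative only; the job ID is hypothetical): report how far
# job 42 has progressed, as a fraction between 0 and 1.
#
#   fraction_done = get_hqe_percentage_complete(job__id=42)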


# special tasks

def get_special_tasks(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.SpecialTask.query_objects(filter_data),
            ('host', 'queue_entry'))


# support for host detail view

def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    @returns an interleaved list of HostQueueEntries and SpecialTasks,
            in approximate run order.  each dict contains keys for type, host,
            job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        total_limit = query_start + query_limit
    filter_data = {'host__hostname': hostname,
                   'query_limit': total_limit,
                   'sort_by': ['-id']}

    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))

    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
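
# Example (illustrative only; the hostname is hypothetical): page through a
# host's combined history twenty entries at a time.
#
#   first_page = get_host_queue_entries_and_special_tasks(
#           'host1.example.com', query_start=0, query_limit=20)
#   second_page = get_host_queue_entries_and_special_tasks(
#           'host1.example.com', query_start=20, query_limit=20)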


def get_num_host_queue_entries_and_special_tasks(hostname):
    filter_data = {'host__hostname': hostname}
    return (models.HostQueueEntry.query_count(filter_data)
            + models.SpecialTask.query_count(filter_data))


# recurring run

def get_recurring(**filter_data):
    return rpc_utils.prepare_rows_as_nested_dicts(
            models.RecurringRun.query_objects(filter_data),
            ('job', 'owner'))


def get_num_recurring(**filter_data):
    return models.RecurringRun.query_count(filter_data)


def delete_recurring_runs(**filter_data):
    to_delete = models.RecurringRun.query_objects(filter_data)
    to_delete.delete()


def create_recurring_run(job_id, start_date, loop_period, loop_count):
    owner = thread_local.get_user().login
    job = models.Job.objects.get(id=job_id)
    return job.create_recurring_job(start_date=start_date,
                                    loop_period=loop_period,
                                    loop_count=loop_count,
                                    owner=owner)


# other

def echo(data=""):
    """\
    Returns the string passed in. Useful as a basic test that RPC calls can
    successfully be made.
    """
    return data


def get_motd():
    """\
    Returns the message of the day as a string.
    """
    return rpc_utils.get_motd()


def get_static_data():
    """\
    Returns a dictionary containing a bunch of data that shouldn't change
    often and is otherwise inaccessible.  This includes:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    job_max_runtime_hrs_default: The default maximum job runtime in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair job
            option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one-word job status names to a more
            informative description.
    """

    job_fields = models.Job.get_field_dict()

    result = {}
    result['priorities'] = models.Job.Priority.choices()
    default_priority = job_fields['priority'].default
    default_string = models.Job.Priority.get_string(default_priority)
    result['default_priority'] = default_string
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        thread_local.get_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
        models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = models.RebootBefore.names
    result['reboot_after_options'] = models.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()

    result['status_dictionary'] = {"Aborted": "Aborted",
                                   "Verifying": "Verifying Host",
                                   "Pending": "Waiting on other hosts",
                                   "Running": "Running autoserv",
                                   "Completed": "Autoserv completed",
                                   "Failed": "Failed to complete",
                                   "Queued": "Queued",
                                   "Starting": "Next in host's queue",
                                   "Stopped": "Other host(s) failed verify",
                                   "Parsing": "Awaiting parse of final results",
                                   "Gathering": "Gathering log files",
                                   "Template": "Template job for recurring run",
                                   "Waiting": "Waiting for scheduler action"}
    return result


def get_server_time():
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")