#!/usr/bin/python
#pylint: disable-msg=C0111

import datetime
import mox
import unittest

import common
from autotest_lib.client.common_lib import control_data
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import global_config
from autotest_lib.client.common_lib import priorities
from autotest_lib.client.common_lib.cros import dev_server
from autotest_lib.client.common_lib.test_utils import mock
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils
from autotest_lib.frontend.afe import model_attributes
from autotest_lib.frontend.afe import model_logic
from autotest_lib.frontend.afe import models
from autotest_lib.frontend.afe import rpc_interface
from autotest_lib.frontend.afe import rpc_utils
from autotest_lib.server import frontend
from autotest_lib.server import utils as server_utils
from autotest_lib.server.cros import provision
from autotest_lib.server.cros.dynamic_suite import constants
from autotest_lib.server.cros.dynamic_suite import control_file_getter
from autotest_lib.server.cros.dynamic_suite import frontend_wrappers

CLIENT = control_data.CONTROL_TYPE_NAMES.CLIENT
SERVER = control_data.CONTROL_TYPE_NAMES.SERVER

_hqe_status = models.HostQueueEntry.Status


class RpcInterfaceTest(unittest.TestCase,
                       frontend_test_utils.FrontendTestMixin):
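    """Tests for the core AFE RPC entry points.

    Covers the host, label, job, special task and shard-forwarding RPCs
    exposed by rpc_interface.
    """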
    def setUp(self):
        self._frontend_common_setup()
        self.god = mock.mock_god()


    def tearDown(self):
        self.god.unstub_all()
        self._frontend_common_teardown()
        global_config.global_config.reset_config_values()


    def test_validation(self):
        # non-number for a numeric field
        self.assertRaises(model_logic.ValidationError,
                          rpc_interface.add_atomic_group, name='foo',
                          max_number_of_machines='bar')
        # omit a required field
        self.assertRaises(model_logic.ValidationError, rpc_interface.add_label,
                          name=None)
        # violate uniqueness constraint
        self.assertRaises(model_logic.ValidationError, rpc_interface.add_host,
                          hostname='host1')


    def test_multiple_platforms(self):
        platform2 = models.Label.objects.create(name='platform2', platform=True)
        self.assertRaises(model_logic.ValidationError,
                          rpc_interface.label_add_hosts, id='platform2',
                          hosts=['host1', 'host2'])
        self.assertRaises(model_logic.ValidationError,
                          rpc_interface.host_add_labels,
                          id='host1', labels=['platform2'])
        # make sure the platform didn't get added
        platforms = rpc_interface.get_labels(
                host__hostname__in=['host1', 'host2'], platform=True)
        self.assertEquals(len(platforms), 1)
        self.assertEquals(platforms[0]['name'], 'myplatform')


    def _check_hostnames(self, hosts, expected_hostnames):
        self.assertEquals(set(host['hostname'] for host in hosts),
                          set(expected_hostnames))


    def test_get_hosts(self):
        hosts = rpc_interface.get_hosts()
        self._check_hostnames(hosts, [host.hostname for host in self.hosts])

        hosts = rpc_interface.get_hosts(hostname='host1')
        self._check_hostnames(hosts, ['host1'])
        host = hosts[0]
        self.assertEquals(sorted(host['labels']), ['label1', 'myplatform'])
        self.assertEquals(host['platform'], 'myplatform')
        self.assertEquals(host['atomic_group'], None)
        self.assertEquals(host['acls'], ['my_acl'])
        self.assertEquals(host['attributes'], {})


    def test_get_hosts_multiple_labels(self):
        hosts = rpc_interface.get_hosts(
                multiple_labels=['myplatform', 'label1'])
        self._check_hostnames(hosts, ['host1'])


    def test_get_hosts_exclude_only_if_needed(self):
        self.hosts[0].labels.add(self.label3)

        hosts = rpc_interface.get_hosts(hostname__in=['host1', 'host2'],
                                        exclude_only_if_needed_labels=True)
        self._check_hostnames(hosts, ['host2'])


    def test_get_hosts_exclude_atomic_group_hosts(self):
        hosts = rpc_interface.get_hosts(
                exclude_atomic_group_hosts=True,
                hostname__in=['host4', 'host5', 'host6'])
        self._check_hostnames(hosts, ['host4'])


    def test_get_hosts_exclude_both(self):
        self.hosts[0].labels.add(self.label3)

        hosts = rpc_interface.get_hosts(
                hostname__in=['host1', 'host2', 'host5'],
                exclude_only_if_needed_labels=True,
                exclude_atomic_group_hosts=True)
        self._check_hostnames(hosts, ['host2'])


    def test_job_keyvals(self):
        keyval_dict = {'mykey': 'myvalue'}
        job_id = rpc_interface.create_job(name='test',
                                          priority=priorities.Priority.DEFAULT,
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'],
                                          keyvals=keyval_dict)
        jobs = rpc_interface.get_jobs(id=job_id)
        self.assertEquals(len(jobs), 1)
        self.assertEquals(jobs[0]['keyvals'], keyval_dict)


    def test_test_retry(self):
        job_id = rpc_interface.create_job(name='flake',
                                          priority=priorities.Priority.DEFAULT,
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'],
                                          test_retry=10)
        jobs = rpc_interface.get_jobs(id=job_id)
        self.assertEquals(len(jobs), 1)
        self.assertEquals(jobs[0]['test_retry'], 10)


    def test_get_jobs_summary(self):
        job = self._create_job(hosts=xrange(1, 4))
        entries = list(job.hostqueueentry_set.all())
        entries[1].status = _hqe_status.FAILED
        entries[1].save()
        entries[2].status = _hqe_status.FAILED
        entries[2].aborted = True
        entries[2].save()

        # Mock up tko_rpc_interface.get_status_counts.
        self.god.stub_function_to_return(rpc_interface.tko_rpc_interface,
                                         'get_status_counts',
                                         None)

        job_summaries = rpc_interface.get_jobs_summary(id=job.id)
        self.assertEquals(len(job_summaries), 1)
        summary = job_summaries[0]
        self.assertEquals(summary['status_counts'], {'Queued': 1,
                                                     'Failed': 2})


    def _check_job_ids(self, actual_job_dicts, expected_jobs):
        self.assertEquals(
                set(job_dict['id'] for job_dict in actual_job_dicts),
                set(job.id for job in expected_jobs))


    def test_get_jobs_status_filters(self):
        HqeStatus = models.HostQueueEntry.Status
        def create_two_host_job():
            return self._create_job(hosts=[1, 2])
        def set_hqe_statuses(job, first_status, second_status):
            entries = job.hostqueueentry_set.all()
            entries[0].update_object(status=first_status)
            entries[1].update_object(status=second_status)

        queued = create_two_host_job()

        queued_and_running = create_two_host_job()
        set_hqe_statuses(queued_and_running, HqeStatus.QUEUED,
                         HqeStatus.RUNNING)

        running_and_complete = create_two_host_job()
        set_hqe_statuses(running_and_complete, HqeStatus.RUNNING,
                         HqeStatus.COMPLETED)

        complete = create_two_host_job()
        set_hqe_statuses(complete, HqeStatus.COMPLETED, HqeStatus.COMPLETED)

        started_but_inactive = create_two_host_job()
        set_hqe_statuses(started_but_inactive, HqeStatus.QUEUED,
                         HqeStatus.COMPLETED)

        parsing = create_two_host_job()
        set_hqe_statuses(parsing, HqeStatus.PARSING, HqeStatus.PARSING)

        self._check_job_ids(rpc_interface.get_jobs(not_yet_run=True), [queued])
        self._check_job_ids(rpc_interface.get_jobs(running=True),
                            [queued_and_running, running_and_complete,
                             started_but_inactive, parsing])
        self._check_job_ids(rpc_interface.get_jobs(finished=True), [complete])


    def test_get_jobs_type_filters(self):
        self.assertRaises(AssertionError, rpc_interface.get_jobs,
                          suite=True, sub=True)
        self.assertRaises(AssertionError, rpc_interface.get_jobs,
                          suite=True, standalone=True)
        self.assertRaises(AssertionError, rpc_interface.get_jobs,
                          standalone=True, sub=True)

        parent_job = self._create_job(hosts=[1])
        child_jobs = self._create_job(hosts=[1, 2],
                                      parent_job_id=parent_job.id)
        standalone_job = self._create_job(hosts=[1])

        self._check_job_ids(rpc_interface.get_jobs(suite=True), [parent_job])
        self._check_job_ids(rpc_interface.get_jobs(sub=True), [child_jobs])
        self._check_job_ids(rpc_interface.get_jobs(standalone=True),
                            [standalone_job])


    def _create_job_helper(self, **kwargs):
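        """Creates a job via rpc_interface.create_job() with default values.

        Keyword arguments are passed through, so tests can override any
        field (hosts, hostless, one_time_hosts, ...).
        """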
        return rpc_interface.create_job(name='test',
                                        priority=priorities.Priority.DEFAULT,
                                        control_file='control file',
                                        control_type=SERVER, **kwargs)


    def test_one_time_hosts(self):
        job = self._create_job_helper(one_time_hosts=['testhost'])
        host = models.Host.objects.get(hostname='testhost')
        self.assertEquals(host.invalid, True)
        self.assertEquals(host.labels.count(), 0)
        self.assertEquals(host.aclgroup_set.count(), 0)


    def test_create_job_duplicate_hosts(self):
        self.assertRaises(model_logic.ValidationError, self._create_job_helper,
                          hosts=[1, 1])


    def test_create_unrunnable_metahost_job(self):
        self.assertRaises(error.NoEligibleHostException,
                          self._create_job_helper, meta_hosts=['unused'])


    def test_create_hostless_job(self):
        job_id = self._create_job_helper(hostless=True)
        job = models.Job.objects.get(pk=job_id)
        queue_entries = job.hostqueueentry_set.all()
        self.assertEquals(len(queue_entries), 1)
        self.assertEquals(queue_entries[0].host, None)
        self.assertEquals(queue_entries[0].meta_host, None)
        self.assertEquals(queue_entries[0].atomic_group, None)


    def _setup_special_tasks(self):
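        """Creates two jobs on host1 plus three VERIFY special tasks.

        task1 is complete, task2 is active and tied to job2's queue entry,
        and task3 has not run yet.
        """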
        host = self.hosts[0]

        job1 = self._create_job(hosts=[1])
        job2 = self._create_job(hosts=[1])

        entry1 = job1.hostqueueentry_set.all()[0]
        entry1.update_object(started_on=datetime.datetime(2009, 1, 2),
                             execution_subdir='host1')
        entry2 = job2.hostqueueentry_set.all()[0]
        entry2.update_object(started_on=datetime.datetime(2009, 1, 3),
                             execution_subdir='host1')

        self.task1 = models.SpecialTask.objects.create(
                host=host, task=models.SpecialTask.Task.VERIFY,
                time_started=datetime.datetime(2009, 1, 1), # ran before job 1
                is_complete=True, requested_by=models.User.current_user())
        self.task2 = models.SpecialTask.objects.create(
                host=host, task=models.SpecialTask.Task.VERIFY,
                queue_entry=entry2, # ran with job 2
                is_active=True, requested_by=models.User.current_user())
        self.task3 = models.SpecialTask.objects.create(
                host=host, task=models.SpecialTask.Task.VERIFY,
                requested_by=models.User.current_user()) # not yet run


    def test_get_special_tasks(self):
        self._setup_special_tasks()
        tasks = rpc_interface.get_special_tasks(host__hostname='host1',
                                                queue_entry__isnull=True)
        self.assertEquals(len(tasks), 2)
        self.assertEquals(tasks[0]['task'], models.SpecialTask.Task.VERIFY)
        self.assertEquals(tasks[0]['is_active'], False)
        self.assertEquals(tasks[0]['is_complete'], True)


    def test_get_latest_special_task(self):
        # a particular usage of get_special_tasks()
        self._setup_special_tasks()
        self.task2.time_started = datetime.datetime(2009, 1, 2)
        self.task2.save()

        tasks = rpc_interface.get_special_tasks(
                host__hostname='host1', task=models.SpecialTask.Task.VERIFY,
                time_started__isnull=False, sort_by=['-time_started'],
                query_limit=1)
        self.assertEquals(len(tasks), 1)
        self.assertEquals(tasks[0]['id'], 2)


    def _common_entry_check(self, entry_dict):
        self.assertEquals(entry_dict['host']['hostname'], 'host1')
        self.assertEquals(entry_dict['job']['id'], 2)


    def test_get_host_queue_entries_and_special_tasks(self):
        self._setup_special_tasks()

        host = self.hosts[0].id
        entries_and_tasks = (
                rpc_interface.get_host_queue_entries_and_special_tasks(host))

        paths = [entry['execution_path'] for entry in entries_and_tasks]
        self.assertEquals(paths, ['hosts/host1/3-verify',
                                  '2-autotest_system/host1',
                                  'hosts/host1/2-verify',
                                  '1-autotest_system/host1',
                                  'hosts/host1/1-verify'])

        verify2 = entries_and_tasks[2]
        self._common_entry_check(verify2)
        self.assertEquals(verify2['type'], 'Verify')
        self.assertEquals(verify2['status'], 'Running')
        self.assertEquals(verify2['execution_path'], 'hosts/host1/2-verify')

        entry2 = entries_and_tasks[1]
        self._common_entry_check(entry2)
        self.assertEquals(entry2['type'], 'Job')
        self.assertEquals(entry2['status'], 'Queued')
        self.assertEquals(entry2['started_on'], '2009-01-03 00:00:00')


    def test_view_invalid_host(self):
        # RPCs used by View Host page should work for invalid hosts
        self._create_job_helper(hosts=[1])
        host = self.hosts[0]
        host.delete()

        self.assertEquals(1, rpc_interface.get_num_hosts(hostname='host1',
                                                         valid_only=False))
        data = rpc_interface.get_hosts(hostname='host1', valid_only=False)
        self.assertEquals(1, len(data))

        self.assertEquals(1, rpc_interface.get_num_host_queue_entries(
                host__hostname='host1'))
        data = rpc_interface.get_host_queue_entries(host__hostname='host1')
        self.assertEquals(1, len(data))

        count = rpc_interface.get_num_host_queue_entries_and_special_tasks(
                host=host.id)
        self.assertEquals(1, count)
        data = rpc_interface.get_host_queue_entries_and_special_tasks(
                host=host.id)
        self.assertEquals(1, len(data))


    def test_reverify_hosts(self):
        hostname_list = rpc_interface.reverify_hosts(id__in=[1, 2])
        self.assertEquals(hostname_list, ['host1', 'host2'])
        tasks = rpc_interface.get_special_tasks()
        self.assertEquals(len(tasks), 2)
        self.assertEquals(set(task['host']['id'] for task in tasks),
                          set([1, 2]))

        task = tasks[0]
        self.assertEquals(task['task'], models.SpecialTask.Task.VERIFY)
        self.assertEquals(task['requested_by'], 'autotest_system')


    def test_repair_hosts(self):
        hostname_list = rpc_interface.repair_hosts(id__in=[1, 2])
        self.assertEquals(hostname_list, ['host1', 'host2'])
        tasks = rpc_interface.get_special_tasks()
        self.assertEquals(len(tasks), 2)
        self.assertEquals(set(task['host']['id'] for task in tasks),
                          set([1, 2]))

        task = tasks[0]
        self.assertEquals(task['task'], models.SpecialTask.Task.REPAIR)
        self.assertEquals(task['requested_by'], 'autotest_system')


    def test_parameterized_job(self):
        global_config.global_config.override_config_value(
                'AUTOTEST_WEB', 'parameterized_jobs', 'True')

        string_type = model_attributes.ParameterTypes.STRING

        test = models.Test.objects.create(
                name='test', test_type=control_data.CONTROL_TYPE.SERVER)
        test_parameter = test.testparameter_set.create(name='key')
        profiler = models.Profiler.objects.create(name='profiler')

        kernels = ({'version': 'version', 'cmdline': 'cmdline'},)
        profilers = ('profiler',)
        profiler_parameters = {'profiler': {'key': ('value', string_type)}}
        job_parameters = {'key': ('value', string_type)}

        job_id = rpc_interface.create_parameterized_job(
                name='job', priority=priorities.Priority.DEFAULT, test='test',
                parameters=job_parameters, kernel=kernels, label='label1',
                profilers=profilers, profiler_parameters=profiler_parameters,
                profile_only=False, hosts=('host1',))
        parameterized_job = models.Job.smart_get(job_id).parameterized_job

        self.assertEqual(parameterized_job.test, test)
        self.assertEqual(parameterized_job.label, self.labels[0])
        self.assertEqual(parameterized_job.kernels.count(), 1)
        self.assertEqual(parameterized_job.profilers.count(), 1)

        kernel = models.Kernel.objects.get(**kernels[0])
        self.assertEqual(parameterized_job.kernels.all()[0], kernel)
        self.assertEqual(parameterized_job.profilers.all()[0], profiler)

        parameterized_profiler = models.ParameterizedJobProfiler.objects.get(
                parameterized_job=parameterized_job, profiler=profiler)
        profiler_parameters_obj = (
                models.ParameterizedJobProfilerParameter.objects.get(
                    parameterized_job_profiler=parameterized_profiler))
        self.assertEqual(profiler_parameters_obj.parameter_name, 'key')
        self.assertEqual(profiler_parameters_obj.parameter_value, 'value')
        self.assertEqual(profiler_parameters_obj.parameter_type, string_type)

        self.assertEqual(
                parameterized_job.parameterizedjobparameter_set.count(), 1)
        parameters_obj = (
                parameterized_job.parameterizedjobparameter_set.all()[0])
        self.assertEqual(parameters_obj.test_parameter, test_parameter)
        self.assertEqual(parameters_obj.parameter_value, 'value')
        self.assertEqual(parameters_obj.parameter_type, string_type)


    def _modify_host_helper(self, on_shard=False, host_on_shard=False):
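        """Drives modify_host() through the master/shard routing paths.

        @param on_shard: pretend this AFE instance is the shard itself.
        @param host_on_shard: the modified host is assigned to a shard, so
                              the RPC must be forwarded.
        """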
        shard_hostname = 'shard1'
        if on_shard:
            global_config.global_config.override_config_value(
                'SHARD', 'shard_hostname', shard_hostname)

        host = models.Host.objects.all()[0]
        if host_on_shard:
            shard = models.Shard.objects.create(hostname=shard_hostname)
            host.shard = shard
            host.save()

        self.assertFalse(host.locked)

        self.god.stub_class_method(frontend.AFE, 'run')

        if host_on_shard and not on_shard:
            mock_afe = self.god.create_mock_class_obj(
                    frontend_wrappers.RetryingAFE, 'MockAFE')
            self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

            mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
                    server=shard_hostname, user=None)
            mock_afe2.run.expect_call('modify_host_local', id=host.id,
                    locked=True, lock_reason='_modify_host_helper lock',
                    lock_time=datetime.datetime(2015, 12, 15))
        elif on_shard:
            mock_afe = self.god.create_mock_class_obj(
                    frontend_wrappers.RetryingAFE, 'MockAFE')
            self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

            mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
                    server=server_utils.get_global_afe_hostname(), user=None)
            mock_afe2.run.expect_call('modify_host', id=host.id,
                    locked=True, lock_reason='_modify_host_helper lock',
                    lock_time=datetime.datetime(2015, 12, 15))

        rpc_interface.modify_host(id=host.id, locked=True,
                                  lock_reason='_modify_host_helper lock',
                                  lock_time=datetime.datetime(2015, 12, 15))

        host = models.Host.objects.get(pk=host.id)
        if on_shard:
            # On a shard, modify_host does nothing but route the RPC to the
            # master, so the local host entry stays unlocked.
            self.assertFalse(host.locked)
        else:
            self.assertTrue(host.locked)
        self.god.check_playback()


    def test_modify_host_on_master_host_on_master(self):
        """Call modify_host on the master for a host on the master."""
        self._modify_host_helper()


    def test_modify_host_on_master_host_on_shard(self):
        """Call modify_host on the master for a host assigned to a shard."""
        self._modify_host_helper(host_on_shard=True)


    def test_modify_host_on_shard(self):
        """Call modify_host on a shard for a host assigned to that shard."""
        self._modify_host_helper(on_shard=True, host_on_shard=True)


    def test_modify_hosts_on_master_host_on_shard(self):
        """Ensure calls to modify_hosts are correctly forwarded to shards."""
        host1 = models.Host.objects.all()[0]
        host2 = models.Host.objects.all()[1]

        shard1 = models.Shard.objects.create(hostname='shard1')
        host1.shard = shard1
        host1.save()

        shard2 = models.Shard.objects.create(hostname='shard2')
        host2.shard = shard2
        host2.save()

        self.assertFalse(host1.locked)
        self.assertFalse(host2.locked)

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        # The statuses of one host might differ on master and shard.
        # Filters are always applied on the master, so the host on the shard
        # will be affected no matter what its status is.
        filters_to_use = {'status': 'Ready'}

        mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard2', user=None)
        mock_afe2.run.expect_call(
                'modify_hosts_local',
                host_filter_data={'id__in': [shard1.id, shard2.id]},
                update_data={'locked': True,
                             'lock_reason': 'Testing forward to shard',
                             'lock_time': datetime.datetime(2015, 12, 15)})

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call(
                'modify_hosts_local',
                host_filter_data={'id__in': [shard1.id, shard2.id]},
                update_data={'locked': True,
                             'lock_reason': 'Testing forward to shard',
                             'lock_time': datetime.datetime(2015, 12, 15)})

        rpc_interface.modify_hosts(
                host_filter_data={'status': 'Ready'},
                update_data={'locked': True,
                             'lock_reason': 'Testing forward to shard',
                             'lock_time': datetime.datetime(2015, 12, 15)})

        host1 = models.Host.objects.get(pk=host1.id)
        self.assertTrue(host1.locked)
        host2 = models.Host.objects.get(pk=host2.id)
        self.assertTrue(host2.locked)
        self.god.check_playback()


    def test_delete_host(self):
        """Ensure deleting a host on a shard sends an RPC to that shard."""
        host1 = models.Host.objects.all()[0]
        shard1 = models.Shard.objects.create(hostname='shard1')
        host1.shard = shard1
        host1.save()
        host1_id = host1.id

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call('delete_host', id=host1.id)

        rpc_interface.delete_host(id=host1.id)

        self.assertRaises(models.Host.DoesNotExist,
                          models.Host.smart_get, host1_id)

        self.god.check_playback()


    def test_modify_label(self):
        label1 = models.Label.objects.all()[0]
        self.assertEqual(label1.invalid, 0)

        host2 = models.Host.objects.all()[1]
        shard1 = models.Shard.objects.create(hostname='shard1')
        host2.shard = shard1
        host2.labels.add(label1)
        host2.save()

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call('modify_label', id=label1.id, invalid=1)

        rpc_interface.modify_label(label1.id, invalid=1)

        self.assertEqual(models.Label.objects.all()[0].invalid, 1)
        self.god.check_playback()


    def test_delete_label(self):
        label1 = models.Label.objects.all()[0]

        host2 = models.Host.objects.all()[1]
        shard1 = models.Shard.objects.create(hostname='shard1')
        host2.shard = shard1
        host2.labels.add(label1)
        host2.save()

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call('delete_label', id=label1.id)

        rpc_interface.delete_label(id=label1.id)

        self.assertRaises(models.Label.DoesNotExist,
                          models.Label.smart_get, label1.id)
        self.god.check_playback()


    def test_get_image_for_job_parameterized(self):
        test = models.Test.objects.create(
                name='name', author='author', test_class='class',
                test_category='category',
                test_type=control_data.CONTROL_TYPE.SERVER, path='path')
        parameterized_job = models.ParameterizedJob.objects.create(test=test)
        job = self._create_job(hosts=[1])
        job.parameterized_job = parameterized_job
        self.god.stub_function_to_return(rpc_interface,
                'get_parameterized_autoupdate_image_url', 'cool-image')
        image = rpc_interface._get_image_for_job(job, True)
        self.assertEquals('cool-image', image)
        self.god.check_playback()


    def test_get_image_for_job_with_keyval_build(self):
        keyval_dict = {'build': 'cool-image'}
        job_id = rpc_interface.create_job(name='test',
                                          priority=priorities.Priority.DEFAULT,
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'],
                                          keyvals=keyval_dict)
        job = models.Job.objects.get(id=job_id)
        self.assertIsNotNone(job)
        image = rpc_interface._get_image_for_job(job, True)
        self.assertEquals('cool-image', image)


    def test_get_image_for_job_with_keyval_builds(self):
        keyval_dict = {'builds': {'cros-version': 'cool-image'}}
        job_id = rpc_interface.create_job(name='test',
                                          priority=priorities.Priority.DEFAULT,
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'],
                                          keyvals=keyval_dict)
        job = models.Job.objects.get(id=job_id)
        self.assertIsNotNone(job)
        image = rpc_interface._get_image_for_job(job, True)
        self.assertEquals('cool-image', image)


    def test_get_image_for_job_with_control_build(self):
        CONTROL_FILE = """build='cool-image'
        """
        job_id = rpc_interface.create_job(name='test',
                                          priority=priorities.Priority.DEFAULT,
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'])
        job = models.Job.objects.get(id=job_id)
        self.assertIsNotNone(job)
        job.control_file = CONTROL_FILE
        image = rpc_interface._get_image_for_job(job, True)
        self.assertEquals('cool-image', image)


    def test_get_image_for_job_with_control_builds(self):
        CONTROL_FILE = """builds={'cros-version': 'cool-image'}
        """
        job_id = rpc_interface.create_job(name='test',
                                          priority=priorities.Priority.DEFAULT,
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'])
        job = models.Job.objects.get(id=job_id)
        self.assertIsNotNone(job)
        job.control_file = CONTROL_FILE
        image = rpc_interface._get_image_for_job(job, True)
        self.assertEquals('cool-image', image)


class ExtraRpcInterfaceTest(mox.MoxTestBase,
                            frontend_test_utils.FrontendTestMixin):
    """Unit tests for functions originally in site_rpc_interface.py.

    @var _NAME: fake suite name.
    @var _BOARD: fake board to reimage.
    @var _BUILD: fake build with which to reimage.
    @var _PRIORITY: fake priority with which to reimage.
    """
    _NAME = 'name'
    _BOARD = 'link'
    _BUILD = 'link-release/R36-5812.0.0'
    _BUILDS = {provision.CROS_VERSION_PREFIX: _BUILD}
    _PRIORITY = priorities.Priority.DEFAULT
    _TIMEOUT = 24


    def setUp(self):
        super(ExtraRpcInterfaceTest, self).setUp()
        self._SUITE_NAME = rpc_interface.canonicalize_suite_name(
            self._NAME)
        self.dev_server = self.mox.CreateMock(dev_server.ImageServer)
        self._frontend_common_setup(fill_data=False)


    def tearDown(self):
        self._frontend_common_teardown()


    def _setupDevserver(self):
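        """Stubs out dev_server so resolving the build returns the mock."""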
        self.mox.StubOutClassWithMocks(dev_server, 'ImageServer')
        dev_server.resolve(self._BUILD).AndReturn(self.dev_server)


    def _mockDevServerGetter(self, get_control_file=True):
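        """Sets up the dev server mock and, if requested, a mocked
        DevServerGetter for fetching the suite control file.
        """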
        self._setupDevserver()
        if get_control_file:
            self.getter = self.mox.CreateMock(
                    control_file_getter.DevServerGetter)
            self.mox.StubOutWithMock(control_file_getter.DevServerGetter,
                                     'create')
            control_file_getter.DevServerGetter.create(
                    mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(self.getter)


    def _mockRpcUtils(self, to_return, control_file_substring=''):
        """Fake out the autotest rpc_utils module with a mockable class.

        @param to_return: the value that rpc_utils.create_job_common() should
                          be mocked out to return.
        @param control_file_substring: A substring that is expected to appear
                                       in the control file output string that
                                       is passed to create_job_common.
                                       Default: ''
        """
        download_started_time = constants.DOWNLOAD_STARTED_TIME
        payload_finished_time = constants.PAYLOAD_FINISHED_TIME
        self.mox.StubOutWithMock(rpc_utils, 'create_job_common')
        rpc_utils.create_job_common(mox.And(mox.StrContains(self._NAME),
                                            mox.StrContains(self._BUILD)),
                                    priority=self._PRIORITY,
                                    timeout_mins=self._TIMEOUT*60,
                                    max_runtime_mins=self._TIMEOUT*60,
                                    control_type='Server',
                                    control_file=mox.And(
                                            mox.StrContains(self._BOARD),
                                            mox.StrContains(self._BUILD),
                                            mox.StrContains(
                                                    control_file_substring)),
                                    hostless=True,
                                    keyvals=mox.And(
                                            mox.In(download_started_time),
                                            mox.In(payload_finished_time))
                                    ).AndReturn(to_return)


    def testStageBuildFail(self):
        """Ensure that a failure to stage the desired build fails the RPC."""
        self._setupDevserver()

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndRaise(
                dev_server.DevServerException())
        self.mox.ReplayAll()
        self.assertRaises(error.StageControlFileFailure,
                          rpc_interface.create_suite_job,
                          name=self._NAME,
                          board=self._BOARD,
                          builds=self._BUILDS,
                          pool=None)


    def testGetControlFileFail(self):
        """Ensure that a failure to fetch the needed control file fails the RPC."""
        self._mockDevServerGetter()

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndReturn(True)

        self.getter.get_control_file_contents_by_name(
                self._SUITE_NAME).AndReturn(None)
        self.mox.ReplayAll()
        self.assertRaises(error.ControlFileEmpty,
                          rpc_interface.create_suite_job,
                          name=self._NAME,
                          board=self._BOARD,
                          builds=self._BUILDS,
                          pool=None)

    def testGetControlFileListFail(self):
        """Ensure that a failure to list the control files fails the RPC."""
        self._mockDevServerGetter()

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndReturn(True)

        self.getter.get_control_file_contents_by_name(
                self._SUITE_NAME).AndRaise(error.NoControlFileList())
        self.mox.ReplayAll()
        self.assertRaises(error.NoControlFileList,
                          rpc_interface.create_suite_job,
                          name=self._NAME,
                          board=self._BOARD,
                          builds=self._BUILDS,
                          pool=None)

    def testBadNumArgument(self):
        """Ensure we handle bad values for the |num| argument."""
        self.assertRaises(error.SuiteArgumentException,
                          rpc_interface.create_suite_job,
                          name=self._NAME,
                          board=self._BOARD,
                          builds=self._BUILDS,
                          pool=None,
                          num='goo')
        self.assertRaises(error.SuiteArgumentException,
                          rpc_interface.create_suite_job,
                          name=self._NAME,
                          board=self._BOARD,
                          builds=self._BUILDS,
                          pool=None,
                          num=[])
        self.assertRaises(error.SuiteArgumentException,
                          rpc_interface.create_suite_job,
                          name=self._NAME,
                          board=self._BOARD,
                          builds=self._BUILDS,
                          pool=None,
                          num='5')


    def testCreateSuiteJobFail(self):
        """Ensure that failure to schedule the suite job fails the RPC."""
        self._mockDevServerGetter()

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndReturn(True)

        self.getter.get_control_file_contents_by_name(
                self._SUITE_NAME).AndReturn('f')

        self.dev_server.url().AndReturn('mox_url')
        self._mockRpcUtils(-1)
        self.mox.ReplayAll()
        self.assertEquals(
            rpc_interface.create_suite_job(name=self._NAME,
                                           board=self._BOARD,
                                           builds=self._BUILDS, pool=None),
            -1)


    def testCreateSuiteJobSuccess(self):
        """Ensures that success results in a successful RPC."""
        self._mockDevServerGetter()

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndReturn(True)

        self.getter.get_control_file_contents_by_name(
                self._SUITE_NAME).AndReturn('f')

        self.dev_server.url().AndReturn('mox_url')
        job_id = 5
        self._mockRpcUtils(job_id)
        self.mox.ReplayAll()
        self.assertEquals(
            rpc_interface.create_suite_job(name=self._NAME,
                                           board=self._BOARD,
                                           builds=self._BUILDS,
                                           pool=None),
            job_id)


    def testCreateSuiteJobNoHostCheckSuccess(self):
        """Ensures success when the host check is skipped."""
        self._mockDevServerGetter()

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndReturn(True)

        self.getter.get_control_file_contents_by_name(
                self._SUITE_NAME).AndReturn('f')

        self.dev_server.url().AndReturn('mox_url')
        job_id = 5
        self._mockRpcUtils(job_id)
        self.mox.ReplayAll()
        self.assertEquals(
            rpc_interface.create_suite_job(name=self._NAME,
                                           board=self._BOARD,
                                           builds=self._BUILDS,
                                           pool=None, check_hosts=False),
            job_id)


    def testCreateSuiteIntegerNum(self):
        """Ensures an integer |num| is passed into the suite control file."""
        self._mockDevServerGetter()

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndReturn(True)

        self.getter.get_control_file_contents_by_name(
                self._SUITE_NAME).AndReturn('f')

        self.dev_server.url().AndReturn('mox_url')
        job_id = 5
        self._mockRpcUtils(job_id, control_file_substring='num=17')
        self.mox.ReplayAll()
        self.assertEquals(
            rpc_interface.create_suite_job(name=self._NAME,
                                           board=self._BOARD,
                                           builds=self._BUILDS,
                                           pool=None,
                                           check_hosts=False,
                                           num=17),
            job_id)


    def testCreateSuiteJobControlFileSupplied(self):
        """Ensure we can supply the control file to create_suite_job."""
        self._mockDevServerGetter(get_control_file=False)

        self.dev_server.hostname = 'mox_url'
        self.dev_server.stage_artifacts(
                image=self._BUILD, artifacts=['test_suites']).AndReturn(True)
        self.dev_server.url().AndReturn('mox_url')
        job_id = 5
        self._mockRpcUtils(job_id)
        self.mox.ReplayAll()
        self.assertEquals(
            rpc_interface.create_suite_job(name='%s/%s' % (self._NAME,
                                                           self._BUILD),
                                           board=None,
                                           builds=self._BUILDS,
                                           pool=None,
                                           control_file='CONTROL FILE'),
            job_id)


    def _get_records_for_sending_to_master(self):
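        """Returns a (jobs, hqes) pair of serialized records, shaped like the
        payload a shard uploads in shard_heartbeat().
        """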
        return [{'control_file': 'foo',
                 'control_type': 1,
                 'created_on': datetime.datetime(2014, 8, 21),
                 'drone_set': None,
                 'email_list': '',
                 'max_runtime_hrs': 72,
                 'max_runtime_mins': 1440,
                 'name': 'dummy',
                 'owner': 'autotest_system',
                 'parse_failed_repair': True,
                 'priority': 40,
                 'reboot_after': 0,
                 'reboot_before': 1,
                 'run_reset': True,
                 'run_verify': False,
                 'synch_count': 0,
                 'test_retry': 10,
                 'timeout': 24,
                 'timeout_mins': 1440,
                 'id': 1
                }], [{
                    'aborted': False,
                    'active': False,
                    'complete': False,
                    'deleted': False,
                    'execution_subdir': '',
                    'finished_on': None,
                    'started_on': None,
                    'status': 'Queued',
                    'id': 1
                }]


    def _do_heartbeat_and_assert_response(self, shard_hostname='shard1',
                                          upload_jobs=(), upload_hqes=(),
                                          known_jobs=(), known_hosts=(),
                                          **kwargs):
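        """Calls shard_heartbeat() with the given records and checks the
        response via _assert_shard_heartbeat_response().
        """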
        known_job_ids = [job.id for job in known_jobs]
        known_host_ids = [host.id for host in known_hosts]
        known_host_statuses = [host.status for host in known_hosts]

        retval = rpc_interface.shard_heartbeat(
            shard_hostname=shard_hostname,
            jobs=upload_jobs, hqes=upload_hqes,
            known_job_ids=known_job_ids, known_host_ids=known_host_ids,
            known_host_statuses=known_host_statuses)

        self._assert_shard_heartbeat_response(shard_hostname, retval,
                                              **kwargs)

        return shard_hostname


    def _assert_shard_heartbeat_response(self, shard_hostname, retval, jobs=[],
                                         hosts=[], hqes=[]):
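        """Compares the jobs, hosts and HQEs in the heartbeat response with
        the expected model objects.
        """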

        retval_hosts, retval_jobs = retval['hosts'], retval['jobs']

        expected_jobs = [
            (job.id, job.name, shard_hostname) for job in jobs]
        returned_jobs = [(job['id'], job['name'], job['shard']['hostname'])
                         for job in retval_jobs]
        self.assertEqual(returned_jobs, expected_jobs)

        expected_hosts = [(host.id, host.hostname) for host in hosts]
        returned_hosts = [(host['id'], host['hostname'])
                          for host in retval_hosts]
        self.assertEqual(returned_hosts, expected_hosts)

        retval_hqes = []
        for job in retval_jobs:
            retval_hqes += job['hostqueueentry_set']

        expected_hqes = [(hqe.id) for hqe in hqes]
        returned_hqes = [(hqe['id']) for hqe in retval_hqes]
        self.assertEqual(returned_hqes, expected_hqes)


    def _send_records_to_master_helper(
        self, jobs, hqes, shard_hostname='host1',
        exception_to_throw=error.UnallowedRecordsSentToMaster, aborted=False):
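        """Creates a job assigned to a shard, then uploads the given records
        in a heartbeat, expecting exception_to_throw (or success if None).
        """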
        job_id = rpc_interface.create_job(
            name='dummy',
            priority=self._PRIORITY,
            control_file='foo',
            control_type=SERVER,
            test_retry=10, hostless=True)
        job = models.Job.objects.get(pk=job_id)
        shard = models.Shard.objects.create(hostname='host1')
        job.shard = shard
        job.save()

        if aborted:
            job.hostqueueentry_set.update(aborted=True)
            job.shard = None
            job.save()

        hqe = job.hostqueueentry_set.all()[0]
        if not exception_to_throw:
            self._do_heartbeat_and_assert_response(
                shard_hostname=shard_hostname,
                upload_jobs=jobs, upload_hqes=hqes)
        else:
            self.assertRaises(
                exception_to_throw,
                self._do_heartbeat_and_assert_response,
                shard_hostname=shard_hostname,
                upload_jobs=jobs, upload_hqes=hqes)


    def testSendingRecordsToMaster(self):
        """Send records to the master and ensure they are persisted."""
        jobs, hqes = self._get_records_for_sending_to_master()
        hqes[0]['status'] = 'Completed'
        self._send_records_to_master_helper(
            jobs=jobs, hqes=hqes, exception_to_throw=None)

        # Check the entry was actually written to the db.
        self.assertEqual(models.HostQueueEntry.objects.all()[0].status,
                         'Completed')


    def testSendingRecordsToMasterAbortedOnMaster(self):
        """Send records for a job aborted on the master; they still persist."""
        jobs, hqes = self._get_records_for_sending_to_master()
        hqes[0]['status'] = 'Completed'
        self._send_records_to_master_helper(
            jobs=jobs, hqes=hqes, exception_to_throw=None, aborted=True)

        # Check the entry was actually written to the db.
        self.assertEqual(models.HostQueueEntry.objects.all()[0].status,
                         'Completed')


    def testSendingRecordsToMasterJobAssignedToDifferentShard(self):
        """Ensure records that belong to a different shard are rejected."""
        jobs, hqes = self._get_records_for_sending_to_master()
        models.Shard.objects.create(hostname='other_shard')
        self._send_records_to_master_helper(
            jobs=jobs, hqes=hqes, shard_hostname='other_shard')


    def testSendingRecordsToMasterJobHqeWithoutJob(self):
        """Ensure an HQE update without an update for its job is rejected."""
        _, hqes = self._get_records_for_sending_to_master()
        self._send_records_to_master_helper(
            jobs=[], hqes=hqes)


    def testSendingRecordsToMasterNotExistingJob(self):
        """Ensure an update for a non-existent job is rejected."""
        jobs, hqes = self._get_records_for_sending_to_master()
        jobs[0]['id'] = 3

        self._send_records_to_master_helper(
            jobs=jobs, hqes=hqes)


    def _createShardAndHostWithLabel(self, shard_hostname='shard1',
                                     host_hostname='host1',
                                     label_name='board:lumpy'):
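        """Creates a shard and a host that share the given board label."""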
        label = models.Label.objects.create(name=label_name)

        shard = models.Shard.objects.create(hostname=shard_hostname)
        shard.labels.add(label)

        host = models.Host.objects.create(hostname=host_hostname, leased=False)
        host.labels.add(label)

        return shard, host, label


    def _createJobForLabel(self, label):
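        """Creates a meta-host job that depends on the given label."""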
        job_id = rpc_interface.create_job(name='dummy', priority=self._PRIORITY,
                                          control_file='foo',
                                          control_type=CLIENT,
                                          meta_hosts=[label.name],
                                          dependencies=(label.name,))
        return models.Job.objects.get(id=job_id)


    def testShardHeartbeatFetchHostlessJob(self):
        """Create a hostless job and ensure it's not assigned to a shard."""
        shard1, host1, lumpy_label = self._createShardAndHostWithLabel(
            'shard1', 'host1', 'board:lumpy')

        label2 = models.Label.objects.create(name='bluetooth', platform=False)

        job1 = self._create_job(hostless=True)

        # Hostless jobs should be executed by the global scheduler.
        self._do_heartbeat_and_assert_response(hosts=[host1])


    def testShardRetrieveJobs(self):
        """Create jobs and retrieve them."""
        # should never be returned by heartbeat
        leased_host = models.Host.objects.create(hostname='leased_host',
                                                 leased=True)

        shard1, host1, lumpy_label = self._createShardAndHostWithLabel()
        shard2, host2, grumpy_label = self._createShardAndHostWithLabel(
            'shard2', 'host2', 'board:grumpy')

        leased_host.labels.add(lumpy_label)

        job1 = self._createJobForLabel(lumpy_label)

        job2 = self._createJobForLabel(grumpy_label)

        job_completed = self._createJobForLabel(lumpy_label)
        # Job is already complete, so it should not be synced.
        job_completed.hostqueueentry_set.update(complete=True)
        job_completed.hostqueueentry_set.create(complete=False)

        job_active = self._createJobForLabel(lumpy_label)
        # Job is already started, so don't sync it.
        job_active.hostqueueentry_set.update(active=True)
        job_active.hostqueueentry_set.create(complete=False, active=False)

        self._do_heartbeat_and_assert_response(
            jobs=[job1], hosts=[host1], hqes=job1.hostqueueentry_set.all())

        self._do_heartbeat_and_assert_response(
            shard_hostname=shard2.hostname,
            jobs=[job2], hosts=[host2], hqes=job2.hostqueueentry_set.all())

        host3 = models.Host.objects.create(hostname='host3', leased=False)
        host3.labels.add(lumpy_label)

        self._do_heartbeat_and_assert_response(
            known_jobs=[job1], known_hosts=[host1], hosts=[host3])


    def testResendJobsAfterFailedHeartbeat(self):
        """Create jobs, retrieve them, fail on client, fetch them again."""
        shard1, host1, lumpy_label = self._createShardAndHostWithLabel()

        job1 = self._createJobForLabel(lumpy_label)

        self._do_heartbeat_and_assert_response(
            jobs=[job1],
            hqes=job1.hostqueueentry_set.all(), hosts=[host1])

        # Make sure it's resubmitted by sending last_job=None again.
        self._do_heartbeat_and_assert_response(
            known_hosts=[host1],
            jobs=[job1], hqes=job1.hostqueueentry_set.all(), hosts=[])

        # Now it worked, make sure it's not sent again.
        self._do_heartbeat_and_assert_response(
            known_jobs=[job1], known_hosts=[host1])

        job1 = models.Job.objects.get(pk=job1.id)
        job1.hostqueueentry_set.all().update(complete=True)

        # Job is completed, make sure it's not sent again.
        self._do_heartbeat_and_assert_response(
            known_hosts=[host1])

        job2 = self._createJobForLabel(lumpy_label)

        # job2 was created later, so it should be returned now.
        self._do_heartbeat_and_assert_response(
            known_hosts=[host1],
            jobs=[job2], hqes=job2.hostqueueentry_set.all())

        self._do_heartbeat_and_assert_response(
            known_jobs=[job2], known_hosts=[host1])

        job2 = models.Job.objects.get(pk=job2.pk)
        job2.hostqueueentry_set.update(aborted=True)
        # Setting a job to a complete status will set the shard_id to None in
        # scheduler_models. We have to emulate that here, because we use Django
        # models in tests.
        job2.shard = None
        job2.save()

        self._do_heartbeat_and_assert_response(
            known_jobs=[job2], known_hosts=[host1],
            jobs=[job2],
            hqes=job2.hostqueueentry_set.all())

        models.Test.objects.create(name='platform_BootPerfServer:shard',
                                   test_type=1)
        self.mox.StubOutWithMock(server_utils, 'read_file')
        server_utils.read_file(mox.IgnoreArg()).AndReturn('')
        self.mox.ReplayAll()
        rpc_interface.delete_shard(hostname=shard1.hostname)

        self.assertRaises(
            models.Shard.DoesNotExist, models.Shard.objects.get, pk=shard1.id)

        job1 = models.Job.objects.get(pk=job1.id)
        lumpy_label = models.Label.objects.get(pk=lumpy_label.id)
        host1 = models.Host.objects.get(pk=host1.id)
        super_job = models.Job.objects.get(priority=priorities.Priority.SUPER)
        super_job_host = models.HostQueueEntry.objects.get(
                job_id=super_job.id)

        self.assertIsNone(job1.shard)
        self.assertEqual(len(lumpy_label.shard_set.all()), 0)
        self.assertIsNone(host1.shard)
        self.assertIsNotNone(super_job)
        self.assertEqual(super_job_host.host_id, host1.id)


    def testCreateListShard(self):
        """Retrieve a list of all shards."""
        lumpy_label = models.Label.objects.create(name='board:lumpy',
                                                  platform=True)
        stumpy_label = models.Label.objects.create(name='board:stumpy',
                                                   platform=True)
        peppy_label = models.Label.objects.create(name='board:peppy',
                                                  platform=True)

        shard_id = rpc_interface.add_shard(
            hostname='host1', labels='board:lumpy,board:stumpy')
        self.assertRaises(error.RPCException,
                          rpc_interface.add_shard,
                          hostname='host1', labels='board:lumpy,board:stumpy')
        self.assertRaises(model_logic.ValidationError,
                          rpc_interface.add_shard,
                          hostname='host1', labels='board:peppy')
        shard = models.Shard.objects.get(pk=shard_id)
        self.assertEqual(shard.hostname, 'host1')
        self.assertEqual(shard.labels.values_list('pk')[0], (lumpy_label.id,))
        self.assertEqual(shard.labels.values_list('pk')[1], (stumpy_label.id,))

        self.assertEqual(rpc_interface.get_shards(),
                         [{'labels': ['board:lumpy', 'board:stumpy'],
                           'hostname': 'host1',
                           'id': 1}])


    def testAddBoardsToShard(self):
        """Add boards to a given shard."""
        shard1, host1, lumpy_label = self._createShardAndHostWithLabel()
        stumpy_label = models.Label.objects.create(name='board:stumpy',
                                                   platform=True)
        shard_id = rpc_interface.add_board_to_shard(
            hostname='shard1', labels='board:stumpy')
        # Test that an exception is raised if the board label does not exist.
        self.assertRaises(models.Label.DoesNotExist,
                          rpc_interface.add_board_to_shard,
                          hostname='shard1', labels='board:test')
        # Test that an exception is raised if the board is already sharded.
        self.assertRaises(error.RPCException,
                          rpc_interface.add_board_to_shard,
                          hostname='shard1', labels='board:lumpy')
        shard = models.Shard.objects.get(pk=shard_id)
        self.assertEqual(shard.hostname, 'shard1')
        self.assertEqual(shard.labels.values_list('pk')[0], (lumpy_label.id,))
        self.assertEqual(shard.labels.values_list('pk')[1], (stumpy_label.id,))

        self.assertEqual(rpc_interface.get_shards(),
                         [{'labels': ['board:lumpy', 'board:stumpy'],
                           'hostname': 'shard1',
                           'id': 1}])


    def testResendHostsAfterFailedHeartbeat(self):
        """Check that the master accepts re-sent host records after failure."""
        shard1, host1, lumpy_label = self._createShardAndHostWithLabel()

        # Send the host.
        self._do_heartbeat_and_assert_response(hosts=[host1])

        # Send it again because the previous heartbeat didn't persist correctly.
        self._do_heartbeat_and_assert_response(hosts=[host1])

        # Now it worked, make sure it isn't sent again.
        self._do_heartbeat_and_assert_response(known_hosts=[host1])


if __name__ == '__main__':
    unittest.main()