#!/usr/bin/python

import logging, os, unittest
import common
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.scheduler import drone_manager, email_manager, monitor_db

# translations necessary for scheduler queries to work with SQLite
_re_translator = database_connection.TranslatingDatabase.make_regexp_translator
_DB_TRANSLATORS = (
        _re_translator(r'NOW\(\)', 'time("now")'),
        # older SQLite doesn't support group_concat, so just don't bother until
        # it arises in an important query
        _re_translator(r'GROUP_CONCAT\((.*?)\)', r'\1'),
)

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status

class NullMethodObject(object):
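    """
    Base class that, at construction time, replaces every method named in
    _NULL_METHODS with a no-op, so subclasses only need to implement the calls
    the tests actually care about.
    """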
24 _NULL_METHODS = ()
25
26 def __init__(self):
27 def null_method(*args, **kwargs):
28 pass
29
30 for method_name in self._NULL_METHODS:
31 setattr(self, method_name, null_method)
32
33class MockGlobalConfig(object):
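    """
    In-memory stand-in for the global config: tests set values explicitly via
    set_config_value(), and reading a value that was never set raises
    RuntimeError rather than silently falling back to a default.
    """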
    def __init__(self):
        self._config_info = {}


    def set_config_value(self, section, key, value):
        self._config_info[(section, key)] = value


    def get_config_value(self, section, key, type=str,
                         default=None, allow_blank=False):
        identifier = (section, key)
        if identifier not in self._config_info:
            raise RuntimeError('Unset global config value: %s' % (identifier,))
        return self._config_info[identifier]


# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse')

class MockDroneManager(NullMethodObject):
    """
    Public attributes:
    max_runnable_processes_value: value returned by max_runnable_processes().
                                  Tests can change this to activate throttling.
    """
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone')

    class _DummyPidfileId(object):
        """
        Object representing a pidfile ID; it is opaque to the scheduler code
        but still debugging-friendly for us.
        """
        def __init__(self, debug_string):
            self._debug_string = debug_string


        def __str__(self):
            return self._debug_string


    def __init__(self):
        super(MockDroneManager, self).__init__()
        self.max_runnable_processes_value = 100

        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # tracks pidfiles of processes that have been killed
        self._killed_pidfiles = set()
        # pidfile IDs that have just been unregistered (so will disappear on the
        # next cycle)
        self._unregistered_pidfiles = set()


    # utility APIs for use by the test

    def finish_process(self, pidfile_type, exit_status=0):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)


    def finish_specific_process(self, working_directory, pidfile_name):
        pidfile_id = self._pidfile_index[(working_directory, pidfile_name)]
        self._set_pidfile_exit_status(pidfile_id, 0)


    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0


    def was_last_process_killed(self, pidfile_type):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return pidfile_id in self._killed_pidfiles


    def running_pidfile_ids(self):
        return [str(pidfile_id) for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.process is not None
                and pidfile_contents.exit_status is None]


    # DroneManager emulation APIs for use by monitor_db
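    #
    # The emulated pidfile lifecycle: execute_command() hands out a
    # _DummyPidfileId and queues it in _future_pidfiles; execute_actions()
    # then attaches an opaque process object, at which point the pidfile
    # counts as running until the test calls finish_process() to assign an
    # exit status; refresh() finally drops any pidfiles that monitor_db has
    # unregistered.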

    def get_orphaned_autoserv_processes(self):
        return set()


    def total_running_processes(self):
        return 0


    def max_runnable_processes(self, username):
        return self.max_runnable_processes_value


    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()


    def execute_actions(self):
        # executing an "execute_command" causes a pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []


    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add((file_path,
                                                                file_contents))
        return 'attach_path'


    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()

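    # maps the standard pidfile names used by job-level processes to their
    # _PidfileType; special-task pidfiles are instead classified by the suffix
    # of their results directory in _set_last_pidfile()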
    _pidfile_type_map = {
            monitor_db._AUTOSERV_PID_FILE: _PidfileType.JOB,
            monitor_db._CRASHINFO_PID_FILE: _PidfileType.GATHER,
            monitor_db._PARSER_PID_FILE: _PidfileType.PARSE,
    }


    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify, we'll grab the end
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = self._pidfile_type_map[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id


    def execute_command(self, command, working_directory, pidfile_name,
                        log_file=None, paired_with_pidfile=None,
                        username=None):
        pidfile_id = self._DummyPidfileId(
                self._get_pidfile_debug_string(working_directory, pidfile_name))
        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id


    def _get_pidfile_debug_string(self, working_directory, pidfile_name):
        return os.path.join(working_directory, pidfile_name)


    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            print 'Request for nonexistent pidfile %s' % pidfile_id
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())


    def is_process_running(self, process):
        return True


    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)


    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)


    def absolute_path(self, path):
        return 'absolute/' + path


    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass


    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        debug_string = ('Nonexistent pidfile: '
                        + self._get_pidfile_debug_string(execution_tag,
                                                         pidfile_name))
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       self._DummyPidfileId(debug_string))


    def kill_process(self, process):
        pidfile_id = self._process_index[process]
        self._killed_pidfiles.add(pidfile_id)
        self._set_pidfile_exit_status(pidfile_id, 271)


class MockEmailManager(NullMethodObject):
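    """
    Stand-in for email_manager.manager: sending and queueing emails become
    no-ops, and notification emails are simply logged.
    """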
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warn('enqueue_notify_email: %s', subject)
        logging.warn(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
    # some number of ticks after which the scheduler is presumed to have
    # stabilized, given no external changes
    _A_LOT_OF_TICKS = 10

    def setUp(self):
        self._frontend_common_setup()
        self._set_stubs()
        self._set_global_config_values()
        self.dispatcher = monitor_db.Dispatcher()

        logging.basicConfig(level=logging.DEBUG)


    def tearDown(self):
        self._frontend_common_teardown()


    def _set_stubs(self):
        self.mock_config = MockGlobalConfig()
        self.god.stub_with(global_config, 'global_config', self.mock_config)

        self.mock_drone_manager = MockDroneManager()
        self.god.stub_with(monitor_db, '_drone_manager',
                           self.mock_drone_manager)

        self.mock_email_manager = MockEmailManager()
        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)

        self._database = (
                database_connection.TranslatingDatabase.get_test_database(
                        file_path=self._test_db_file,
                        translators=_DB_TRANSLATORS))
        self._database.connect(db_type='django')
        self.god.stub_with(monitor_db, '_db', self._database)


    def _set_global_config_values(self):
        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                          1)

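    # dispatcher.initialize() performs the scheduler's one-time startup work
    # (presumably including recovery of pre-existing queue entries and special
    # tasks, which is why the recovery tests below prime the database before
    # calling this)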
    def _initialize_test(self):
        self.dispatcher.initialize()


    def _run_dispatcher(self):
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()


    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()

    def _assert_process_executed(self, working_directory, pidfile_name):
        process_was_executed = self.mock_drone_manager.was_process_executed(
                working_directory, pidfile_name)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))


    def _check_statuses(self, queue_entry, queue_entry_status, host_status):
        # update from DB
        queue_entry = models.HostQueueEntry.objects.get(id=queue_entry.id)
        self.assertEquals(queue_entry.status, queue_entry_status)
        self.assertEquals(queue_entry.host.status, host_status)


    def _check_host_status(self, host, status):
        # update from DB
        host = models.Host.objects.get(id=host.id)
        self.assertEquals(host.status, status)


    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)

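    # Happy path: pre-job verify, then the job itself, then parsing + cleanup,
    # ending with the HQE Completed and the host back to Ready.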
    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()


    def _setup_for_pre_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = models.RebootBefore.ALWAYS
        job.save()
        return queue_entry


    def _run_pre_job_cleanup_job(self, queue_entry):
        self._run_dispatcher() # cleanup
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_pre_job_cleanup(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_pre_job_cleanup_job(queue_entry)


    def _run_pre_job_cleanup_one_failure(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry


    def test_pre_job_cleanup_failure(self):
        queue_entry = self._run_pre_job_cleanup_one_failure()
        # from here the job should run as normal
        self._run_pre_job_cleanup_job(queue_entry)


    def test_pre_job_cleanup_double_failure(self):
        # TODO (showard): this test isn't perfect. in reality, when the second
        # cleanup fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them. since we don't handle
        # that, there appear to be no results at the job directory. the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is. but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_cleanup_one_failure()
        self._run_dispatcher() # second cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()


    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])


    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = models.RebootAfter.ALWAYS
        job.save()
        return queue_entry


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)


    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)


    def _finish_job(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing + cleanup
        self._check_statuses(queue_entry, HqeStatus.PARSING,
                             HostStatus.CLEANING)
        self._finish_parsing_and_cleanup()


    def _finish_parsing_and_cleanup(self):
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.objects.create(host=host,
                                          task=models.SpecialTask.Task.VERIFY,
                                          requested_by=self.user)
        return host


    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry


    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)


    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)


    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()

        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()


    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()


    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        self._run_dispatcher() # launches verify
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.save()

        self._run_dispatcher() # launches job
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills job, launches gathering
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.JOB))
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        self._finish_parsing_and_cleanup()

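    # Re-runs a few full scenarios and checks that the mock's pidfile registry
    # is empty afterwards, i.e. the scheduler unregistered every pidfile it
    # registered.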
    def test_no_pidfile_leaking(self):
        self._initialize_test()
        self.test_simple_job()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})


    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry


    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)

    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should fail on a Verifying HQE with no corresponding
        # Verify or Cleanup SpecialTask
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(host=queue_entry.host,
                                          task=models.SpecialTask.Task.VERIFY)
        models.SpecialTask.objects.create(host=queue_entry.host,
                                          task=models.SpecialTask.Task.CLEANUP,
                                          queue_entry=queue_entry,
                                          is_complete=True)

        self.assertRaises(monitor_db.SchedulerError, self._initialize_test)


    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        special_task = models.SpecialTask.objects.create(
                host=queue_entry.host, task=task, queue_entry=queue_entry)

        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran


    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)


    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)


    def test_recover_pending_hqes_with_group(self):
        # recover a group of HQEs that are in Pending, in the same group (e.g.,
        # in a job with atomic hosts)
        job = self._create_job(hosts=[1,2], atomic_group=1)
        job.save()

        job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)

        self._initialize_test()
        for queue_entry in job.hostqueueentry_set.all():
            self.assertEquals(queue_entry.status, HqeStatus.STARTING)


    def test_job_scheduled_just_after_abort(self):
        # test a pretty obscure corner case where a job is aborted while queued,
        # another job is ready to run, and throttling is active. the post-abort
        # cleanup must not be pre-empted by the second job.
        job1, queue_entry1 = self._make_job_and_queue_entry()
        job2, queue_entry2 = self._make_job_and_queue_entry()

        self.mock_drone_manager.max_runnable_processes_value = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.max_runnable_processes_value = 100
        self._run_dispatcher() # cleanup must run here, not verify for job2
        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # now verify starts for job2
        self._check_statuses(queue_entry2, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)

    def _test_job_scheduled_just_after_abort_2(self):
        # variant of the corner case above, but the second job is a synchronous
        # two-host job. the leading underscore keeps unittest from running this
        # automatically.
        job1, _ = self._make_job_and_queue_entry()
        job2 = self._create_job(hosts=[1,2])
        job2.synch_count = 2
        job2.save()

        self.mock_drone_manager.max_runnable_processes_value = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.max_runnable_processes_value = 100
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()
        self.mock_drone_manager.finish_specific_process(
                'hosts/host2/2-verify', monitor_db._AUTOSERV_PID_FILE)
        self._run_dispatcher()


    def test_reverify_interrupting_pre_job(self):
        # ensure things behave sanely if a reverify is scheduled in the middle
        # of pre-job actions
        _, queue_entry = self._make_job_and_queue_entry()

        self._run_dispatcher() # pre-job verify
        self._create_reverify_request()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher() # reverify runs now
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # pre-job verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # and job runs...
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry) # reverify has been deleted
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._assert_nothing_is_running()


    def test_reverify_while_job_running(self):
        # once a job is running, a reverify must not be allowed to preempt
        # Gathering
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job runs
        self._create_reverify_request()
        # make job end with a signal, so gathering will run
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering must start
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing and cleanup
        self._finish_parsing_and_cleanup()
        self._run_dispatcher() # now reverify runs
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_reverify_while_host_pending(self):
        # ensure that if a reverify is scheduled while a host is in Pending, it
        # won't run until the host is actually free
        job = self._create_job(hosts=[1,2])
        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
        job.synch_count = 2
        job.save()

        host2 = self.hosts[1]
        host2.locked = True
        host2.save()

        self._run_dispatcher() # verify host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # host1 Pending
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        self._create_reverify_request()
        self._run_dispatcher() # nothing should happen here
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)

        # now let the job run
        host2.locked = False
        host2.save()
        self._run_dispatcher() # verify host2
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # run job
        self._finish_job(queue_entry)
        # need to explicitly finish host1's post-job cleanup
        self.mock_drone_manager.finish_specific_process(
                'hosts/host1/4-cleanup', monitor_db._AUTOSERV_PID_FILE)
        self._run_dispatcher()
        # the reverify should now be running
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


if __name__ == '__main__':
    unittest.main()