#!/usr/bin/python

import logging, os, unittest
import common
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.scheduler import drone_manager, email_manager, monitor_db

# translations necessary for scheduler queries to work with SQLite
_re_translator = database_connection.TranslatingDatabase.make_regexp_translator
_DB_TRANSLATORS = (
        _re_translator(r'NOW\(\)', 'time("now")'),
        # older SQLite doesn't support group_concat, so just don't bother until
        # it arises in an important query
        _re_translator(r'GROUP_CONCAT\((.*?)\)', r'\1'),
        )
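# For example, with these translators a query written as
#   ... WHERE created_on <= NOW()
# runs against SQLite as ... created_on <= time("now"), while
# GROUP_CONCAT(hostname) degrades to plain hostname.  (Illustrative snippets
# only; the real queries come from monitor_db.)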

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status

class NullMethodObject(object):
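    """
    Helper base class: replaces every method named in _NULL_METHODS with a
    no-op, so subclasses only have to implement the calls a test cares about.
    """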
    _NULL_METHODS = ()

    def __init__(self):
        def null_method(*args, **kwargs):
            pass

        for method_name in self._NULL_METHODS:
            setattr(self, method_name, null_method)

class MockGlobalConfig(object):
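    """
    Stand-in for global_config.global_config.  It ignores the default and
    allow_blank arguments and raises RuntimeError for any value the test did
    not explicitly set, so missing test configuration fails loudly.
    """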
    def __init__(self):
        self._config_info = {}


    def set_config_value(self, section, key, value):
        self._config_info[(section, key)] = value


    def get_config_value(self, section, key, type=str,
                         default=None, allow_blank=False):
        identifier = (section, key)
        if identifier not in self._config_info:
            raise RuntimeError('Unset global config value: %s' % (identifier,))
        return self._config_info[identifier]


# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse')
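# (the enum yields _PidfileType.VERIFY, _PidfileType.CLEANUP, ..., which the
# MockDroneManager below uses to index _last_pidfile_id)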


class MockDroneManager(NullMethodObject):
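    """
    In-memory emulation of the scheduler's DroneManager.  Instead of running
    autoserv processes it hands out dummy pidfile IDs for each
    execute_command() call, and the test drives those "processes" to
    completion through finish_process().
    """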
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone')

    class _DummyPidfileId(object):
        """
        Object to represent pidfile IDs that is opaque to the scheduler code but
        still debugging-friendly for us.
        """
        def __init__(self, debug_string):
            self._debug_string = debug_string


        def __str__(self):
            return self._debug_string


    def __init__(self):
        super(MockDroneManager, self).__init__()
        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # tracks pidfiles of processes that have been killed
        self._killed_pidfiles = set()
        # pidfile IDs that have just been unregistered (so will disappear on the
        # next cycle)
        self._unregistered_pidfiles = set()


    # utility APIs for use by the test

    def finish_process(self, pidfile_type, exit_status=0):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)


    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0


    def was_last_process_killed(self, pidfile_type):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return pidfile_id in self._killed_pidfiles


    def running_pidfile_ids(self):
        return [str(pidfile_id) for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.process is not None
                and pidfile_contents.exit_status is None]
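

    def was_process_executed(self, working_directory, pidfile_name):
        # Minimal sketch of the helper that _assert_process_executed() below
        # relies on: a process counts as "executed" once execute_command()
        # has recorded the (working_directory, pidfile_name) pair and
        # execute_actions() has attached a process object to its pidfile.
        pidfile_id = self._pidfile_index.get((working_directory, pidfile_name))
        if pidfile_id is None:
            return False
        return self._pidfiles[pidfile_id].process is not None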


    # DroneManager emulation APIs for use by monitor_db

    def get_orphaned_autoserv_processes(self):
        return set()


    def total_running_processes(self):
        return 0


    def max_runnable_processes(self):
        return 100


    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()


    def execute_actions(self):
        # executing an "execute_command" causes a pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []


    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add((file_path,
                                                                file_contents))
        return 'attach_path'


    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()


    _pidfile_type_map = {
            monitor_db._AUTOSERV_PID_FILE: _PidfileType.JOB,
            monitor_db._CRASHINFO_PID_FILE: _PidfileType.GATHER,
            monitor_db._PARSER_PID_FILE: _PidfileType.PARSE,
    }


    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify, we'll grab the end
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = self._pidfile_type_map[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id


    def execute_command(self, command, working_directory, pidfile_name,
                        log_file=None, paired_with_pidfile=None):
        pidfile_id = self._DummyPidfileId(
                self._get_pidfile_debug_string(working_directory, pidfile_name))
        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id


    def _get_pidfile_debug_string(self, working_directory, pidfile_name):
        return os.path.join(working_directory, pidfile_name)


    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            print 'Request for nonexistent pidfile %s' % pidfile_id
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())


    def is_process_running(self, process):
        return True


    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)


    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)


    def absolute_path(self, path):
        return 'absolute/' + path


    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass


    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        debug_string = ('Nonexistent pidfile: '
                        + self._get_pidfile_debug_string(execution_tag,
                                                         pidfile_name))
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       self._DummyPidfileId(debug_string))


    def kill_process(self, process):
        pidfile_id = self._process_index[process]
        self._killed_pidfiles.add(pidfile_id)
        self._set_pidfile_exit_status(pidfile_id, 271)


class MockEmailManager(NullMethodObject):
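    """
    Stand-in for email_manager.manager: sending is a no-op and notification
    requests are simply logged.
    """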
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warn('enqueue_notify_email: %s', subject)
        logging.warn(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
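    """
    Functional tests for the scheduler.  Each test creates jobs and hosts
    through the Django models, runs monitor_db.Dispatcher ticks against a
    SQLite test database, and steps the resulting processes to completion
    through MockDroneManager.
    """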
    # some number of ticks after which the scheduler is presumed to have
    # stabilized, given no external changes
    _A_LOT_OF_TICKS = 10

    def setUp(self):
        self._frontend_common_setup()
        self._set_stubs()
        self._set_global_config_values()
        self.dispatcher = monitor_db.Dispatcher()

        logging.basicConfig(level=logging.DEBUG)


    def tearDown(self):
        self._frontend_common_teardown()


    def _set_stubs(self):
        self.mock_config = MockGlobalConfig()
        self.god.stub_with(global_config, 'global_config', self.mock_config)

        self.mock_drone_manager = MockDroneManager()
        self.god.stub_with(monitor_db, '_drone_manager',
                           self.mock_drone_manager)

        self.mock_email_manager = MockEmailManager()
        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)

        self._database = (
                database_connection.TranslatingDatabase.get_test_database(
                        file_path=self._test_db_file,
                        translators=_DB_TRANSLATORS))
        self._database.connect(db_type='django')
        self.god.stub_with(monitor_db, '_db', self._database)


    def _set_global_config_values(self):
        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                          1)


    def _initialize_test(self):
        self.dispatcher.initialize()


    def _run_dispatcher(self):
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()


    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()


    def _assert_process_executed(self, working_directory, pidfile_name):
        process_was_executed = self.mock_drone_manager.was_process_executed(
                working_directory, pidfile_name)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))


    def _check_statuses(self, queue_entry, queue_entry_status, host_status):
        # update from DB
        queue_entry = models.HostQueueEntry.objects.get(id=queue_entry.id)
        self.assertEquals(queue_entry.status, queue_entry_status)
        self.assertEquals(queue_entry.host.status, host_status)


    def _check_host_status(self, host, status):
        # update from DB
        host = models.Host.objects.get(id=host.id)
        self.assertEquals(host.status, status)


    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)


    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()


    def _setup_for_pre_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = models.RebootBefore.ALWAYS
        job.save()
        return queue_entry


    def _run_pre_job_cleanup_job(self, queue_entry):
        self._run_dispatcher() # cleanup
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_pre_job_cleanup(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_pre_job_cleanup_job(queue_entry)


    def _run_pre_job_cleanup_one_failure(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry


    def test_pre_job_cleanup_failure(self):
        queue_entry = self._run_pre_job_cleanup_one_failure()
        # from here the job should run as normal
        self._run_pre_job_cleanup_job(queue_entry)


    def test_pre_job_cleanup_double_failure(self):
        # TODO (showard): this test isn't perfect. in reality, when the second
        # cleanup fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them. since we don't handle
        # that, there appear to be no results at the job directory. the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is. but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_cleanup_one_failure()
        self._run_dispatcher() # second cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()


    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])


    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = models.RebootAfter.ALWAYS
        job.save()
        return queue_entry


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)


    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)


    def _finish_job(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing + cleanup
        self._check_statuses(queue_entry, HqeStatus.PARSING,
                             HostStatus.CLEANING)
        self._finish_parsing_and_cleanup()


    def _finish_parsing_and_cleanup(self):
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.objects.create(host=host,
                                          task=models.SpecialTask.Task.VERIFY)
        return host


    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry


    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)


    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)


    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()

        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()


    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()


    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        self._run_dispatcher() # launches verify
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.save()

        self._run_dispatcher() # launches job
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills job, launches gathering
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.JOB))
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        self._finish_parsing_and_cleanup()


    def test_no_pidfile_leaking(self):
        self._initialize_test()
        self.test_simple_job()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})


    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry


    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)


    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should fail on a Verifying HQE with no corresponding
        # Verify or Cleanup SpecialTask
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(host=queue_entry.host,
                                          task=models.SpecialTask.Task.VERIFY)
        models.SpecialTask.objects.create(host=queue_entry.host,
                                          task=models.SpecialTask.Task.CLEANUP,
                                          queue_entry=queue_entry,
                                          is_complete=True)

        self.assertRaises(monitor_db.SchedulerError, self._initialize_test)


    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        special_task = models.SpecialTask.objects.create(
                host=queue_entry.host, task=task, queue_entry=queue_entry)

        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran


    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)


    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)


if __name__ == '__main__':
    unittest.main()
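
# Typical invocations (unittest.main() accepts test names on the command
# line), assuming this file sits in its usual place in the autotest tree so
# that "import common" above can put autotest_lib on sys.path:
#   python monitor_db_functional_test.py
#   python monitor_db_functional_test.py SchedulerFunctionalTest.test_simple_job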