#!/usr/bin/python

import logging, os, unittest
import common
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.scheduler import drone_manager, email_manager, monitor_db

# translations necessary for scheduler queries to work with SQLite
_re_translator = database_connection.TranslatingDatabase.make_regexp_translator
_DB_TRANSLATORS = (
        _re_translator(r'NOW\(\)', 'time("now")'),
        # older SQLite doesn't support group_concat, so just don't bother until
        # it arises in an important query
        _re_translator(r'GROUP_CONCAT\((.*?)\)', r'\1'),
)
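# for example, under these translators a clause like "... < NOW()" is rewritten
# to '... < time("now")', and "GROUP_CONCAT(t.id)" collapses to just "t.id"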

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status

class NullMethodObject(object):
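    """
    Base class for mocks: every method named in a subclass's _NULL_METHODS is
    replaced with a no-op at construction time.
    """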
24 _NULL_METHODS = ()
25
26 def __init__(self):
27 def null_method(*args, **kwargs):
28 pass
29
30 for method_name in self._NULL_METHODS:
31 setattr(self, method_name, null_method)
32
33class MockGlobalConfig(object):
34 def __init__(self):
35 self._config_info = {}
36
37
38 def set_config_value(self, section, key, value):
39 self._config_info[(section, key)] = value
40
41
42 def get_config_value(self, section, key, type=str,
43 default=None, allow_blank=False):
44 identifier = (section, key)
45 if identifier not in self._config_info:
46 raise RuntimeError('Unset global config value: %s' % (identifier,))
47 return self._config_info[identifier]
48
49
showardf85a0b72009-10-07 20:48:45 +000050# the SpecialTask names here must match the suffixes used on the SpecialTask
51# results directories
52_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
53 'parse')
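# enum.Enum exposes these as constants (_PidfileType.VERIFY, _PidfileType.JOB,
# etc.), which the tests pass to MockDroneManager.finish_process() below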


class MockDroneManager(NullMethodObject):
    """
    Public attributes:
    process_capacity: total process capacity of the mock; tests can lower this
            to activate throttling.  max_runnable_processes() reports whatever
            capacity is left after accounting for nonfinished processes.
    """
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone')

    class _DummyPidfileId(object):
        """
        Object representing a pidfile ID that is opaque to the scheduler code
        but still debugging-friendly for us.
        """
        def __init__(self, debug_string, num_processes):
            self._debug_string = debug_string
            self._num_processes = num_processes


        def __str__(self):
            return self._debug_string


        def __repr__(self):
            return '<_DummyPidfileId: %s>' % str(self)


    def __init__(self):
        super(MockDroneManager, self).__init__()
        self.process_capacity = 100

        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # tracks pidfiles of processes that have been killed
        self._killed_pidfiles = set()
        # pidfile IDs that have just been unregistered (so will disappear on the
        # next cycle)
        self._unregistered_pidfiles = set()


    # utility APIs for use by the test

    def finish_process(self, pidfile_type, exit_status=0):
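        """
        Mark the most recently launched process of the given _PidfileType as
        finished with the given exit status.
        """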
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)


    def finish_specific_process(self, working_directory, pidfile_name):
        pidfile_id = self._pidfile_index[(working_directory, pidfile_name)]
        self._set_pidfile_exit_status(pidfile_id, 0)


    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0


    def was_last_process_killed(self, pidfile_type):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return pidfile_id in self._killed_pidfiles


    def nonfinished_pidfile_ids(self):
        return [pidfile_id for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.exit_status is None]


    def running_pidfile_ids(self):
        return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()
                if self._pidfiles[pidfile_id].process is not None]


    # DroneManager emulation APIs for use by monitor_db

    def get_orphaned_autoserv_processes(self):
        return set()


    def total_running_processes(self):
        return sum(pidfile_id._num_processes
                   for pidfile_id in self.nonfinished_pidfile_ids())


    def max_runnable_processes(self, username):
        return self.process_capacity - self.total_running_processes()


    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()


    def execute_actions(self):
        # executing a queued execute_command() causes its pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []


    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add((file_path,
                                                                file_contents))
        return 'attach_path'


    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()


    _pidfile_type_map = {
            monitor_db._AUTOSERV_PID_FILE: _PidfileType.JOB,
            monitor_db._CRASHINFO_PID_FILE: _PidfileType.GATHER,
            monitor_db._PARSER_PID_FILE: _PidfileType.PARSE,
    }


    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify; we'll grab the end
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = self._pidfile_type_map[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id


    def execute_command(self, command, working_directory, pidfile_name,
                        num_processes, log_file=None, paired_with_pidfile=None,
                        username=None):
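        """
        Emulates DroneManager.execute_command(): registers a dummy pidfile ID
        for (working_directory, pidfile_name) and queues it to receive a
        process on the next execute_actions() call.
        """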
        pidfile_id = self._DummyPidfileId(
                self._get_pidfile_debug_string(working_directory, pidfile_name),
                num_processes)
        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id


    def _get_pidfile_debug_string(self, working_directory, pidfile_name):
        return os.path.join(working_directory, pidfile_name)


    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            print 'Request for nonexistent pidfile %s' % pidfile_id
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())


    def is_process_running(self, process):
        return True


    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)


    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)


    def absolute_path(self, path):
        return 'absolute/' + path


    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass


    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        debug_string = ('Nonexistent pidfile: '
                        + self._get_pidfile_debug_string(execution_tag,
                                                         pidfile_name))
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       self._DummyPidfileId(debug_string,
                                                            num_processes=0))


    def kill_process(self, process):
        pidfile_id = self._process_index[process]
        self._killed_pidfiles.add(pidfile_id)
        self._set_pidfile_exit_status(pidfile_id, 271)


class MockEmailManager(NullMethodObject):
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warn('enqueue_notify_email: %s', subject)
        logging.warn(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
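    """
    End-to-end tests that drive monitor_db.Dispatcher against a SQLite test
    database, with the drone manager, email manager and global config replaced
    by the mocks above.
    """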
271 # some number of ticks after which the scheduler is presumed to have
272 # stabilized, given no external changes
273 _A_LOT_OF_TICKS = 10
274
275 def setUp(self):
276 self._frontend_common_setup()
277 self._set_stubs()
278 self._set_global_config_values()
279 self.dispatcher = monitor_db.Dispatcher()
280
281 logging.basicConfig(level=logging.DEBUG)
282
283
284 def tearDown(self):
285 self._frontend_common_teardown()
286
287
288 def _set_stubs(self):
289 self.mock_config = MockGlobalConfig()
290 self.god.stub_with(global_config, 'global_config', self.mock_config)
291
292 self.mock_drone_manager = MockDroneManager()
293 self.god.stub_with(monitor_db, '_drone_manager',
294 self.mock_drone_manager)
295
296 self.mock_email_manager = MockEmailManager()
297 self.god.stub_with(email_manager, 'manager', self.mock_email_manager)
298
299 self._database = (
300 database_connection.TranslatingDatabase.get_test_database(
301 file_path=self._test_db_file,
302 translators=_DB_TRANSLATORS))
303 self._database.connect(db_type='django')
304 self.god.stub_with(monitor_db, '_db', self._database)
305
306
307 def _set_global_config_values(self):
308 self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
309 1)
310
311
312 def _initialize_test(self):
313 self.dispatcher.initialize()
314
315
316 def _run_dispatcher(self):
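        """
        Tick the dispatcher enough times for its pending actions to settle,
        absent any further external changes.
        """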
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()


    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()


    def _assert_process_executed(self, working_directory, pidfile_name):
        process_was_executed = self.mock_drone_manager.was_process_executed(
                working_directory, pidfile_name)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))


    def _check_statuses(self, queue_entry, queue_entry_status,
                        host_status=None):
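        """
        Reload the queue entry from the database and assert its status, and
        optionally its host's status as well.
        """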
        # update from DB
        queue_entry = models.HostQueueEntry.objects.get(id=queue_entry.id)
        self.assertEquals(queue_entry.status, queue_entry_status)
        if host_status:
            self.assertEquals(queue_entry.host.status, host_status)


    def _check_host_status(self, host, status):
        # update from DB
        host = models.Host.objects.get(id=host.id)
        self.assertEquals(host.status, status)


    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)


    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()


    def _setup_for_pre_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = models.RebootBefore.ALWAYS
        job.save()
        return queue_entry


    def _run_pre_job_cleanup_job(self, queue_entry):
        self._run_dispatcher() # cleanup
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_pre_job_cleanup(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_pre_job_cleanup_job(queue_entry)


    def _run_pre_job_cleanup_one_failure(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry


    def test_pre_job_cleanup_failure(self):
        queue_entry = self._run_pre_job_cleanup_one_failure()
        # from here the job should run as normal
        self._run_pre_job_cleanup_job(queue_entry)


    def test_pre_job_cleanup_double_failure(self):
        # TODO (showard): this test isn't perfect. in reality, when the second
        # cleanup fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them. since we don't handle
        # that, there appear to be no results at the job directory. the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is. but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_cleanup_one_failure()
        self._run_dispatcher() # second cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()


    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])


    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = models.RebootAfter.ALWAYS
        job.save()
        return queue_entry


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)


    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)


    def _finish_job(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing + cleanup
        self._check_statuses(queue_entry, HqeStatus.PARSING,
                             HostStatus.CLEANING)
        self._finish_parsing_and_cleanup()


    def _finish_parsing_and_cleanup(self):
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _create_reverify_request(self):
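        """
        Create a user-requested VERIFY SpecialTask against the first host and
        return that host.
        """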
        host = self.hosts[0]
        models.SpecialTask.objects.create(host=host,
                                          task=models.SpecialTask.Task.VERIFY,
                                          requested_by=self.user)
        return host


    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry


    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)


    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)


    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()

        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()


    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()


    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        self._run_dispatcher() # launches verify
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.save()

        self._run_dispatcher() # launches job
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills job, launches gathering
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.JOB))
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        self._finish_parsing_and_cleanup()


    def test_no_pidfile_leaking(self):
        self._initialize_test()
        self.test_simple_job()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})


    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry


    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)


    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should fail on a Verifying HQE with no corresponding
        # Verify or Cleanup SpecialTask
640 _, queue_entry = self._make_job_and_queue_entry()
showard4a604792009-10-20 23:49:10 +0000641 queue_entry.status = HqeStatus.VERIFYING
showardb8900452009-10-12 20:31:01 +0000642 queue_entry.save()
643
644 # make some dummy SpecialTasks that shouldn't count
645 models.SpecialTask.objects.create(host=queue_entry.host,
646 task=models.SpecialTask.Task.VERIFY)
647 models.SpecialTask.objects.create(host=queue_entry.host,
648 task=models.SpecialTask.Task.CLEANUP,
649 queue_entry=queue_entry,
650 is_complete=True)
651
652 self.assertRaises(monitor_db.SchedulerError, self._initialize_test)
653
654
655 def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
656 _, queue_entry = self._make_job_and_queue_entry()
showard4a604792009-10-20 23:49:10 +0000657 queue_entry.status = HqeStatus.VERIFYING
showardb8900452009-10-12 20:31:01 +0000658 queue_entry.save()
659
660 special_task = models.SpecialTask.objects.create(
661 host=queue_entry.host, task=task, queue_entry=queue_entry)
662
663 self._initialize_test()
664 self._run_dispatcher()
665 self.mock_drone_manager.finish_process(pidfile_type)
666 self._run_dispatcher()
667 # don't bother checking the rest of the job execution, as long as the
668 # SpecialTask ran
669
670
671 def test_recover_verifying_hqe_with_cleanup(self):
672 # recover an HQE that was in pre-job cleanup
673 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
674 _PidfileType.CLEANUP)
675
676
677 def test_recover_verifying_hqe_with_verify(self):
678 # recover an HQE that was in pre-job verify
679 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
680 _PidfileType.VERIFY)
681
682
showarda21b9492009-11-04 20:43:18 +0000683 def test_recover_pending_hqes_with_group(self):
684 # recover a group of HQEs that are in Pending, in the same group (e.g.,
685 # in a job with atomic hosts)
686 job = self._create_job(hosts=[1,2], atomic_group=1)
687 job.save()
688
689 job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)
690
691 self._initialize_test()
692 for queue_entry in job.hostqueueentry_set.all():
693 self.assertEquals(queue_entry.status, HqeStatus.STARTING)
694
695
showard65db3932009-10-28 19:54:35 +0000696 def test_job_scheduled_just_after_abort(self):
697 # test a pretty obscure corner case where a job is aborted while queued,
698 # another job is ready to run, and throttling is active. the post-abort
699 # cleanup must not be pre-empted by the second job.
700 job1, queue_entry1 = self._make_job_and_queue_entry()
701 job2, queue_entry2 = self._make_job_and_queue_entry()
702
showard418785b2009-11-23 20:19:59 +0000703 self.mock_drone_manager.process_capacity = 0
showard65db3932009-10-28 19:54:35 +0000704 self._run_dispatcher() # schedule job1, but won't start verify
705 job1.hostqueueentry_set.update(aborted=True)
showard418785b2009-11-23 20:19:59 +0000706 self.mock_drone_manager.process_capacity = 100
showard65db3932009-10-28 19:54:35 +0000707 self._run_dispatcher() # cleanup must run here, not verify for job2
708 self._check_statuses(queue_entry1, HqeStatus.ABORTED,
709 HostStatus.CLEANING)
710 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
711 self._run_dispatcher() # now verify starts for job2
712 self._check_statuses(queue_entry2, HqeStatus.VERIFYING,
713 HostStatus.VERIFYING)
714
715
showard65db3932009-10-28 19:54:35 +0000716 def test_reverify_interrupting_pre_job(self):
717 # ensure things behave sanely if a reverify is scheduled in the middle
718 # of pre-job actions
719 _, queue_entry = self._make_job_and_queue_entry()
720
721 self._run_dispatcher() # pre-job verify
722 self._create_reverify_request()
723 self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
724 exit_status=256)
725 self._run_dispatcher() # repair
726 self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
727 self._run_dispatcher() # reverify runs now
728 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
729 self._run_dispatcher() # pre-job verify
730 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
731 self._run_dispatcher() # and job runs...
732 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
733 self._finish_job(queue_entry) # reverify has been deleted
734 self._check_statuses(queue_entry, HqeStatus.COMPLETED,
735 HostStatus.READY)
736 self._assert_nothing_is_running()
737
738
739 def test_reverify_while_job_running(self):
740 # once a job is running, a reverify must not be allowed to preempt
741 # Gathering
742 _, queue_entry = self._make_job_and_queue_entry()
743 self._run_pre_job_verify(queue_entry)
744 self._run_dispatcher() # job runs
745 self._create_reverify_request()
746 # make job end with a signal, so gathering will run
747 self.mock_drone_manager.finish_process(_PidfileType.JOB,
748 exit_status=271)
749 self._run_dispatcher() # gathering must start
750 self.mock_drone_manager.finish_process(_PidfileType.GATHER)
751 self._run_dispatcher() # parsing and cleanup
752 self._finish_parsing_and_cleanup()
753 self._run_dispatcher() # now reverify runs
754 self._check_statuses(queue_entry, HqeStatus.FAILED,
755 HostStatus.VERIFYING)
756 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
757 self._run_dispatcher()
758 self._check_host_status(queue_entry.host, HostStatus.READY)
759
760
761 def test_reverify_while_host_pending(self):
762 # ensure that if a reverify is scheduled while a host is in Pending, it
763 # won't run until the host is actually free
764 job = self._create_job(hosts=[1,2])
765 queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
766 job.synch_count = 2
767 job.save()
768
769 host2 = self.hosts[1]
770 host2.locked = True
771 host2.save()
772
773 self._run_dispatcher() # verify host1
774 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
775 self._run_dispatcher() # host1 Pending
776 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
777 self._create_reverify_request()
778 self._run_dispatcher() # nothing should happen here
779 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
780
781 # now let the job run
782 host2.locked = False
783 host2.save()
784 self._run_dispatcher() # verify host2
785 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
786 self._run_dispatcher() # run job
787 self._finish_job(queue_entry)
788 # need to explicitly finish host1's post-job cleanup
789 self.mock_drone_manager.finish_specific_process(
790 'hosts/host1/4-cleanup', monitor_db._AUTOSERV_PID_FILE)
791 self._run_dispatcher()
792 # the reverify should now be running
793 self._check_statuses(queue_entry, HqeStatus.COMPLETED,
794 HostStatus.VERIFYING)
795 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
796 self._run_dispatcher()
797 self._check_host_status(queue_entry.host, HostStatus.READY)
798
799
showard418785b2009-11-23 20:19:59 +0000800 def test_throttling(self):
801 job = self._create_job(hosts=[1,2,3])
802 job.synch_count = 3
803 job.save()
804
805 queue_entries = list(job.hostqueueentry_set.all())
806 def _check_hqe_statuses(*statuses):
807 for queue_entry, status in zip(queue_entries, statuses):
808 self._check_statuses(queue_entry, status)
809
810 self.mock_drone_manager.process_capacity = 2
811 self._run_dispatcher() # verify runs on 1 and 2
812 _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.VERIFYING,
813 HqeStatus.VERIFYING)
814 self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)
815
816 self.mock_drone_manager.finish_specific_process(
817 'hosts/host1/1-verify', monitor_db._AUTOSERV_PID_FILE)
818 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
819 self._run_dispatcher() # verify runs on 3
820 _check_hqe_statuses(HqeStatus.PENDING, HqeStatus.PENDING,
821 HqeStatus.VERIFYING)
822
823 self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
824 self._run_dispatcher() # job won't run due to throttling
825 _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
826 HqeStatus.STARTING)
827 self._assert_nothing_is_running()
828
829 self.mock_drone_manager.process_capacity = 3
830 self._run_dispatcher() # now job runs
831 _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
832 HqeStatus.RUNNING)
833
834 self.mock_drone_manager.process_capacity = 2
835 self.mock_drone_manager.finish_process(_PidfileType.JOB,
836 exit_status=271)
837 self._run_dispatcher() # gathering won't run due to throttling
838 _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
839 HqeStatus.GATHERING)
840 self._assert_nothing_is_running()
841
842 self.mock_drone_manager.process_capacity = 3
843 self._run_dispatcher() # now gathering runs
844
845 self.mock_drone_manager.process_capacity = 0
846 self.mock_drone_manager.finish_process(_PidfileType.GATHER)
847 self._run_dispatcher() # parsing runs despite throttling
848 _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
849 HqeStatus.PARSING)
850
851
showard34ab0992009-10-05 22:47:57 +0000852if __name__ == '__main__':
853 unittest.main()