#!/usr/bin/python

import logging, os, unittest
import common
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.scheduler import drone_manager, email_manager, monitor_db

# translations necessary for scheduler queries to work with SQLite
_re_translator = database_connection.TranslatingDatabase.make_regexp_translator
_DB_TRANSLATORS = (
        _re_translator(r'NOW\(\)', 'time("now")'),
        _re_translator(r'LAST_INSERT_ID\(\)', 'LAST_INSERT_ROWID()'),
        # older SQLite doesn't support group_concat, so just don't bother until
        # it arises in an important query
        _re_translator(r'GROUP_CONCAT\((.*?)\)', r'\1'),
        )
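
# As an illustration (the query text here is hypothetical), the translators
# above rewrite a MySQL-flavored scheduler query such as
#     SELECT * FROM jobs WHERE created_on <= NOW()
# into its SQLite equivalent before execution:
#     SELECT * FROM jobs WHERE created_on <= time("now")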

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status

class NullMethodObject(object):
    _NULL_METHODS = ()

    def __init__(self):
        def null_method(*args, **kwargs):
            pass

        for method_name in self._NULL_METHODS:
            setattr(self, method_name, null_method)

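# Illustrative use of NullMethodObject (the subclass name is hypothetical):
# a test double lists the methods it wants silently swallowed, e.g.
#     class MockFoo(NullMethodObject):
#         _NULL_METHODS = ('sync', 'flush')
#     MockFoo().sync()  # no-op
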
class MockGlobalConfig(object):
    def __init__(self):
        self._config_info = {}


    def set_config_value(self, section, key, value):
        self._config_info[(section, key)] = value


    def get_config_value(self, section, key, type=str,
                         default=None, allow_blank=False):
        identifier = (section, key)
        if identifier not in self._config_info:
            raise RuntimeError('Unset global config value: %s' % (identifier,))
        return self._config_info[identifier]


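# Example use of the mock (this mirrors _set_global_config_values() below;
# the 'missing' key is hypothetical): tests seed every value the scheduler
# will read, and any unseeded read fails fast:
#     config = MockGlobalConfig()
#     config.set_config_value('SCHEDULER', 'pidfile_timeout_mins', 1)
#     config.get_config_value('SCHEDULER', 'pidfile_timeout_mins')  # -> 1
#     config.get_config_value('SCHEDULER', 'missing')  # raises RuntimeError
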
# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse')


_PIDFILE_TO_PIDFILE_TYPE = {
        monitor_db._AUTOSERV_PID_FILE: _PidfileType.JOB,
        monitor_db._CRASHINFO_PID_FILE: _PidfileType.GATHER,
        monitor_db._PARSER_PID_FILE: _PidfileType.PARSE,
        }


_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value
                                in _PIDFILE_TO_PIDFILE_TYPE.iteritems())


class MockDroneManager(NullMethodObject):
    """
    Public attributes:
    process_capacity: maximum process count used by max_runnable_processes().
            Tests can lower this to activate throttling.
    """
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone')

    class _DummyPidfileId(object):
        """
        Object representing a pidfile ID that is opaque to the scheduler code
        but still debugging-friendly for us.
        """
        def __init__(self, working_directory, pidfile_name, num_processes=None):
            self._working_directory = working_directory
            self._pidfile_name = pidfile_name
            self._num_processes = num_processes
            self._paired_with_pidfile = None


        def key(self):
            """Key for MockDroneManager._pidfile_index"""
            return (self._working_directory, self._pidfile_name)


        def __str__(self):
            return os.path.join(self._working_directory, self._pidfile_name)


        def __repr__(self):
            return '<_DummyPidfileId: %s>' % str(self)


    def __init__(self):
        super(MockDroneManager, self).__init__()
        self.process_capacity = 100

        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # tracks pidfiles of processes that have been killed
        self._killed_pidfiles = set()
        # pidfile IDs that have just been unregistered (so will disappear on the
        # next cycle)
        self._unregistered_pidfiles = set()


    # utility APIs for use by the test

    def finish_process(self, pidfile_type, exit_status=0):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)


    def finish_specific_process(self, working_directory, pidfile_name):
        pidfile_id = self.pidfile_from_path(working_directory, pidfile_name)
        self._set_pidfile_exit_status(pidfile_id, 0)


    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0


    def was_last_process_killed(self, pidfile_type):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return pidfile_id in self._killed_pidfiles


    def nonfinished_pidfile_ids(self):
        return [pidfile_id for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.exit_status is None]


    def running_pidfile_ids(self):
        return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()
                if self._pidfiles[pidfile_id].process is not None]


    def pidfile_from_path(self, working_directory, pidfile_name):
        return self._pidfile_index[(working_directory, pidfile_name)]
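

    def was_process_executed(self, working_directory, pidfile_name):
        # Assumed helper backing _assert_process_executed() below: a process
        # counts as executed once execute_actions() has attached a process
        # object to its pidfile.
        key = (working_directory, pidfile_name)
        if key not in self._pidfile_index:
            return False
        pidfile_id = self._pidfile_index[key]
        return self._pidfiles[pidfile_id].process is not None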


    # DroneManager emulation APIs for use by monitor_db
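    #
    # Lifecycle sketch (assumed flow): execute_command() queues a pidfile ID,
    # execute_actions() attaches a fake "process" to it, the utility APIs above
    # mark it finished, and unregister_pidfile() plus refresh() make it vanish:
    #     pidfile_id = manager.execute_command(...)  # args elided
    #     manager.execute_actions()
    #     manager._set_pidfile_exit_status(pidfile_id, 0)
    #     manager.unregister_pidfile(pidfile_id)
    #     manager.refresh()  # pidfile_id is now gone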

    def get_orphaned_autoserv_processes(self):
        return set()


    def total_running_processes(self):
        return sum(pidfile_id._num_processes
                   for pidfile_id in self.nonfinished_pidfile_ids())


    def max_runnable_processes(self, username):
        return self.process_capacity - self.total_running_processes()


    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()


    def execute_actions(self):
        # executing an "execute_command" causes a pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []


    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add((file_path,
                                                                file_contents))
        return 'attach_path'


    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            assert pidfile_id.key() not in self._pidfile_index
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()
            self._pidfile_index[pidfile_id.key()] = pidfile_id


    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify, we'll grab the end
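            # e.g. 'hosts/host1/1-verify'.rsplit('-', 1)[1] == 'verify'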
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id


    def execute_command(self, command, working_directory, pidfile_name,
                        num_processes, log_file=None, paired_with_pidfile=None,
                        username=None):
        pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)
        if pidfile_id.key() in self._pidfile_index:
            pidfile_id = self._pidfile_index[pidfile_id.key()]
        pidfile_id._num_processes = num_processes
        pidfile_id._paired_with_pidfile = paired_with_pidfile

        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id


    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            print 'Request for nonexistent pidfile %s' % pidfile_id
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())


    def is_process_running(self, process):
        return True


    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)


    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)


    def declare_process_count(self, pidfile_id, num_processes):
        # store on the dummy pidfile ID's private attribute so that
        # total_running_processes() sees the declared count
        pidfile_id._num_processes = num_processes


    def absolute_path(self, path):
        return 'absolute/' + path


    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass


    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,
                                               num_processes=0)
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       default_pidfile)


    def kill_process(self, process):
        pidfile_id = self._process_index[process]
        self._killed_pidfiles.add(pidfile_id)
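        # Exit statuses appear to follow os.wait()-style encoding (our reading
        # of the convention): 271 & 0x7f == 15, i.e. killed by SIGTERM, while
        # the 256 used elsewhere in these tests encodes a plain exit code 1.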
        self._set_pidfile_exit_status(pidfile_id, 271)


class MockEmailManager(NullMethodObject):
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warn('enqueue_notify_email: %s', subject)
        logging.warn(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
    # some number of ticks after which the scheduler is presumed to have
    # stabilized, given no external changes
    _A_LOT_OF_TICKS = 10

    def setUp(self):
        self._frontend_common_setup()
        self._set_stubs()
        self._set_global_config_values()
        self._create_dispatcher()

        logging.basicConfig(level=logging.DEBUG)


    def _create_dispatcher(self):
        self.dispatcher = monitor_db.Dispatcher()


    def tearDown(self):
        self._frontend_common_teardown()


    def _set_stubs(self):
        self.mock_config = MockGlobalConfig()
        self.god.stub_with(global_config, 'global_config', self.mock_config)

        self.mock_drone_manager = MockDroneManager()
        self.god.stub_with(monitor_db, '_drone_manager',
                           self.mock_drone_manager)

        self.mock_email_manager = MockEmailManager()
        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)

        self._database = (
                database_connection.TranslatingDatabase.get_test_database(
                        file_path=self._test_db_file,
                        translators=_DB_TRANSLATORS))
        self._database.connect(db_type='django')
        self.god.stub_with(monitor_db, '_db', self._database)


    def _set_global_config_values(self):
        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                          1)


    def _initialize_test(self):
        self.dispatcher.initialize()


    def _run_dispatcher(self):
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()


    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()


    def _assert_process_executed(self, working_directory, pidfile_name):
        process_was_executed = self.mock_drone_manager.was_process_executed(
                working_directory, pidfile_name)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))


    def _update_instance(self, model_instance):
        return type(model_instance).objects.get(pk=model_instance.pk)


    def _check_statuses(self, queue_entry, queue_entry_status,
                        host_status=None):
        # update from DB
        queue_entry = self._update_instance(queue_entry)
        self.assertEquals(queue_entry.status, queue_entry_status)
        if host_status:
            self.assertEquals(queue_entry.host.status, host_status)


    def _check_host_status(self, host, status):
        # update from DB
        host = models.Host.objects.get(id=host.id)
        self.assertEquals(host.status, status)


    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)


    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()


    def _setup_for_pre_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = models.RebootBefore.ALWAYS
        job.save()
        return queue_entry


    def _run_pre_job_cleanup_job(self, queue_entry):
        self._run_dispatcher() # cleanup
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_pre_job_cleanup(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_pre_job_cleanup_job(queue_entry)


    def _run_pre_job_cleanup_one_failure(self):
        queue_entry = self._setup_for_pre_job_cleanup()
        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry


    def test_pre_job_cleanup_failure(self):
        queue_entry = self._run_pre_job_cleanup_one_failure()
        # from here the job should run as normal
        self._run_pre_job_cleanup_job(queue_entry)


    def test_pre_job_cleanup_double_failure(self):
        # TODO (showard): this test isn't perfect. in reality, when the second
        # cleanup fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them. since we don't handle
        # that, there appear to be no results at the job directory. the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is. but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_cleanup_one_failure()
        self._run_dispatcher() # second cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()


    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])


    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = models.RebootAfter.ALWAYS
        job.save()
        return queue_entry


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)


    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)


    def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
        pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
        queue_entry = self._update_instance(queue_entry)
        pidfile_id = self.mock_drone_manager.pidfile_from_path(
                queue_entry.execution_path(), pidfile_name)
        self.assert_(pidfile_id._paired_with_pidfile)


    def _finish_job(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing + cleanup
        self._check_statuses(queue_entry, HqeStatus.PARSING,
                             HostStatus.CLEANING)
        self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
        self._finish_parsing_and_cleanup()


    def _finish_parsing_and_cleanup(self):
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.objects.create(host=host,
                                          task=models.SpecialTask.Task.VERIFY,
                                          requested_by=self.user)
        return host


    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry


    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)


    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = models.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)


    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()

        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()


    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()


    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        self._run_dispatcher() # launches verify
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.save()

        self._run_dispatcher() # launches job
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills job, launches gathering
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.JOB))
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        self._finish_parsing_and_cleanup()


    def test_no_pidfile_leaking(self):
        self._initialize_test()
        self.test_simple_job()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})


    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry


    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)


    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should fail on a Verifying HQE with no corresponding
        # Verify or Cleanup SpecialTask
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(host=queue_entry.host,
                                          task=models.SpecialTask.Task.VERIFY)
        models.SpecialTask.objects.create(host=queue_entry.host,
                                          task=models.SpecialTask.Task.CLEANUP,
                                          queue_entry=queue_entry,
                                          is_complete=True)

        self.assertRaises(monitor_db.SchedulerError, self._initialize_test)


    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        special_task = models.SpecialTask.objects.create(
                host=queue_entry.host, task=task, queue_entry=queue_entry)

        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran


    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)


    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)


    def test_recover_pending_hqes_with_group(self):
        # recover a group of HQEs that are in Pending, in the same group (e.g.,
        # in a job with atomic hosts)
        job = self._create_job(hosts=[1, 2], atomic_group=1)
        job.save()

        job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)

        self._initialize_test()
        for queue_entry in job.hostqueueentry_set.all():
            self.assertEquals(queue_entry.status, HqeStatus.STARTING)


    def test_recover_parsing(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.run_verify = False
        job.reboot_after = models.RebootAfter.NEVER
        job.save()

        self._run_dispatcher() # launches job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing

        # now "restart" the scheduler
        self._create_dispatcher()
        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def test_recover_parsing__no_process_already_aborted(self):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.execution_subdir = 'host1'
        queue_entry.status = HqeStatus.PARSING
        queue_entry.aborted = True
        queue_entry.save()

        self._initialize_test()
        self._run_dispatcher()


    def test_job_scheduled_just_after_abort(self):
        # test a pretty obscure corner case where a job is aborted while queued,
        # another job is ready to run, and throttling is active. the post-abort
        # cleanup must not be pre-empted by the second job.
        job1, queue_entry1 = self._make_job_and_queue_entry()
        job2, queue_entry2 = self._make_job_and_queue_entry()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.process_capacity = 100
        self._run_dispatcher() # cleanup must run here, not verify for job2
        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                             HostStatus.CLEANING)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # now verify starts for job2
        self._check_statuses(queue_entry2, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)


    def test_reverify_interrupting_pre_job(self):
        # ensure things behave sanely if a reverify is scheduled in the middle
        # of pre-job actions
        _, queue_entry = self._make_job_and_queue_entry()

        self._run_dispatcher() # pre-job verify
        self._create_reverify_request()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher() # reverify runs now
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # pre-job verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # and job runs...
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry) # reverify has been deleted
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._assert_nothing_is_running()


    def test_reverify_while_job_running(self):
        # once a job is running, a reverify must not be allowed to preempt
        # Gathering
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job runs
        self._create_reverify_request()
        # make job end with a signal, so gathering will run
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering must start
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing and cleanup
        self._finish_parsing_and_cleanup()
        self._run_dispatcher() # now reverify runs
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_reverify_while_host_pending(self):
        # ensure that if a reverify is scheduled while a host is in Pending, it
        # won't run until the host is actually free
        job = self._create_job(hosts=[1, 2])
        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
        job.synch_count = 2
        job.save()

        host2 = self.hosts[1]
        host2.locked = True
        host2.save()

        self._run_dispatcher() # verify host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # host1 Pending
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        self._create_reverify_request()
        self._run_dispatcher() # nothing should happen here
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)

        # now let the job run
        host2.locked = False
        host2.save()
        self._run_dispatcher() # verify host2
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # run job
        self._finish_job(queue_entry)
        # need to explicitly finish host1's post-job cleanup
        self.mock_drone_manager.finish_specific_process(
                'hosts/host1/4-cleanup', monitor_db._AUTOSERV_PID_FILE)
        self._run_dispatcher()
        # the reverify should now be running
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_throttling(self):
        job = self._create_job(hosts=[1, 2, 3])
        job.synch_count = 3
        job.save()

        queue_entries = list(job.hostqueueentry_set.all())
        def _check_hqe_statuses(*statuses):
            for queue_entry, status in zip(queue_entries, statuses):
                self._check_statuses(queue_entry, status)

        self.mock_drone_manager.process_capacity = 2
        self._run_dispatcher() # verify runs on 1 and 2
        _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.VERIFYING,
                            HqeStatus.VERIFYING)
        self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)

        self.mock_drone_manager.finish_specific_process(
                'hosts/host1/1-verify', monitor_db._AUTOSERV_PID_FILE)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # verify runs on 3
        _check_hqe_statuses(HqeStatus.PENDING, HqeStatus.PENDING,
                            HqeStatus.VERIFYING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job won't run due to throttling
        _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
                            HqeStatus.STARTING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now job runs
        _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
                            HqeStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 2
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering won't run due to throttling
        _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
                            HqeStatus.GATHERING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now gathering runs

        self.mock_drone_manager.process_capacity = 0
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing runs despite throttling
        _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
                            HqeStatus.PARSING)


    def test_simple_atomic_group_job(self):
        job = self._create_job(atomic_group=1)
        self._run_dispatcher() # expand + verify
        queue_entries = job.hostqueueentry_set.all()
        self.assertEquals(len(queue_entries), 2)
        self.assertEquals(queue_entries[0].host.hostname, 'host5')
        self.assertEquals(queue_entries[1].host.hostname, 'host6')

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # delay task started waiting

        self.mock_drone_manager.finish_specific_process(
                'hosts/host5/1-verify', monitor_db._AUTOSERV_PID_FILE)
        self._run_dispatcher() # job starts now
        for entry in queue_entries:
            self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)

        # rest of job proceeds normally


if __name__ == '__main__':
    unittest.main()