Add a test case for aborting a synchronous job while it's throttled in the Starting state. This was an attempt to reproduce a bug; it doesn't reproduce, suggesting the bug has already been fixed (or that this test case is missing something). Either way, it's good to have another test case around.
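For reference, the shape of the scenario under test, as a minimal hypothetical sketch (ToyDispatcher and Entry are illustrative stand-ins, not the scheduler's real classes): a dispatcher with zero process capacity leaves an entry stuck in Starting, and an abort arriving in that window must still be honored.

    class Entry(object):
        def __init__(self):
            self.status = 'Starting'  # past Pending, but no process launched yet
            self.aborted = False

    class ToyDispatcher(object):
        def __init__(self, capacity):
            self.capacity = capacity  # 0 simulates full throttling

        def tick(self, entry):
            if entry.aborted:
                entry.status = 'Aborted'  # abort must win even before launch
            elif entry.status == 'Starting' and self.capacity > 0:
                entry.status = 'Running'  # only launch when not throttled

    entry = Entry()
    dispatcher = ToyDispatcher(capacity=0)
    dispatcher.tick(entry)   # throttled: entry stays in Starting
    entry.aborted = True
    dispatcher.tick(entry)   # the case under test: abort while throttled
    assert entry.status == 'Aborted'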
Also fix a small test bug: the scheduler now reads a new global config value, gc_stats_interval_mins, which the test framework needs to mock out; we set it high enough that it never fires during a test run.
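The new value presumably gates periodic garbage-collector stats logging in the scheduler's tick loop; a minimal sketch of the pattern being neutralized (names are hypothetical, not monitor_db's actual code):

    import time

    GC_STATS_INTERVAL_SECS = 999999 * 60  # the huge mocked interval

    class TickLoop(object):
        def __init__(self):
            self._last_gc_stats_time = time.time()

        def tick(self):
            # Interval-gated periodic work: with a huge interval, this
            # branch can never fire within a test run.
            now = time.time()
            if now - self._last_gc_stats_time >= GC_STATS_INTERVAL_SECS:
                self._last_gc_stats_time = now
                self._log_gc_stats()

        def _log_gc_stats(self):
            pass  # stand-in for the real stats dump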
Signed-off-by: Steve Howard <showard@google.com>
git-svn-id: http://test.kernel.org/svn/autotest/trunk@4043 592f7852-d20e-0410-864c-8624ca9c26a4
diff --git a/scheduler/monitor_db_functional_test.py b/scheduler/monitor_db_functional_test.py
index 4a7e64f..336ee38 100644
--- a/scheduler/monitor_db_functional_test.py
+++ b/scheduler/monitor_db_functional_test.py
@@ -51,13 +51,14 @@
 # the SpecialTask names here must match the suffixes used on the SpecialTask
 # results directories
 _PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
-                         'parse')
+                         'parse', 'archive')
 _PIDFILE_TO_PIDFILE_TYPE = {
         monitor_db._AUTOSERV_PID_FILE: _PidfileType.JOB,
         monitor_db._CRASHINFO_PID_FILE: _PidfileType.GATHER,
         monitor_db._PARSER_PID_FILE: _PidfileType.PARSE,
+        monitor_db._ARCHIVER_PID_FILE: _PidfileType.ARCHIVE,
 }
@@ -341,6 +342,8 @@
     def _set_global_config_values(self):
         self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                           1)
+        self.mock_config.set_config_value('SCHEDULER', 'gc_stats_interval_mins',
+                                          999999)


     def _initialize_test(self):
@@ -526,14 +529,18 @@
         self._check_statuses(queue_entry, HqeStatus.PARSING,
                              HostStatus.CLEANING)
         self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
-        self._finish_parsing_and_cleanup()
+        self._finish_parsing_and_cleanup(queue_entry)


-    def _finish_parsing_and_cleanup(self):
+    def _finish_parsing_and_cleanup(self, queue_entry):
         self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
         self.mock_drone_manager.finish_process(_PidfileType.PARSE)
         self._run_dispatcher()
+        self._check_statuses(queue_entry, HqeStatus.ARCHIVING, HostStatus.READY)
+        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
+        self._run_dispatcher()
+


     def _create_reverify_request(self):
         host = self.hosts[0]
@@ -650,7 +657,8 @@
                                                      _PidfileType.JOB))
         self.mock_drone_manager.finish_process(_PidfileType.GATHER)
         self._run_dispatcher() # launches parsing + cleanup
-        self._finish_parsing_and_cleanup()
+        queue_entry = job.hostqueueentry_set.all()[0]
+        self._finish_parsing_and_cleanup(queue_entry)


     def test_no_pidfile_leaking(self):
@@ -830,7 +838,7 @@
         self._run_dispatcher() # gathering must start
         self.mock_drone_manager.finish_process(_PidfileType.GATHER)
         self._run_dispatcher() # parsing and cleanup
-        self._finish_parsing_and_cleanup()
+        self._finish_parsing_and_cleanup(queue_entry)
         self._run_dispatcher() # now reverify runs
         self._check_statuses(queue_entry, HqeStatus.FAILED,
                              HostStatus.VERIFYING)
@@ -930,6 +938,23 @@
                              HqeStatus.PARSING)


+    def test_abort_starting_while_throttling(self):
+        self._initialize_test()
+        job = self._create_job(hosts=[1, 2], synchronous=True)
+        queue_entry = job.hostqueueentry_set.all()[0]
+        job.run_verify = False
+        job.reboot_after = models.RebootAfter.NEVER
+        job.save()
+
+        self.mock_drone_manager.process_capacity = 0
+        self._run_dispatcher() # go to starting, but don't start job
+        self._check_statuses(queue_entry, HqeStatus.STARTING,
+                             HostStatus.PENDING)
+
+        job.hostqueueentry_set.update(aborted=True)
+        self._run_dispatcher() # abort is handled without the job ever starting
+
+
     def test_simple_atomic_group_job(self):
         job = self._create_job(atomic_group=1)
         self._run_dispatcher() # expand + verify