[autotest] Setup ts_mon in autoserv.
Added code to set up ts_mon flushing process before running autoserv
jobs. Also, cleaned up run_autoserv's usage of sys.exit.
TEST=None
BUG=chromium:642402
BUG=chromium:642560
Change-Id: Ifbb9bd17871d1215d085cc82f6bd43cfc75b7f49
Reviewed-on: https://chromium-review.googlesource.com/378596
Commit-Ready: Paul Hobbs <phobbs@google.com>
Tested-by: Paul Hobbs <phobbs@google.com>
Reviewed-by: Dan Shi <dshi@google.com>
diff --git a/server/autoserv b/server/autoserv
index e3483ad..316fe18 100755
--- a/server/autoserv
+++ b/server/autoserv
@@ -6,8 +6,8 @@
Run a control file through the server side engine
"""
-import ast
+import contextlib
 import datetime
import getpass
import logging
import os
@@ -20,7 +20,6 @@
import urllib2
import common
-
from autotest_lib.client.common_lib import control_data
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import global_config
@@ -62,6 +61,7 @@
from autotest_lib.server import server_logging_config
from autotest_lib.server import server_job, utils, autoserv_parser, autotest
from autotest_lib.server import utils as server_utils
+from autotest_lib.server import site_utils
from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
from autotest_lib.site_utils import job_directories
from autotest_lib.site_utils import job_overhead
@@ -70,6 +70,7 @@
from autotest_lib.client.common_lib import pidfile, logging_manager
from autotest_lib.client.common_lib.cros.graphite import autotest_stats
+
# Control segment to stage server-side package.
STAGE_SERVER_SIDE_PACKAGE_CONTROL_FILE = server_job._control_segment_path(
'stage_server_side_package')
@@ -522,53 +523,55 @@
exit_code = 0
auto_start_servod = _CONFIG.get_config_value(
'AUTOSERV', 'auto_start_servod', type=bool, default=False)
+
try:
- try:
- if repair:
- if auto_start_servod and len(machines) == 1:
- _start_servod(machines[0])
- job.repair(job_labels)
- elif verify:
- job.verify(job_labels)
- elif provision:
- job.provision(job_labels)
- elif reset:
- job.reset(job_labels)
- elif cleanup:
- job.cleanup(job_labels)
- else:
- if auto_start_servod and len(machines) == 1:
- _start_servod(machines[0])
- if use_ssp:
- try:
- _run_with_ssp(job, container_name, job_or_task_id,
- results, parser, ssp_url, job_folder,
- machines)
- finally:
- # Update the ownership of files in result folder.
- correct_results_folder_permission(results)
+ with site_utils.SetupTsMonGlobalState('autoserv', indirect=True, short_lived=True):
+ try:
+ if repair:
+ if auto_start_servod and len(machines) == 1:
+ _start_servod(machines[0])
+ job.repair(job_labels)
+ elif verify:
+ job.verify(job_labels)
+ elif provision:
+ job.provision(job_labels)
+ elif reset:
+ job.reset(job_labels)
+ elif cleanup:
+ job.cleanup(job_labels)
else:
- if collect_crashinfo:
- # Update the ownership of files in result folder. If the
- # job to collect crashinfo was running inside container
- # (SSP) and crashed before correcting folder permission,
- # the result folder might have wrong permission setting.
+ if auto_start_servod and len(machines) == 1:
+ _start_servod(machines[0])
+ if use_ssp:
try:
+ _run_with_ssp(job, container_name, job_or_task_id,
+ results, parser, ssp_url, job_folder,
+ machines)
+ finally:
+ # Update the ownership of files in result folder.
correct_results_folder_permission(results)
- except:
- # Ignore any error as the user may not have root
- # permission to run sudo command.
- pass
- job.run(install_before, install_after,
- verify_job_repo_url=verify_job_repo_url,
- only_collect_crashinfo=collect_crashinfo,
- skip_crash_collection=skip_crash_collection,
- job_labels=job_labels,
- use_packaging=(not no_use_packaging))
- finally:
- while job.hosts:
- host = job.hosts.pop()
- host.close()
+ else:
+ if collect_crashinfo:
+ # Update the ownership of files in result folder. If the
+ # job to collect crashinfo was running inside container
+ # (SSP) and crashed before correcting folder permission,
+ # the result folder might have wrong permission setting.
+ try:
+ correct_results_folder_permission(results)
+ except:
+ # Ignore any error as the user may not have root
+ # permission to run sudo command.
+ pass
+ job.run(install_before, install_after,
+ verify_job_repo_url=verify_job_repo_url,
+ only_collect_crashinfo=collect_crashinfo,
+ skip_crash_collection=skip_crash_collection,
+ job_labels=job_labels,
+ use_packaging=(not no_use_packaging))
+ finally:
+ while job.hosts:
+ host = job.hosts.pop()
+ host.close()
except:
exit_code = 1
traceback.print_exc()
diff --git a/server/site_utils.py b/server/site_utils.py
index 2e4a66b..68040bc 100644
--- a/server/site_utils.py
+++ b/server/site_utils.py
@@ -3,6 +3,7 @@
# found in the LICENSE file.
+import contextlib
import grp
import httplib
import json
@@ -24,11 +25,13 @@
try:
from chromite.lib import cros_build_lib
+ from chromite.lib import ts_mon_config
except ImportError:
- logging.warn('Unable to import chromite.')
+ logging.warn('Unable to import chromite. Monarch is disabled.')
# Init the module variable to None. Access to this module can check if it
# is not None before making calls.
cros_build_lib = None
+ ts_mon_config = None
CONFIG = global_config.global_config
@@ -738,3 +741,21 @@
"""
_, afe_host = get_host_info_from_machine(machine)
return len(afe_host.attributes.get('serials', '').split(',')) > 1
+
+
+def SetupTsMonGlobalState(*args, **kwargs):
+    """Import-safe wrapper around chromite.lib.ts_mon_config's setup function.
+
+ @param *args: Args to pass through.
+ @param **kwargs: Kwargs to pass through.
+ """
+ if ts_mon_config:
+ return ts_mon_config.SetupTsMonGlobalState(*args, **kwargs)
+ else:
+        return TrivialContextManager()
+
+
+@contextlib.contextmanager
+def TrivialContextManager():
+ """Context manager that does nothing."""
+ yield