#!/usr/bin/python -u

import os, sys, unittest, optparse
import common
from autotest_lib.utils import parallel
from autotest_lib.client.common_lib.test_utils import unittest as custom_unittest

parser = optparse.OptionParser()
parser.add_option("-r", action="store", type="string", dest="start",
                  default='',
                  help="root directory to start running unittests")
parser.add_option("--full", action="store_true", dest="full", default=False,
                  help="also run the long-running tests that are normally "
                       "skipped")
parser.add_option("--debug", action="store_true", dest="debug", default=False,
                  help="run in debug mode")
parser.add_option("--skip-tests", dest="skip_tests", default=[],
                  help="A space separated list of tests to skip")

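# Example invocations (illustrative; assuming this script is saved as
# utils/unittest_suite.py in an autotest checkout, with -r paths given
# relative to the autotest root):
#   utils/unittest_suite.py                        # quick run, LONG_TESTS skipped
#   utils/unittest_suite.py --full                 # run everything, LONG_TESTS included
#   utils/unittest_suite.py -r client/common_lib   # only tests under that subtree
#   utils/unittest_suite.py --skip-tests "models_test.py csv_encoder_unittest.py"
#   utils/unittest_suite.py --debug                # one test at a time, output not redirected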

# Tests that take a long time to run; they are skipped unless --full is passed.
LONG_TESTS = set((
    'monitor_db_unittest.py',
    'monitor_db_functional_test.py',
    'monitor_db_cleanup_test.py',
    'barrier_unittest.py',
    'migrate_unittest.py',
    'frontend_unittest.py',
    'client_compilation_unittest.py',
    'csv_encoder_unittest.py',
    'rpc_interface_unittest.py',
    'resources_test.py',
    'logging_manager_test.py',
    'models_test.py',
    'serviceHandler_unittest.py',
    'scheduler_models_unittest.py',
    ))


ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))


class TestFailure(Exception): pass

def run_test(mod_names, options):
    """
    @param mod_names: A list of individual parts of the module name to import
            and run as a test suite.
    @param options: optparse options.
    """
    if not options.debug:
        parallel.redirect_io()

    print "Running %s" % '.'.join(mod_names)
    mod = common.setup_modules.import_module(mod_names[-1],
                                              '.'.join(mod_names[:-1]))
    for ut_module in [unittest, custom_unittest]:
        test = ut_module.defaultTestLoader.loadTestsFromModule(mod)
        suite = ut_module.TestSuite(test)
        runner = ut_module.TextTestRunner(verbosity=2)
        result = runner.run(suite)
        if result.errors or result.failures:
            msg = '%s had %d failures and %d errors.'
            msg %= '.'.join(mod_names), len(result.failures), len(result.errors)
            raise TestFailure(msg)

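# For illustration (hypothetical path): a test file at
# <autotest root>/client/common_lib/example_unittest.py is handed to
# run_test() above as ['autotest_lib', 'client', 'common_lib',
# 'example_unittest'], which import_module() resolves to the dotted module
# autotest_lib.client.common_lib.example_unittest.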

def find_and_run_tests(start, options):
    """
    Find and run Python unittest suites below the given directory.  Only look
    in subdirectories of start that are actual importable Python modules.

    @param start: The absolute directory to look for tests under.
    @param options: optparse options.
    @returns A list of errors from the parallel test run, or an empty list if
            every test passed.
    """
    modules = []
    skip_tests = set()
    if options.skip_tests:
        skip_tests.update(options.skip_tests.split())

    for dirpath, subdirs, filenames in os.walk(start):
        # Only look in and below subdirectories that are python modules.
        if '__init__.py' not in filenames:
            if options.full:
                for filename in filenames:
                    if filename.endswith('.pyc'):
                        os.unlink(os.path.join(dirpath, filename))
            # Skip all subdirectories below this one, it is not a module.
            del subdirs[:]
            if options.debug:
                print 'Skipping', dirpath
            continue  # Skip this directory.

        # Look for unittest files.
        for fname in filenames:
            if fname.endswith('_unittest.py') or fname.endswith('_test.py'):
                if not options.full and fname in LONG_TESTS:
                    continue
                if fname in skip_tests:
                    continue
                # Drop the '.py' suffix by slicing; rstrip() strips a set of
                # characters rather than a suffix.
                path_no_py = os.path.join(dirpath, fname)[:-len('.py')]
                assert path_no_py.startswith(ROOT)
                names = path_no_py[len(ROOT)+1:].split('/')
                modules.append(['autotest_lib'] + names)
                if options.debug:
                    print 'testing', path_no_py

    if options.debug:
        print 'Number of test modules found:', len(modules)

    functions = {}
    for module_names in modules:
        # Create a function that'll test a particular module.  module=module_names
        # is a hack to force python to evaluate the params now.  We then
        # rename the function to make error reporting nicer.
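        # (Default-argument values are evaluated when the lambda is defined;
        # a plain closure would instead look up module_names when the lambda
        # finally runs, so every entry would test the last module in the list.)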
        run_module = lambda module=module_names: run_test(module, options)
        name = '.'.join(module_names)
        run_module.__name__ = name
        functions[run_module] = set()

    try:
        dargs = {}
        if options.debug:
            dargs['max_simultaneous_procs'] = 1
        pe = parallel.ParallelExecute(functions, **dargs)
        pe.run_until_completion()
    except parallel.ParallelError, err:
        return err.errors
    return []


def main():
    options, args = parser.parse_args()
    if args:
        parser.error('Unexpected argument(s): %s' % args)
        parser.print_help()
        sys.exit(1)

    # Strip the arguments off the command line, so that the unit tests do not
    # see them.
    del sys.argv[1:]

    absolute_start = os.path.join(ROOT, options.start)
    errors = find_and_run_tests(absolute_start, options)
    if errors:
        print "%d tests resulted in an error/failure:" % len(errors)
        for error in errors:
            print "\t%s" % error
        print "Rerun", sys.argv[0], "--debug to see the failure details."
        sys.exit(1)
    else:
        print "All passed!"
        sys.exit(0)


if __name__ == "__main__":
    main()