# Shell class for a test, inherited by all individual tests
#
# Methods:
#       __init__        initialise
#       initialize      run once for each job
#       setup           run once for each new version of the test installed
#       run             run the test (wrapped by job.run_test())
#
# Data:
#       job             backreference to the job this test instance is part of
#       outputdir       eg. results/<job>/<testname.tag>
#       resultsdir      eg. results/<job>/<testname.tag>/results
#       profdir         eg. results/<job>/<testname.tag>/profiling
#       debugdir        eg. results/<job>/<testname.tag>/debug
#       bindir          eg. tests/<test>
#       src             eg. tests/<test>/src
#       tmpdir          eg. tmp/<tempname>_<testname.tag>

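# Illustrative sketch (not part of the original file): a client test built on
# this interface typically subclasses test.test, which builds on the base_test
# class below, and overrides run_once(); the test name and parameter here are
# hypothetical.
#
#     from autotest_lib.client.bin import test, utils
#
#     class mybenchmark(test.test):
#         version = 1
#
#         def run_once(self, seconds=1):
#             utils.system('sleep %d' % seconds)
#
# Such a test is normally invoked with job.run_test('mybenchmark', seconds=2),
# which ends up calling runtest() at the bottom of this module.
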
import fcntl, os, re, sys, shutil, tarfile, tempfile, time, traceback
import warnings, logging, glob, resource

from autotest_lib.client.common_lib import error
from autotest_lib.client.bin import utils


class base_test:
    preserve_srcdir = False
    network_destabilizing = False

    def __init__(self, job, bindir, outputdir):
        self.job = job
        self.pkgmgr = job.pkgmgr
        self.autodir = job.autodir
        self.outputdir = outputdir
        self.tagged_testname = os.path.basename(self.outputdir)
        self.resultsdir = os.path.join(self.outputdir, 'results')
        os.mkdir(self.resultsdir)
        self.profdir = os.path.join(self.outputdir, 'profiling')
        os.mkdir(self.profdir)
        self.debugdir = os.path.join(self.outputdir, 'debug')
        os.mkdir(self.debugdir)
        self.configure_crash_handler()
        self.bindir = bindir
        if hasattr(job, 'libdir'):
            self.libdir = job.libdir
        self.srcdir = os.path.join(self.bindir, 'src')
        self.tmpdir = tempfile.mkdtemp("_" + self.tagged_testname,
                                       dir=job.tmpdir)
        self._keyvals = []
        self._new_keyval = False
        self.failed_constraints = []
        self.iteration = 0
        self.before_iteration_hooks = []
        self.after_iteration_hooks = []


    def configure_crash_handler(self):
        """
        Configure the crash handler by:
         * Setting the core size limit to unlimited.
         * Registering an appropriate crash handler in
           /proc/sys/kernel/core_pattern.
         * Creating the files that the crash handler will use to figure out
           which tests are active at a given moment.

        The crash handler will pick up the core file, write it to
        self.debugdir and analyze it to generate a report. The program also
        outputs some results to syslog.

        If multiple tests are running, the handler tries to verify whether the
        old PID is still in the system process table, to determine whether it
        is a parent of the current test execution. If this can't be
        determined, the core file and the report file are copied to all test
        debug dirs.
        """
        self.pattern_file = '/proc/sys/kernel/core_pattern'
        try:
            # Enable core dumps
            resource.setrlimit(resource.RLIMIT_CORE, (-1, -1))
            # Try to back up the core pattern and register our script
            self.core_pattern_backup = open(self.pattern_file, 'r').read()
            pattern_file = open(self.pattern_file, 'w')
            tools_dir = os.path.join(self.autodir, 'tools')
            crash_handler_path = os.path.join(tools_dir, 'crash_handler.py')
            pattern_file.write('|' + crash_handler_path + ' %p %t %u %s %h %e')
            # Write the files that the crash handler is going to use
            self.debugdir_tmp_file = ('/tmp/autotest_results_dir.%s' %
                                      os.getpid())
            utils.open_write_close(self.debugdir_tmp_file, self.debugdir + "\n")
        except Exception, e:
            self.crash_handling_enabled = False
            logging.error('Crash handling system disabled: %s' % e)
        else:
            self.crash_handling_enabled = True
            logging.debug('Crash handling system enabled.')


    def crash_handler_report(self):
        """
        If core dumps are found in the debugdir after the test has executed,
        let the user know.
        """
        if self.crash_handling_enabled:
            core_dirs = glob.glob('%s/crash.*' % self.debugdir)
            if core_dirs:
                logging.warning('Programs crashed during test execution:')
                for dir in core_dirs:
                    logging.warning('Please verify %s for more info', dir)
            # Remove the debugdir info file
            os.unlink(self.debugdir_tmp_file)
            # Restore the core pattern backup
            try:
                utils.open_write_close(self.pattern_file,
                                       self.core_pattern_backup)
            except EnvironmentError:
                pass


    def assert_(self, expr, msg='Assertion failed.'):
        if not expr:
            raise error.TestError(msg)


    def write_test_keyval(self, attr_dict):
        utils.write_keyval(self.outputdir, attr_dict)


    @staticmethod
    def _append_type_to_keys(dictionary, typename):
        new_dict = {}
        for key, value in dictionary.iteritems():
            new_key = "%s{%s}" % (key, typename)
            new_dict[new_key] = value
        return new_dict


    def write_perf_keyval(self, perf_dict):
        self.write_iteration_keyval({}, perf_dict)


    def write_attr_keyval(self, attr_dict):
        self.write_iteration_keyval(attr_dict, {})


    def write_iteration_keyval(self, attr_dict, perf_dict):
        # append the dictionaries before they have the {perf} and {attr} added
        self._keyvals.append({'attr':attr_dict, 'perf':perf_dict})
        self._new_keyval = True

        if attr_dict:
            attr_dict = self._append_type_to_keys(attr_dict, "attr")
            utils.write_keyval(self.resultsdir, attr_dict, type_tag="attr")

        if perf_dict:
            perf_dict = self._append_type_to_keys(perf_dict, "perf")
            utils.write_keyval(self.resultsdir, perf_dict, type_tag="perf")

        keyval_path = os.path.join(self.resultsdir, "keyval")
        print >> open(keyval_path, "a"), ""
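    # Illustrative usage (not part of the original file): from inside
    # run_once() a test typically records per-iteration results with
    # something like
    #
    #     self.write_perf_keyval({'throughput_mbps': 92.4})
    #
    # or, to record attributes and performance values together,
    #
    #     self.write_iteration_keyval({'kernel': '2.6.30'},
    #                                 {'throughput_mbps': 92.4})
    #
    # The key names and values above are made up. Keys are tagged with
    # {attr}/{perf} and appended to the keyval file under self.resultsdir.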


    def analyze_perf_constraints(self, constraints):
        if not self._new_keyval:
            return

        self._new_keyval = False
        failures = []
        for constraint in constraints:
            print "___________________ constraint = %s" % constraint
            print "___________________ keyvals = %s" % self._keyvals[-1]['perf']
            try:
                if not eval(constraint, self._keyvals[-1]['perf']):
                    failures.append('%s: constraint was not met' % constraint)
            except:
                failures.append('could not evaluate constraint: %s'
                                % constraint)

        # keep track of the errors for each iteration
        self.failed_constraints.append(failures)


    def process_failed_constraints(self):
        msg = ''
        for i, failures in enumerate(self.failed_constraints):
            if failures:
                msg += 'iteration %d:%s ' % (i, ','.join(failures))

        if msg:
            raise error.TestFail(msg)
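    # Illustrative note (not part of the original file): constraints are plain
    # Python expressions evaluated against the latest iteration's perf
    # keyvals, so with the hypothetical keyval above a caller could pass
    # something like
    #
    #     constraints = ['throughput_mbps >= 50', 'throughput_mbps < 200']
    #
    # Any constraint that evaluates false (or cannot be evaluated) is recorded
    # per iteration and later turned into a TestFail by
    # process_failed_constraints().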


    def register_before_iteration_hook(self, iteration_hook):
        """
        This is how we expect test writers to register a before_iteration_hook.
        This adds the method to the list of hooks which are executed
        before each iteration.

        @param iteration_hook: Method to run before each iteration. A valid
                               hook accepts a single argument which is the
                               test object.
        """
        self.before_iteration_hooks.append(iteration_hook)


    def register_after_iteration_hook(self, iteration_hook):
        """
        This is how we expect test writers to register an after_iteration_hook.
        This adds the method to the list of hooks which are executed
        after each iteration.

        @param iteration_hook: Method to run after each iteration. A valid
                               hook accepts a single argument which is the
                               test object.
        """
        self.after_iteration_hooks.append(iteration_hook)
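    # Illustrative usage (not part of the original file): an iteration hook is
    # any callable that takes the test object, e.g.
    #
    #     def log_iteration(test_obj):
    #         logging.info('Starting iteration %d of %s',
    #                      test_obj.iteration, test_obj.tagged_testname)
    #
    #     mytest.register_before_iteration_hook(log_iteration)
    #
    # runtest() at the bottom of this module registers the
    # before/after_iteration_hook arguments it receives through these methods.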


    def initialize(self):
        pass


    def setup(self):
        pass


    def warmup(self, *args, **dargs):
        pass


    def drop_caches_between_iterations(self):
        if self.job.drop_caches_between_iterations:
            print "Dropping caches between iterations"
            utils.drop_caches()


    def _call_run_once(self, constraints, profile_only,
                       postprocess_profiled_run, args, dargs):
        self.drop_caches_between_iterations()

        # execute iteration hooks
        for hook in self.before_iteration_hooks:
            hook(self)

        if profile_only:
            if not self.job.profilers.present():
                self.job.record('WARN', None, None, 'No profilers have been '
                                'added but profile_only is set - nothing '
                                'will be run')
            self.run_once_profiling(postprocess_profiled_run, *args, **dargs)
        else:
            self.run_once(*args, **dargs)

        for hook in self.after_iteration_hooks:
            hook(self)

        self.postprocess_iteration()
        self.analyze_perf_constraints(constraints)


    def execute(self, iterations=None, test_length=None, profile_only=False,
                _get_time=time.time, postprocess_profiled_run=None,
                constraints=(), *args, **dargs):
        """
        This is the basic execute method for tests inherited from base_test.
        If you want to implement a benchmark test, it's better to implement
        the run_once function, so it can cooperate with the profiling
        infrastructure. For other tests, you can simply override this default
        implementation.

        @param test_length: The minimum test length in seconds. run_once will
                be called as many times as needed to cover the minimum test
                length.

        @param iterations: The number of times to run the run_once function.
                This parameter is incompatible with test_length and is ignored
                (with a logged message) if you specify both.

        @param profile_only: If True, run every iteration with profilers
                enabled. Otherwise run the iterations normally and add one
                extra profiled run at the end, if profilers are present.

        @param _get_time: [time.time] Used for unit test time injection.

        @param postprocess_profiled_run: Run the postprocessing for the
                profiled run.
        """

        # For our special class of tests, the benchmarks, we don't want
        # profilers to run during the test iterations. Let's reserve only
        # the last iteration for profiling, if needed. So let's stop
        # all profilers if they are present and active.
        profilers = self.job.profilers
        if profilers.active():
            profilers.stop(self)
        # If the user called this test in an odd way (specified both iterations
        # and test_length), let's warn them.
        if iterations and test_length:
            logging.info('Iterations parameter ignored (timed execution).')
        if test_length:
            test_start = _get_time()
            time_elapsed = 0
            timed_counter = 0
            logging.info('Test started. Minimum test length: %d s',
                         test_length)
            while time_elapsed < test_length:
                timed_counter = timed_counter + 1
                if time_elapsed == 0:
                    logging.info('Executing iteration %d', timed_counter)
                elif time_elapsed > 0:
                    logging.info(
                            'Executing iteration %d, time_elapsed %d s',
                            timed_counter, time_elapsed)
                self._call_run_once(constraints, profile_only,
                                    postprocess_profiled_run, args, dargs)
                test_iteration_finish = _get_time()
                time_elapsed = test_iteration_finish - test_start
            logging.info('Test finished after %d iterations',
                         timed_counter)
            logging.info('Time elapsed: %d s', time_elapsed)
        else:
            if iterations is None:
                iterations = 1
            logging.info('Test started. Number of iterations: %d', iterations)
            for self.iteration in xrange(1, iterations+1):
                logging.info('Executing iteration %d of %d', self.iteration,
                             iterations)
                self._call_run_once(constraints, profile_only,
                                    postprocess_profiled_run, args, dargs)
            logging.info('Test finished after %d iterations.', iterations)

        if not profile_only:
            self.iteration += 1
            self.run_once_profiling(postprocess_profiled_run, *args, **dargs)

        # Do any postprocessing, normally extracting performance keyvals, etc
        self.postprocess()
        self.process_failed_constraints()
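    # Illustrative usage (not part of the original file): execute() is
    # normally reached through job.run_test(), and the test name and keyword
    # values below are examples only. Either a fixed iteration count
    #
    #     job.run_test('mybenchmark', iterations=3)
    #
    # or a minimum wall-clock duration
    #
    #     job.run_test('mybenchmark', test_length=60)
    #
    # can be requested; when both are given, test_length wins and the
    # iterations value is ignored, as the code above logs.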


    def run_once_profiling(self, postprocess_profiled_run, *args, **dargs):
        profilers = self.job.profilers
        # Do a profiling run if necessary
        if profilers.present():
            self.drop_caches_between_iterations()
            profilers.start(self)
            print 'Profilers present. Profiling run started'
            try:
                self.run_once(*args, **dargs)

                # Priority to the run_once() argument over the attribute.
                postprocess_attribute = getattr(self,
                                                'postprocess_profiled_run',
                                                False)

                if (postprocess_profiled_run or
                    (postprocess_profiled_run is None and
                     postprocess_attribute)):
                    self.postprocess_iteration()

            finally:
                profilers.stop(self)
                profilers.report(self)


    def postprocess(self):
        pass


    def postprocess_iteration(self):
        pass


    def cleanup(self):
        pass


    def _exec(self, args, dargs):
        self.job.logging.tee_redirect_debug_dir(self.debugdir,
                                                log_name=self.tagged_testname)
        try:
            if self.network_destabilizing:
                self.job.disable_warnings("NETWORK")

            # write out the test attributes into a keyval
            dargs = dargs.copy()
            run_cleanup = dargs.pop('run_cleanup', self.job.run_test_cleanup)
            keyvals = dargs.pop('test_attributes', {}).copy()
            keyvals['version'] = self.version
            for i, arg in enumerate(args):
                keyvals['param-%d' % i] = repr(arg)
            for name, arg in dargs.iteritems():
                keyvals['param-%s' % name] = repr(arg)
            self.write_test_keyval(keyvals)

            _validate_args(args, dargs, self.initialize, self.setup,
                           self.execute, self.cleanup)

            try:
                # Initialize:
                _cherry_pick_call(self.initialize, *args, **dargs)

                lockfile = open(os.path.join(self.job.tmpdir, '.testlock'), 'w')
                try:
                    fcntl.flock(lockfile, fcntl.LOCK_EX)
                    # Setup: (compile and install the test, if needed)
                    p_args, p_dargs = _cherry_pick_args(self.setup, args, dargs)
                    utils.update_version(self.srcdir, self.preserve_srcdir,
                                         self.version, self.setup,
                                         *p_args, **p_dargs)
                finally:
                    fcntl.flock(lockfile, fcntl.LOCK_UN)
                    lockfile.close()

                # Execute:
                os.chdir(self.outputdir)

                # call self.warmup cherry picking the arguments it accepts and
                # translate exceptions if needed
                _call_test_function(_cherry_pick_call, self.warmup,
                                    *args, **dargs)

                if hasattr(self, 'run_once'):
                    p_args, p_dargs = _cherry_pick_args(self.run_once,
                                                        args, dargs)
                    # pull in any non-* and non-** args from self.execute
                    for param in _get_nonstar_args(self.execute):
                        if param in dargs:
                            p_dargs[param] = dargs[param]
                else:
                    p_args, p_dargs = _cherry_pick_args(self.execute,
                                                        args, dargs)

                _call_test_function(self.execute, *p_args, **p_dargs)
            except Exception:
                # Save the exception while we run our cleanup() before
                # reraising it.
                exc_info = sys.exc_info()
                try:
                    try:
                        if run_cleanup:
                            _cherry_pick_call(self.cleanup, *args, **dargs)
                    except Exception:
                        print 'Ignoring exception during cleanup() phase:'
                        traceback.print_exc()
                        print 'Now raising the earlier %s error' % exc_info[0]
                    self.crash_handler_report()
                finally:
                    self.job.logging.restore()
                    try:
                        raise exc_info[0], exc_info[1], exc_info[2]
                    finally:
                        # http://docs.python.org/library/sys.html#sys.exc_info
                        # Be nice and prevent a circular reference.
                        del exc_info
            else:
                try:
                    if run_cleanup:
                        _cherry_pick_call(self.cleanup, *args, **dargs)
                    self.crash_handler_report()
                finally:
                    self.job.logging.restore()
        except error.AutotestError:
            if self.network_destabilizing:
                self.job.enable_warnings("NETWORK")
            # Pass already-categorized errors on up.
            raise
        except Exception, e:
            if self.network_destabilizing:
                self.job.enable_warnings("NETWORK")
            # Anything else is an ERROR in our own code, not execute().
            raise error.UnhandledTestError(e)
        else:
            if self.network_destabilizing:
                self.job.enable_warnings("NETWORK")


def _get_nonstar_args(func):
    """Extract all the (normal) function parameter names.

    Given a function, returns a tuple of parameter names, specifically
    excluding the * and ** parameters, if the function accepts them.

    @param func: A callable that we want to choose arguments for.

    @return: A tuple of parameters accepted by the function.
    """
    return func.func_code.co_varnames[:func.func_code.co_argcount]
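# For example (sketch, not part of the original file): for
# "def f(a, b=1, *args, **kwargs): pass" this returns ('a', 'b') -- the
# *args/**kwargs names are excluded.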


def _cherry_pick_args(func, args, dargs):
    """Sanitize positional and keyword arguments before calling a function.

    Given a callable (func), an argument tuple and a dictionary of keyword
    arguments, pick only those arguments which the function is prepared to
    accept and return a new argument tuple and keyword argument dictionary.

    Args:
      func: A callable that we want to choose arguments for.
      args: A tuple of positional arguments to consider passing to func.
      dargs: A dictionary of keyword arguments to consider passing to func.
    Returns:
      A tuple of: (args tuple, keyword arguments dictionary)
    """
    # Cherry pick args:
    if func.func_code.co_flags & 0x04:
        # func accepts *args, so return the entire args.
        p_args = args
    else:
        p_args = ()

    # Cherry pick dargs:
    if func.func_code.co_flags & 0x08:
        # func accepts **dargs, so return the entire dargs.
        p_dargs = dargs
    else:
        # Only return the keyword arguments that func accepts.
        p_dargs = {}
        for param in _get_nonstar_args(func):
            if param in dargs:
                p_dargs[param] = dargs[param]

    return p_args, p_dargs


def _cherry_pick_call(func, *args, **dargs):
    """Cherry picks arguments from args/dargs based on what "func" accepts
    and calls the function with the picked arguments."""
    p_args, p_dargs = _cherry_pick_args(func, args, dargs)
    return func(*p_args, **p_dargs)
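# For example (sketch, not part of the original file): given
# "def f(a, b=1): pass", the call
#
#     _cherry_pick_call(f, a=10, b=2, unused=3)
#
# silently drops 'unused' and is equivalent to f(a=10, b=2), while a function
# declared with **kwargs would receive every keyword argument unchanged.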


def _validate_args(args, dargs, *funcs):
    """Verify that arguments are appropriate for at least one callable.

    Given a list of callables as additional parameters, verify that
    the proposed keyword arguments in dargs will each be accepted by at least
    one of the callables.

    NOTE: args is currently not supported and must be empty.

    Args:
      args: A tuple of proposed positional arguments.
      dargs: A dictionary of proposed keyword arguments.
      *funcs: Callables to be searched for acceptance of args and dargs.
    Raises:
      error.AutotestError: if an arg won't be accepted by any of *funcs.
    """
    all_co_flags = 0
    all_varnames = ()
    for func in funcs:
        all_co_flags |= func.func_code.co_flags
        all_varnames += func.func_code.co_varnames[:func.func_code.co_argcount]

    # Check if the given args belong to at least one of the given functions.
    if len(args) > 0:
        # Current implementation doesn't allow the use of args.
        raise error.TestError('Unnamed arguments not accepted. Please '
                              'call job.run_test with named args only')

    # Check if the given dargs belong to at least one of the given functions.
    if len(dargs) > 0:
        if not all_co_flags & 0x08:
            # no func accepts **dargs, so:
            for param in dargs:
                if not param in all_varnames:
                    raise error.AutotestError('Unknown parameter: %s' % param)


def _installtest(job, url):
    (group, name) = job.pkgmgr.get_package_name(url, 'test')

    # Bail if the test is already installed
    group_dir = os.path.join(job.testdir, "download", group)
    if os.path.exists(os.path.join(group_dir, name)):
        return (group, name)

    # If the group directory is missing create it and add
    # an empty __init__.py so that sub-directories are
    # considered for import.
    if not os.path.exists(group_dir):
        os.mkdir(group_dir)
        f = file(os.path.join(group_dir, '__init__.py'), 'w+')
        f.close()

    print name + ": installing test url=" + url
    tarball = os.path.basename(url)
    tarball_path = os.path.join(group_dir, tarball)
    test_dir = os.path.join(group_dir, name)
    job.pkgmgr.fetch_pkg(tarball, tarball_path,
                         repo_url=os.path.dirname(url))

    # Create the directory for the test
    if not os.path.exists(test_dir):
        os.mkdir(os.path.join(group_dir, name))

    job.pkgmgr.untar_pkg(tarball_path, test_dir)

    os.remove(tarball_path)

    # For this 'sub-object' to be importable via the name
    # 'group.name' we need to provide an __init__.py,
    # so link the main entry point to this.
    os.symlink(name + '.py', os.path.join(group_dir, name,
                                          '__init__.py'))

    # The test is now installed.
    return (group, name)


def _call_test_function(func, *args, **dargs):
    """Calls a test function and translates exceptions so that errors
    inside test code are considered test failures."""
    try:
        return func(*args, **dargs)
    except error.AutotestError:
        # Pass already-categorized errors on up as is.
        raise
    except Exception, e:
        # Other exceptions must be treated as a FAIL when
        # raised during the test functions
        raise error.UnhandledTestFail(e)


def runtest(job, url, tag, args, dargs,
            local_namespace={}, global_namespace={},
            before_test_hook=None, after_test_hook=None,
            before_iteration_hook=None, after_iteration_hook=None):
    local_namespace = local_namespace.copy()
    global_namespace = global_namespace.copy()

    # if this is not a plain test name then download and install the
    # specified test
    if url.endswith('.tar.bz2'):
        (group, testname) = _installtest(job, url)
        bindir = os.path.join(job.testdir, 'download', group, testname)
        site_bindir = None
    else:
        # if the test is local, it can be found in either testdir
        # or site_testdir. tests in site_testdir override tests
        # defined in testdir
        (group, testname) = ('', url)
        bindir = os.path.join(job.testdir, group, testname)
        if hasattr(job, 'site_testdir'):
            site_bindir = os.path.join(job.site_testdir,
                                       group, testname)
        else:
            site_bindir = None

    # The job object here can be that of a server side job or a client
    # side job. 'install_pkg' method won't be present for server side
    # jobs, so do the fetch only if that method is present in the job
    # obj.
    if hasattr(job, 'install_pkg'):
        try:
            job.install_pkg(testname, 'test', bindir)
        except error.PackageInstallError, e:
            # continue as a fall back mechanism and see if the test code
            # already exists on the machine
            pass

    outputdir = os.path.join(job.resultdir, testname)
    if tag:
        outputdir += '.' + tag

    # if we can find the test in site_bindir, use this version
    if site_bindir and os.path.exists(site_bindir):
        bindir = site_bindir
        testdir = job.site_testdir
    elif os.path.exists(bindir):
        testdir = job.testdir
    else:
        raise error.TestError(testname + ': test does not exist')

    local_namespace['job'] = job
    local_namespace['bindir'] = bindir
    local_namespace['outputdir'] = outputdir

    if group:
        sys.path.insert(0, os.path.join(testdir, 'download'))
        group += '.'
    else:
        sys.path.insert(0, os.path.join(testdir, testname))

    try:
        exec ("import %s%s" % (group, testname),
              local_namespace, global_namespace)
        exec ("mytest = %s%s.%s(job, bindir, outputdir)" %
              (group, testname, testname),
              local_namespace, global_namespace)
    finally:
        sys.path.pop(0)

    pwd = os.getcwd()
    os.chdir(outputdir)

    try:
        mytest = global_namespace['mytest']
        if before_test_hook:
            before_test_hook(mytest)

        # Use the register_*_iteration_hook methods to register the
        # passed-in hooks.
        if before_iteration_hook:
            mytest.register_before_iteration_hook(before_iteration_hook)
        if after_iteration_hook:
            mytest.register_after_iteration_hook(after_iteration_hook)
        mytest._exec(args, dargs)
    finally:
        os.chdir(pwd)
        if after_test_hook:
            after_test_hook(mytest)
        shutil.rmtree(mytest.tmpdir, ignore_errors=True)