# Shell class for a test, inherited by all individual tests
#
# Methods:
#       __init__        initialise
#       initialize      run once for each job
#       setup           run once for each new version of the test installed
#       run             run the test (wrapped by job.run_test())
#
# Data:
#       job             backreference to the job this test instance is part of
#       outputdir       eg. results/<job>/<testname.tag>
#       resultsdir      eg. results/<job>/<testname.tag>/results
#       profdir         eg. results/<job>/<testname.tag>/profiling
#       debugdir        eg. results/<job>/<testname.tag>/debug
#       bindir          eg. tests/<test>
#       src             eg. tests/<test>/src
#       tmpdir          eg. tmp/<tempname>_<testname.tag>
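#
# Illustrative sketch (not part of this module): an individual client-side
# test normally subclasses test.test (which builds on the base_test class
# below) and overrides run_once(). The test name and its argument here are
# placeholders for illustration only.
#
#     import time
#     from autotest_lib.client.bin import test
#
#     class sleeptest(test.test):
#         version = 1
#
#         def run_once(self, seconds=1):
#             time.sleep(seconds)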

import fcntl, os, re, sys, shutil, tarfile, tempfile, time, traceback
import warnings, logging, glob, resource

from autotest_lib.client.common_lib import error
from autotest_lib.client.bin import utils


class base_test:
    preserve_srcdir = False
    network_destabilizing = False

    def __init__(self, job, bindir, outputdir):
        self.job = job
        self.pkgmgr = job.pkgmgr
        self.autodir = job.autodir
        self.outputdir = outputdir
        self.tagged_testname = os.path.basename(self.outputdir)
        self.resultsdir = os.path.join(self.outputdir, 'results')
        os.mkdir(self.resultsdir)
        self.profdir = os.path.join(self.outputdir, 'profiling')
        os.mkdir(self.profdir)
        self.debugdir = os.path.join(self.outputdir, 'debug')
        os.mkdir(self.debugdir)
        self.configure_crash_handler()
        self.bindir = bindir
        if hasattr(job, 'libdir'):
            self.libdir = job.libdir
        self.srcdir = os.path.join(self.bindir, 'src')
        self.tmpdir = tempfile.mkdtemp("_" + self.tagged_testname,
                                       dir=job.tmpdir)
        self._keyvals = []
        self._new_keyval = False
        self.failed_constraints = []
        self.iteration = 0
        self.before_iteration_hooks = []
        self.after_iteration_hooks = []


    def configure_crash_handler(self):
        """
        Configure the crash handler by:
         * Setting the core size limit to unlimited
         * Putting an appropriate crash handler on /proc/sys/kernel/core_pattern
         * Creating files that the crash handler will use to figure out which
           tests are active at a given moment

        The crash handler will pick up the core file and write it to
        self.debugdir, and perform analysis on it to generate a report. The
        program also outputs some results to syslog.

        If multiple tests are running, an attempt is made to verify if we
        still have the old PID in the system process table, to determine
        whether it is a parent of the current test execution. If we can't
        determine it, the core file and the report file will be copied to all
        test debug dirs.
        """
        self.pattern_file = '/proc/sys/kernel/core_pattern'
        try:
            # Enable core dumps
            resource.setrlimit(resource.RLIMIT_CORE, (-1, -1))
            # Back up the core pattern and register our handler script
            self.core_pattern_backup = open(self.pattern_file, 'r').read()
            pattern_file = open(self.pattern_file, 'w')
            tools_dir = os.path.join(self.autodir, 'tools')
            crash_handler_path = os.path.join(tools_dir, 'crash_handler.py')
            pattern_file.write('|' + crash_handler_path + ' %p %t %u %s %h %e')
            # Write the file that the crash handler is going to use
            self.debugdir_tmp_file = ('/tmp/autotest_results_dir.%s' %
                                      os.getpid())
            utils.open_write_close(self.debugdir_tmp_file, self.debugdir + "\n")
        except Exception, e:
            self.crash_handling_enabled = False
            logging.error('Crash handling system disabled: %s' % e)
        else:
            self.crash_handling_enabled = True
            logging.debug('Crash handling system enabled.')


    def crash_handler_report(self):
        """
        If core dumps are found in the debugdir after the execution of the
        test, let the user know.
        """
        if self.crash_handling_enabled:
            core_dirs = glob.glob('%s/crash.*' % self.debugdir)
            if core_dirs:
                logging.warning('Programs crashed during test execution:')
                for dir in core_dirs:
                    logging.warning('Please verify %s for more info', dir)
            # Remove the debugdir info file
            os.unlink(self.debugdir_tmp_file)
            # Restore the core pattern backup
            try:
                utils.open_write_close(self.pattern_file,
                                       self.core_pattern_backup)
            except EnvironmentError:
                pass


    def assert_(self, expr, msg='Assertion failed.'):
        if not expr:
            raise error.TestError(msg)


    def write_test_keyval(self, attr_dict):
        utils.write_keyval(self.outputdir, attr_dict)


    @staticmethod
    def _append_type_to_keys(dictionary, typename):
        new_dict = {}
        for key, value in dictionary.iteritems():
            new_key = "%s{%s}" % (key, typename)
            new_dict[new_key] = value
        return new_dict


    def write_perf_keyval(self, perf_dict):
        self.write_iteration_keyval({}, perf_dict)


    def write_attr_keyval(self, attr_dict):
        self.write_iteration_keyval(attr_dict, {})


    def write_iteration_keyval(self, attr_dict, perf_dict):
        # append the dictionaries before they have the {perf} and {attr} added
        self._keyvals.append({'attr':attr_dict, 'perf':perf_dict})
        self._new_keyval = True

        if attr_dict:
            attr_dict = self._append_type_to_keys(attr_dict, "attr")
            utils.write_keyval(self.resultsdir, attr_dict, type_tag="attr")

        if perf_dict:
            perf_dict = self._append_type_to_keys(perf_dict, "perf")
            utils.write_keyval(self.resultsdir, perf_dict, type_tag="perf")

        keyval_path = os.path.join(self.resultsdir, "keyval")
        print >> open(keyval_path, "a"), ""

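    # Illustrative sketch (not part of this class): a test typically records
    # per-iteration results from its own code, often in postprocess_iteration().
    # The keyval names and values below are hypothetical.
    #
    #     def postprocess_iteration(self):
    #         self.write_perf_keyval({'throughput_mb_per_sec': 123.4})
    #         self.write_attr_keyval({'disk_model': 'example-ssd'})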

    def analyze_perf_constraints(self, constraints):
        if not self._new_keyval:
            return

        # create a dict from the keyvals suitable as an environment for eval
        keyval_env = self._keyvals[-1]['perf'].copy()
        keyval_env['__builtins__'] = None
        self._new_keyval = False
        failures = []

        # evaluate each constraint using the current keyvals
        for constraint in constraints:
            logging.info('___________________ constraint = %s', constraint)
            logging.info('___________________ keyvals = %s', keyval_env)

            try:
                if not eval(constraint, keyval_env):
                    failures.append('%s: constraint was not met' % constraint)
            except:
                failures.append('could not evaluate constraint: %s'
                                % constraint)

        # keep track of the errors for each iteration
        self.failed_constraints.append(failures)


    def process_failed_constraints(self):
        msg = ''
        for i, failures in enumerate(self.failed_constraints):
            if failures:
                msg += 'iteration %d:%s ' % (i, ','.join(failures))

        if msg:
            raise error.TestFail(msg)

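    # Illustrative note (not part of this class): constraints are plain Python
    # expressions evaluated against the perf keyvals of the latest iteration.
    # For the hypothetical keyval used in the sketch above, a caller could
    # pass something like:
    #
    #     constraints = ['throughput_mb_per_sec > 100',
    #                    'throughput_mb_per_sec <= 1000']
    #
    # Any constraint that evaluates false, or that cannot be evaluated, is
    # recorded here and raised later as a TestFail by
    # process_failed_constraints().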

    def register_before_iteration_hook(self, iteration_hook):
        """
        This is how we expect test writers to register a before_iteration_hook.
        This adds the method to the list of hooks which are executed
        before each iteration.

        @param iteration_hook: Method to run before each iteration. A valid
                               hook accepts a single argument which is the
                               test object.
        """
        self.before_iteration_hooks.append(iteration_hook)


    def register_after_iteration_hook(self, iteration_hook):
        """
        This is how we expect test writers to register an after_iteration_hook.
        This adds the method to the list of hooks which are executed
        after each iteration.

        @param iteration_hook: Method to run after each iteration. A valid
                               hook accepts a single argument which is the
                               test object.
        """
        self.after_iteration_hooks.append(iteration_hook)

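    # Illustrative sketch (not part of this class): a hook is any callable
    # that takes the test object as its only argument. Both the hook and the
    # 'mytest' instance below are hypothetical.
    #
    #     def log_iteration_hook(test_obj):
    #         logging.info('About to run iteration %d', test_obj.iteration)
    #
    #     mytest.register_before_iteration_hook(log_iteration_hook)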

    def initialize(self):
        pass


    def setup(self):
        pass


    def warmup(self, *args, **dargs):
        pass


    def drop_caches_between_iterations(self):
        if self.job.drop_caches_between_iterations:
            print "Dropping caches between iterations"
            utils.drop_caches()


    def _call_run_once(self, constraints, profile_only,
                       postprocess_profiled_run, args, dargs):
        self.drop_caches_between_iterations()

        # execute iteration hooks
        for hook in self.before_iteration_hooks:
            hook(self)

        if profile_only:
            if not self.job.profilers.present():
                self.job.record('WARN', None, None, 'No profilers have been '
                                'added but profile_only is set - nothing '
                                'will be run')
            self.run_once_profiling(postprocess_profiled_run, *args, **dargs)
        else:
            self.run_once(*args, **dargs)

        for hook in self.after_iteration_hooks:
            hook(self)

        self.postprocess_iteration()
        self.analyze_perf_constraints(constraints)


    def execute(self, iterations=None, test_length=None, profile_only=False,
                _get_time=time.time, postprocess_profiled_run=None,
                constraints=(), *args, **dargs):
        """
        This is the basic execute method for the tests inherited from
        base_test. If you want to implement a benchmark test, it's better to
        implement the run_once function, so that it fits in with the profiling
        infrastructure. For other tests, you can just override the default
        implementation.

        @param test_length: The minimum test length in seconds. We'll run the
            run_once function as many times as needed to cover the minimum
            test length.

        @param iterations: The number of times to run the run_once function.
            This parameter is incompatible with test_length and will be
            silently ignored if you specify both.

        @param profile_only: If True, run every iteration with the profilers
            enabled. Otherwise run the iterations normally and add one extra
            profiled run at the end, if profilers are present.

        @param _get_time: [time.time] Used for unit test time injection.

        @param postprocess_profiled_run: Run the postprocessing for the
            profiled run.
        """

        # For our special class of tests, the benchmarks, we don't want
        # profilers to run during the test iterations. Let's reserve only
        # the last iteration for profiling, if needed. So let's stop
        # all profilers if they are present and active.
        profilers = self.job.profilers
        if profilers.active():
            profilers.stop(self)
        # If the user called this test in an odd way (specified both iterations
        # and test_length), let's warn them.
        if iterations and test_length:
            logging.info('Iterations parameter ignored (timed execution).')
        if test_length:
            test_start = _get_time()
            time_elapsed = 0
            timed_counter = 0
            logging.info('Test started. Minimum test length: %d s',
                         test_length)
            while time_elapsed < test_length:
                timed_counter = timed_counter + 1
                if time_elapsed == 0:
                    logging.info('Executing iteration %d', timed_counter)
                elif time_elapsed > 0:
                    logging.info(
                            'Executing iteration %d, time_elapsed %d s',
                            timed_counter, time_elapsed)
                self._call_run_once(constraints, profile_only,
                                    postprocess_profiled_run, args, dargs)
                test_iteration_finish = _get_time()
                time_elapsed = test_iteration_finish - test_start
            logging.info('Test finished after %d iterations',
                         timed_counter)
            logging.info('Time elapsed: %d s', time_elapsed)
        else:
            if iterations is None:
                iterations = 1
            logging.info('Test started. Number of iterations: %d', iterations)
            for self.iteration in xrange(1, iterations+1):
                logging.info('Executing iteration %d of %d', self.iteration,
                             iterations)
                self._call_run_once(constraints, profile_only,
                                    postprocess_profiled_run, args, dargs)
            logging.info('Test finished after %d iterations.', iterations)

        if not profile_only:
            self.iteration += 1
            self.run_once_profiling(postprocess_profiled_run, *args, **dargs)

        # Do any postprocessing, normally extracting performance keyvals, etc
        self.postprocess()
        self.process_failed_constraints()

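    # Illustrative sketch (not part of this class): execute() is normally not
    # called directly; its keyword arguments are usually supplied from a
    # control file via job.run_test(), which reaches execute() through
    # _exec(). The test name and values below are hypothetical.
    #
    #     job.run_test('mybenchmark', iterations=3,
    #                  constraints=['throughput_mb_per_sec > 100'])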

    def run_once_profiling(self, postprocess_profiled_run, *args, **dargs):
        profilers = self.job.profilers
        # Do a profiling run if necessary
        if profilers.present():
            self.drop_caches_between_iterations()
            profilers.start(self)
            print 'Profilers present. Profiling run started'
            try:
                self.run_once(*args, **dargs)

                # Priority to the run_once() argument over the attribute.
                postprocess_attribute = getattr(self,
                                                'postprocess_profiled_run',
                                                False)

                if (postprocess_profiled_run or
                        (postprocess_profiled_run is None and
                         postprocess_attribute)):
                    self.postprocess_iteration()

            finally:
                profilers.stop(self)
                profilers.report(self)


    def postprocess(self):
        pass


    def postprocess_iteration(self):
        pass


    def cleanup(self):
        pass


    def _exec(self, args, dargs):
        self.job.logging.tee_redirect_debug_dir(self.debugdir,
                                                log_name=self.tagged_testname)
        try:
            if self.network_destabilizing:
                self.job.disable_warnings("NETWORK")

            # write out the test attributes into a keyval
            dargs = dargs.copy()
            run_cleanup = dargs.pop('run_cleanup', self.job.run_test_cleanup)
            keyvals = dargs.pop('test_attributes', {}).copy()
            keyvals['version'] = self.version
            for i, arg in enumerate(args):
                keyvals['param-%d' % i] = repr(arg)
            for name, arg in dargs.iteritems():
                keyvals['param-%s' % name] = repr(arg)
            self.write_test_keyval(keyvals)

            _validate_args(args, dargs, self.initialize, self.setup,
                           self.execute, self.cleanup)

            try:
                # Initialize:
                _cherry_pick_call(self.initialize, *args, **dargs)

                lockfile = open(os.path.join(self.job.tmpdir, '.testlock'), 'w')
                try:
                    fcntl.flock(lockfile, fcntl.LOCK_EX)
                    # Setup: (compile and install the test, if needed)
                    p_args, p_dargs = _cherry_pick_args(self.setup, args, dargs)
                    utils.update_version(self.srcdir, self.preserve_srcdir,
                                         self.version, self.setup,
                                         *p_args, **p_dargs)
                finally:
                    fcntl.flock(lockfile, fcntl.LOCK_UN)
                    lockfile.close()

                # Execute:
                os.chdir(self.outputdir)

                # call self.warmup cherry picking the arguments it accepts and
                # translate exceptions if needed
                _call_test_function(_cherry_pick_call, self.warmup,
                                    *args, **dargs)

                if hasattr(self, 'run_once'):
                    p_args, p_dargs = _cherry_pick_args(self.run_once,
                                                        args, dargs)
                    # pull in any non-* and non-** args from self.execute
                    for param in _get_nonstar_args(self.execute):
                        if param in dargs:
                            p_dargs[param] = dargs[param]
                else:
                    p_args, p_dargs = _cherry_pick_args(self.execute,
                                                        args, dargs)

                _call_test_function(self.execute, *p_args, **p_dargs)
            except Exception:
                # Save the exception while we run our cleanup() before
                # reraising it.
                exc_info = sys.exc_info()
                try:
                    try:
                        if run_cleanup:
                            _cherry_pick_call(self.cleanup, *args, **dargs)
                    except Exception:
                        print 'Ignoring exception during cleanup() phase:'
                        traceback.print_exc()
                        print 'Now raising the earlier %s error' % exc_info[0]
                    self.crash_handler_report()
                finally:
                    self.job.logging.restore()
                    try:
                        raise exc_info[0], exc_info[1], exc_info[2]
                    finally:
                        # http://docs.python.org/library/sys.html#sys.exc_info
                        # Be nice and prevent a circular reference.
                        del exc_info
            else:
                try:
                    if run_cleanup:
                        _cherry_pick_call(self.cleanup, *args, **dargs)
                    self.crash_handler_report()
                finally:
                    self.job.logging.restore()
        except error.AutotestError:
            if self.network_destabilizing:
                self.job.enable_warnings("NETWORK")
            # Pass already-categorized errors on up.
            raise
        except Exception, e:
            if self.network_destabilizing:
                self.job.enable_warnings("NETWORK")
            # Anything else is an ERROR in our own code, not execute().
            raise error.UnhandledTestError(e)
        else:
            if self.network_destabilizing:
                self.job.enable_warnings("NETWORK")


def _get_nonstar_args(func):
    """Extract all the (normal) function parameter names.

    Given a function, returns a tuple of parameter names, specifically
    excluding the * and ** parameters, if the function accepts them.

    @param func: A callable that we want to choose arguments for.

    @return: A tuple of parameters accepted by the function.
    """
    return func.func_code.co_varnames[:func.func_code.co_argcount]

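# Illustrative example (not part of this module); the function below is
# hypothetical:
#
#     def example(a, b, c=3, *args, **dargs):
#         pass
#
#     _get_nonstar_args(example)    # -> ('a', 'b', 'c')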

def _cherry_pick_args(func, args, dargs):
    """Sanitize positional and keyword arguments before calling a function.

    Given a callable (func), an argument tuple and a dictionary of keyword
    arguments, pick only those arguments which the function is prepared to
    accept and return a new argument tuple and keyword argument dictionary.

    Args:
      func: A callable that we want to choose arguments for.
      args: A tuple of positional arguments to consider passing to func.
      dargs: A dictionary of keyword arguments to consider passing to func.
    Returns:
      A tuple of: (args tuple, keyword arguments dictionary)
    """
    # Cherry pick args:
    if func.func_code.co_flags & 0x04:
        # func accepts *args, so return the entire args.
        p_args = args
    else:
        p_args = ()

    # Cherry pick dargs:
    if func.func_code.co_flags & 0x08:
        # func accepts **dargs, so return the entire dargs.
        p_dargs = dargs
    else:
        # Only return the keyword arguments that func accepts.
        p_dargs = {}
        for param in _get_nonstar_args(func):
            if param in dargs:
                p_dargs[param] = dargs[param]

    return p_args, p_dargs

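# Illustrative example (not part of this module); the function and arguments
# below are hypothetical:
#
#     def run_once(self, seconds=1):
#         pass
#
#     _cherry_pick_args(run_once, (), {'seconds': 5, 'tag': 'quick'})
#     # -> ((), {'seconds': 5})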

def _cherry_pick_call(func, *args, **dargs):
    """Cherry picks arguments from args/dargs based on what "func" accepts
    and calls the function with the picked arguments."""
    p_args, p_dargs = _cherry_pick_args(func, args, dargs)
    return func(*p_args, **p_dargs)


def _validate_args(args, dargs, *funcs):
    """Verify that arguments are appropriate for at least one callable.

    Given a list of callables as additional parameters, verify that
    the proposed keyword arguments in dargs will each be accepted by at least
    one of the callables.

    NOTE: args is currently not supported and must be empty.

    Args:
      args: A tuple of proposed positional arguments.
      dargs: A dictionary of proposed keyword arguments.
      *funcs: Callables to be searched for acceptance of args and dargs.
    Raises:
      error.AutotestError: if an arg won't be accepted by any of *funcs.
    """
    all_co_flags = 0
    all_varnames = ()
    for func in funcs:
        all_co_flags |= func.func_code.co_flags
        all_varnames += func.func_code.co_varnames[:func.func_code.co_argcount]

    # Check if the given args belong to at least one of the given methods.
    if len(args) > 0:
        # Current implementation doesn't allow the use of args.
        raise error.TestError('Unnamed arguments not accepted. Please '
                              'call job.run_test with named args only')

    # Check if the given dargs belong to at least one of the given methods.
    if len(dargs) > 0:
        if not all_co_flags & 0x08:
            # no func accepts **dargs, so:
            for param in dargs:
                if not param in all_varnames:
                    raise error.AutotestError('Unknown parameter: %s' % param)

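# Illustrative example (not part of this module); the functions below are
# hypothetical:
#
#     def initialize(self):
#         pass
#
#     def run_once(self, seconds=1):
#         pass
#
#     _validate_args((), {'seconds': 2}, initialize, run_once)  # accepted
#     _validate_args((), {'minutes': 2}, initialize, run_once)
#     # raises error.AutotestError('Unknown parameter: minutes')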

def _installtest(job, url):
    (group, name) = job.pkgmgr.get_package_name(url, 'test')

    # Bail if the test is already installed
    group_dir = os.path.join(job.testdir, "download", group)
    if os.path.exists(os.path.join(group_dir, name)):
        return (group, name)

    # If the group directory is missing create it and add
    # an empty __init__.py so that sub-directories are
    # considered for import.
    if not os.path.exists(group_dir):
        os.mkdir(group_dir)
        f = file(os.path.join(group_dir, '__init__.py'), 'w+')
        f.close()

    print name + ": installing test url=" + url
    tarball = os.path.basename(url)
    tarball_path = os.path.join(group_dir, tarball)
    test_dir = os.path.join(group_dir, name)
    job.pkgmgr.fetch_pkg(tarball, tarball_path,
                         repo_url = os.path.dirname(url))

    # Create the directory for the test
    if not os.path.exists(test_dir):
        os.mkdir(os.path.join(group_dir, name))

    job.pkgmgr.untar_pkg(tarball_path, test_dir)

    os.remove(tarball_path)

    # For this 'sub-object' to be importable via the name
    # 'group.name' we need to provide an __init__.py,
    # so link the main entry point to this.
    os.symlink(name + '.py', os.path.join(group_dir, name,
                                          '__init__.py'))

    # The test is now installed.
    return (group, name)


def _call_test_function(func, *args, **dargs):
    """Calls a test function and translates exceptions so that errors
    inside test code are considered test failures."""
    try:
        return func(*args, **dargs)
    except error.AutotestError:
        # Pass already-categorized errors on up as is.
        raise
    except Exception, e:
        # Other exceptions must be treated as a FAIL when
        # raised during the test functions
        raise error.UnhandledTestFail(e)


def runtest(job, url, tag, args, dargs,
            local_namespace={}, global_namespace={},
            before_test_hook=None, after_test_hook=None,
            before_iteration_hook=None, after_iteration_hook=None):
    local_namespace = local_namespace.copy()
    global_namespace = global_namespace.copy()

    # if this is not a plain test name then download and install the
    # specified test
    if url.endswith('.tar.bz2'):
        (group, testname) = _installtest(job, url)
        bindir = os.path.join(job.testdir, 'download', group, testname)
        site_bindir = None
    else:
        # if the test is local, it can be found in either testdir
        # or site_testdir. tests in site_testdir override tests
        # defined in testdir
        (group, testname) = ('', url)
        bindir = os.path.join(job.testdir, group, testname)
        if hasattr(job, 'site_testdir'):
            site_bindir = os.path.join(job.site_testdir,
                                       group, testname)
        else:
            site_bindir = None

    # The job object here can be that of a server side job or a client
    # side job. 'install_pkg' method won't be present for server side
    # jobs, so do the fetch only if that method is present in the job
    # obj.
    if hasattr(job, 'install_pkg'):
        try:
            job.install_pkg(testname, 'test', bindir)
        except error.PackageInstallError, e:
            # continue as a fall back mechanism and see if the test code
            # already exists on the machine
            pass

    outputdir = os.path.join(job.resultdir, testname)
    if tag:
        outputdir += '.' + tag

    # if we can find the test in site_bindir, use this version
    if site_bindir and os.path.exists(site_bindir):
        bindir = site_bindir
        testdir = job.site_testdir
    elif os.path.exists(bindir):
        testdir = job.testdir
    else:
        raise error.TestError(testname + ': test does not exist')

    local_namespace['job'] = job
    local_namespace['bindir'] = bindir
    local_namespace['outputdir'] = outputdir

    if group:
        sys.path.insert(0, os.path.join(testdir, 'download'))
        group += '.'
    else:
        sys.path.insert(0, os.path.join(testdir, testname))

    try:
        exec ("import %s%s" % (group, testname),
              local_namespace, global_namespace)
        exec ("mytest = %s%s.%s(job, bindir, outputdir)" %
              (group, testname, testname),
              local_namespace, global_namespace)
    finally:
        sys.path.pop(0)

    pwd = os.getcwd()
    os.chdir(outputdir)

    try:
        mytest = global_namespace['mytest']
        if before_test_hook:
            before_test_hook(mytest)

        # use the register_*_iteration_hook methods to register any
        # passed-in iteration hooks
        if before_iteration_hook:
            mytest.register_before_iteration_hook(before_iteration_hook)
        if after_iteration_hook:
            mytest.register_after_iteration_hook(after_iteration_hook)
        mytest._exec(args, dargs)
    finally:
        os.chdir(pwd)
        if after_test_hook:
            after_test_hook(mytest)
        shutil.rmtree(mytest.tmpdir, ignore_errors=True)