blob: 62244f4d543ba810f2039716b04118a34b3033d3 [file] [log] [blame]
mblighf1c52842007-10-16 15:21:38 +00001"""
2The main job wrapper for the server side.
3
4This is the core infrastructure. Derived from the client side job.py
5
6Copyright Martin J. Bligh, Andy Whitcroft 2007
7"""
8
9__author__ = """
10Martin J. Bligh <mbligh@google.com>
11Andy Whitcroft <apw@shadowen.org>
12"""
13
mblighdbdac6c2008-03-05 15:49:58 +000014import os, sys, re, time, select, subprocess, traceback
mblighf1c52842007-10-16 15:21:38 +000015
mbligh6437ff52008-04-17 15:24:38 +000016from autotest_lib.client.bin import fd_stack
mbligh302482e2008-05-01 20:06:16 +000017from autotest_lib.client.common_lib import error, logging
mbligh6437ff52008-04-17 15:24:38 +000018from autotest_lib.server import test, subcommand
19from autotest_lib.tko import db as tko_db, status_lib, utils as tko_utils
20from autotest_lib.server.utils import *
21
mbligh3f4bced2007-11-05 17:55:53 +000022
# load up a control segment
# these are all stored in <server_dir>/control_segments
def load_control_segment(name):
    """Return the text of the control segment <name>.

    Control segments live in <server_dir>/control_segments.  If the
    requested segment does not exist, an empty string is returned so
    callers can unconditionally concatenate segments together.
    """
    server_dir = os.path.dirname(os.path.abspath(__file__))
    script_file = os.path.join(server_dir, "control_segments", name)
    if os.path.exists(script_file):
        # use open() rather than the deprecated file() builtin, and
        # close the handle explicitly instead of leaking it
        f = open(script_file)
        try:
            return f.read()
        finally:
            f.close()
    else:
        return ""
32
33
# Preamble prepended to every control segment before it is exec'd.  It
# imports the names a control file expects to find in its namespace and
# binds the 'job' object (supplied via the exec namespace) onto the
# Autotest and SSHHost classes.  For multi-machine jobs it also writes
# out a .machines file listing the machines involved.
preamble = """\
import os, sys

from autotest_lib.server import hosts, autotest, kvm, git, standalone_profiler
from autotest_lib.server import source_kernel, rpm_kernel, deb_kernel
from autotest_lib.server import git_kernel
from autotest_lib.server.subcommand import *
from autotest_lib.server.utils import run, get_tmp_dir, sh_escape
from autotest_lib.client.common_lib.error import *
from autotest_lib.client.common_lib import barrier

autotest.Autotest.job = job
hosts.SSHHost.job = job
barrier = barrier.barrier
if len(machines) > 1:
    open('.machines', 'w').write('\\n'.join(machines) + '\\n')
"""

# Wrapper used when the job was given a *client-side* control file:
# push the control file to each machine with autotest.Autotest and run
# it there ('control' is injected into the exec namespace by run()).
client_wrapper = """
at = autotest.Autotest()

def run_client(machine):
    host = hosts.SSHHost(machine)
    at.run(control, host=host)

job.parallel_simple(run_client, machines)
"""

# Collect from every machine any crashdumps produced since
# test_start_time (injected into the exec namespace by run()).
crashdumps = """
def crashdumps(machine):
    host = hosts.SSHHost(machine, initialize=False)
    host.get_crashdumps(test_start_time)

job.parallel_simple(crashdumps, machines, log=False)
"""

# Reboot all machines in parallel.
reboot_segment="""\
def reboot(machine):
    host = hosts.SSHHost(machine, initialize=False)
    host.reboot()

job.parallel_simple(reboot, machines, log=False)
"""

# Reinstall (machine_install) all machines in parallel.
install="""\
def install(machine):
    host = hosts.SSHHost(machine, initialize=False)
    host.machine_install()

job.parallel_simple(install, machines, log=False)
"""
85
# load up the verifier control segment, with an optional site-specific hook
verify = load_control_segment("site_verify")
verify += load_control_segment("verify")

# load up the repair control segment, with an optional site-specific hook
repair = load_control_segment("site_repair")
repair += load_control_segment("repair")


# load up site-specific code for generating site-specific job data
try:
    import site_job
    get_site_job_data = site_job.get_site_job_data
    del site_job
except ImportError:
    # by default provide a stub that generates no site data
    def get_site_job_data(job):
        return {}
104
105
class base_server_job:
    """The actual job against which we do everything.

    Properties:
        autodir
            The top level autotest directory (/usr/local/autotest).
        serverdir
            <autodir>/server/
        clientdir
            <autodir>/client/
        conmuxdir
            <autodir>/conmux/
        testdir
            <autodir>/server/tests/
        control
            the control file for this job
    """

    # Version of the status-log format this job writes; recorded in the
    # job keyvals so the results parser knows how to interpret the logs.
    STATUS_VERSION = 1


    def __init__(self, control, args, resultdir, label, user, machines,
                 client=False, parse_job=""):
        """
        control
            The control file (pathname of)
        args
            args to pass to the control file
        resultdir
            where to throw the results
        label
            label for the job
        user
            Username for the job (email address)
        machines
            list of machine hostnames this job runs against
        client
            True if a client-side control file
        parse_job
            job tag used for continuous parsing into the results
            database; "" disables continuous parsing
        """
        path = os.path.dirname(__file__)
        self.autodir = os.path.abspath(os.path.join(path, '..'))
        self.serverdir = os.path.join(self.autodir, 'server')
        self.testdir = os.path.join(self.serverdir, 'tests')
        self.tmpdir = os.path.join(self.serverdir, 'tmp')
        self.conmuxdir = os.path.join(self.autodir, 'conmux')
        self.clientdir = os.path.join(self.autodir, 'client')
        if control:
            self.control = open(control, 'r').read()
            # strip DOS line endings out of the control file
            self.control = re.sub('\r', '', self.control)
        else:
            self.control = None
        self.resultdir = resultdir
        if not os.path.exists(resultdir):
            os.mkdir(resultdir)
        self.debugdir = os.path.join(resultdir, 'debug')
        if not os.path.exists(self.debugdir):
            os.mkdir(self.debugdir)
        self.status = os.path.join(resultdir, 'status')
        self.label = label
        self.user = user
        self.args = args
        self.machines = machines
        self.client = client
        self.record_prefix = ''         # tab indent prefix for status records
        self.warning_loggers = set()    # file objects polled for console warnings

        # capture stdout/stderr so they can be redirected per subcommand
        self.stdout = fd_stack.fd_stack(1, sys.stdout)
        self.stderr = fd_stack.fd_stack(2, sys.stderr)

        # start each run with a fresh status file
        if os.path.exists(self.status):
            os.unlink(self.status)
        job_data = {'label' : label, 'user' : user,
                    'hostname' : ','.join(machines),
                    'status_version' : str(self.STATUS_VERSION)}
        job_data.update(get_site_job_data(self))
        write_keyval(self.resultdir, job_data)

        # continuous parsing is only started here for single-machine
        # jobs; multi-machine jobs get per-machine parsers set up in
        # parallel_simple()
        self.parse_job = parse_job
        if self.parse_job and len(machines) == 1:
            self.using_parser = True
            self.init_parser(resultdir)
        else:
            self.using_parser = False
187
188
    def init_parser(self, resultdir):
        """Start the continuous parsing of resultdir. This sets up
        the database connection and inserts the basic job object into
        the database if necessary."""
        # redirect parser debugging to .parse.log (unbuffered handle)
        parse_log = os.path.join(resultdir, '.parse.log')
        parse_log = open(parse_log, 'w', 0)
        tko_utils.redirect_parser_debugging(parse_log)
        # create a job model object and set up the db
        self.results_db = tko_db.db(autocommit=True)
        self.parser = status_lib.parser(self.STATUS_VERSION)
        self.job_model = self.parser.make_job(resultdir)
        self.parser.start(self.job_model)
        # check if a job already exists in the db and insert it if
        # it does not
        job_idx = self.results_db.find_job(self.parse_job)
        if job_idx is None:
            self.results_db.insert_job(self.parse_job,
                                       self.job_model)
        else:
            # reattach to the existing job/machine rows
            machine_idx = self.results_db.lookup_machine(
                self.job_model.machine)
            self.job_model.index = job_idx
            self.job_model.machine_idx = machine_idx
213
214
mblighfe0af112008-04-17 15:27:47 +0000215 def cleanup_parser(self):
mbligh6437ff52008-04-17 15:24:38 +0000216 """This should be called after the server job is finished
217 to carry out any remaining cleanup (e.g. flushing any
218 remaining test results to the results db)"""
219 if not self.using_parser:
220 return
221 final_tests = self.parser.end()
222 for test in final_tests:
223 self.results_db.insert_test(self.job_model, test)
mblighfe0af112008-04-17 15:27:47 +0000224 self.using_parser = False
mbligh6437ff52008-04-17 15:24:38 +0000225
mblighf1c52842007-10-16 15:21:38 +0000226
    def verify(self):
        """Run the verify control segment against self.machines.

        Raises error.AutoservError if the job has no machines; records
        an ABORT in the status log and re-raises if verification fails.
        """
        if not self.machines:
            raise error.AutoservError(
                'No machines specified to verify')
        try:
            namespace = {'machines' : self.machines, 'job' : self}
            exec(preamble + verify, namespace, namespace)
        except Exception, e:
            msg = ('Verify failed\n' + str(e) + '\n'
                   + traceback.format_exc())
            self.record('ABORT', None, None, msg)
            raise
239
240
    def repair(self):
        """Run the repair control segment, then always re-verify.

        Raises error.AutoservError if the job has no machines.  Any
        exception raised by the repair segment itself is printed and
        swallowed so that the follow-up verify() still runs.
        """
        if not self.machines:
            raise error.AutoservError(
                'No machines specified to repair')
        namespace = {'machines' : self.machines, 'job' : self}
        # no matter what happens during repair, go on to try to reverify
        try:
            exec(preamble + repair, namespace, namespace)
        except Exception, exc:
            print 'Exception occured during repair'
            traceback.print_exc()
        self.verify()
mblighe25fd5b2008-01-22 17:23:37 +0000253
254
    def enable_external_logging(self):
        """Start or restart external logging mechanism.

        Hook for subclasses (e.g. site_server_job); the base
        implementation does nothing.
        """
        pass


    def disable_external_logging(self):
        """Pause or stop external logging mechanism.

        Hook for subclasses; the base implementation does nothing.
        """
        pass


    def use_external_logging(self):
        """Return True if external logging should be used.

        The base implementation always returns False; subclasses that
        provide an external logging mechanism override this.
        """
        return False
271
272
    def parallel_simple(self, function, machines, log=True, timeout=None):
        """Run 'function' using parallel_simple, with an extra
        wrapper to handle the necessary setup for continuous parsing,
        if possible. If continuous parsing is already properly
        initialized then this should just work."""
        # we fork unless this is a single-machine job running against
        # its own (sole) machine, in which case __init__ already set up
        # the parser
        is_forking = not (len(machines) == 1 and
                          self.machines == machines)
        if self.parse_job and is_forking:
            def wrapper(machine):
                # runs in the forked child: narrow this job object down
                # to a single machine and give it its own parse tag,
                # result directory and parser
                self.parse_job += "/" + machine
                self.using_parser = True
                self.machines = [machine]
                self.resultdir = os.path.join(self.resultdir,
                                              machine)
                self.init_parser(self.resultdir)
                result = function(machine)
                self.cleanup_parser()
                return result
        else:
            wrapper = function
        subcommand.parallel_simple(wrapper, machines, log, timeout)
294
295
mblighe8b37a92007-12-19 15:54:11 +0000296 def run(self, reboot = False, install_before = False,
mblighddd54332008-03-07 18:14:06 +0000297 install_after = False, collect_crashdumps = True,
298 namespace = {}):
mbligh60dbd502007-10-26 14:59:31 +0000299 # use a copy so changes don't affect the original dictionary
300 namespace = namespace.copy()
mblighe8b37a92007-12-19 15:54:11 +0000301 machines = self.machines
mbligh60dbd502007-10-26 14:59:31 +0000302
mblighfaf0cd42007-11-19 16:00:24 +0000303 self.aborted = False
mblighf1c52842007-10-16 15:21:38 +0000304 namespace['machines'] = machines
305 namespace['args'] = self.args
306 namespace['job'] = self
mbligh6e294382007-11-05 18:11:29 +0000307 test_start_time = int(time.time())
mblighf1c52842007-10-16 15:21:38 +0000308
mbligh87c5d882007-10-29 17:07:24 +0000309 os.chdir(self.resultdir)
mblighcaa62c22008-04-07 21:51:17 +0000310
311 self.enable_external_logging()
mbligh87c5d882007-10-29 17:07:24 +0000312 status_log = os.path.join(self.resultdir, 'status.log')
mblighf1c52842007-10-16 15:21:38 +0000313 try:
mblighf36243d2007-10-30 15:36:16 +0000314 if install_before and machines:
315 exec(preamble + install, namespace, namespace)
mblighf1c52842007-10-16 15:21:38 +0000316 if self.client:
317 namespace['control'] = self.control
318 open('control', 'w').write(self.control)
319 open('control.srv', 'w').write(client_wrapper)
320 server_control = client_wrapper
321 else:
322 open('control.srv', 'w').write(self.control)
323 server_control = self.control
mblighf1c52842007-10-16 15:21:38 +0000324 exec(preamble + server_control, namespace, namespace)
325
326 finally:
mblighddd54332008-03-07 18:14:06 +0000327 if machines and collect_crashdumps:
mbligh6e294382007-11-05 18:11:29 +0000328 namespace['test_start_time'] = test_start_time
mbligh98ff1462007-12-19 16:27:55 +0000329 exec(preamble + crashdumps,
330 namespace, namespace)
mblighcaa62c22008-04-07 21:51:17 +0000331 self.disable_external_logging()
mblighf1c52842007-10-16 15:21:38 +0000332 if reboot and machines:
mbligh98ff1462007-12-19 16:27:55 +0000333 exec(preamble + reboot_segment,
334 namespace, namespace)
mblighf36243d2007-10-30 15:36:16 +0000335 if install_after and machines:
336 exec(preamble + install, namespace, namespace)
mblighf1c52842007-10-16 15:21:38 +0000337
338
339 def run_test(self, url, *args, **dargs):
340 """Summon a test object and run it.
341
342 tag
343 tag to add to testname
344 url
345 url of the test to run
346 """
347
mblighf1c52842007-10-16 15:21:38 +0000348 (group, testname) = test.testname(url)
349 tag = None
350 subdir = testname
mbligh43ac5222007-10-16 15:55:01 +0000351
mblighf1c52842007-10-16 15:21:38 +0000352 if dargs.has_key('tag'):
353 tag = dargs['tag']
354 del dargs['tag']
355 if tag:
356 subdir += '.' + tag
mblighf1c52842007-10-16 15:21:38 +0000357
mbligh43ac5222007-10-16 15:55:01 +0000358 try:
359 test.runtest(self, url, tag, args, dargs)
360 self.record('GOOD', subdir, testname, 'completed successfully')
mbligh302482e2008-05-01 20:06:16 +0000361 except error.TestNAError, detail:
362 self.record('TEST_NA', subdir, testmame, str(detail))
mbligh43ac5222007-10-16 15:55:01 +0000363 except Exception, detail:
mbligh302482e2008-05-01 20:06:16 +0000364 info = str(detail) + "\n" + traceback.format_exc()
365 self.record('FAIL', subdir, testname, info)
mblighf1c52842007-10-16 15:21:38 +0000366
367
    def run_group(self, function, *args, **dargs):
        """\
        function:
            subroutine to run
        *args:
            arguments for the function

        Runs the function inside a START / END group in the status
        log, indenting any records the function emits.  Returns the
        function's return value (None if it raised).
        """

        result = None
        name = function.__name__

        # Allow the tag for the group to be specified.
        if dargs.has_key('tag'):
            tag = dargs['tag']
            del dargs['tag']
            if tag:
                name = tag

        # if tag:
        #     name += '.' + tag
        old_record_prefix = self.record_prefix
        try:
            try:
                self.record('START', None, name)
                self.record_prefix += '\t'
                result = function(*args, **dargs)
                self.record_prefix = old_record_prefix
                self.record('END GOOD', None, name)
            except:
                # NOTE(review): this handler records END FAIL but does
                # not re-raise, so the outer except clauses below only
                # fire if record() itself throws — confirm intended
                self.record_prefix = old_record_prefix
                self.record('END FAIL', None, name,
                            traceback.format_exc())

        # We don't want to raise up an error higher if it's just
        # a TestError - we want to carry on to other tests. Hence
        # this outer try/except block.
        except error.TestError:
            pass
        except:
            raise error.TestError(name + ' failed\n' +
                                  traceback.format_exc())

        return result
411
412
    def record(self, status_code, subdir, operation, status=''):
        """
        Record job-level status

        The intent is to make this file both machine parseable and
        human readable. That involves a little more complexity, but
        really isn't all that bad ;-)

        Format is <status code>\t<subdir>\t<operation>\t<status>

        status code: see common_lib.logging.is_valid_status()
                     for valid status definition

        subdir: MUST be a relevant subdirectory in the results,
        or None, which will be represented as '----'

        operation: description of what you ran (e.g. "dbench", or
                                        "mkfs -t foobar /dev/sda9")

        status: error message or "completed successfully"

        ------------------------------------------------------------

        Initial tabs indicate indent levels for grouping, and is
        governed by self.record_prefix

        multiline messages have secondary lines prefaced by a double
        space ('  ')

        Executing this method will trigger the logging of all new
        warnings to date from the various console loggers.
        """
        # poll all our warning loggers for new warnings
        warnings = self._read_warnings()
        for timestamp, msg in warnings:
            self.__record("WARN", None, None, msg, timestamp)

        # write out the actual status log line
        self.__record(status_code, subdir, operation, status)
452
453
    def _read_warnings(self):
        """Poll all registered console warning loggers and return any
        pending warnings as a list of (timestamp, message) tuples in
        timestamp order.  Loggers whose pipes have closed are dropped
        from self.warning_loggers."""
        warnings = []
        while True:
            # pull in a line of output from every logger that has
            # output ready to be read
            loggers, _, _ = select.select(self.warning_loggers,
                                          [], [], 0)
            closed_loggers = set()
            for logger in loggers:
                line = logger.readline()
                # record any broken pipes (aka line == empty)
                if len(line) == 0:
                    closed_loggers.add(logger)
                    continue
                # each warning line is "<epoch>\t<message>"
                timestamp, msg = line.split('\t', 1)
                warnings.append((int(timestamp), msg.strip()))

            # stop listening to loggers that are closed
            self.warning_loggers -= closed_loggers

            # stop if none of the loggers have any output left
            if not loggers:
                break

        # sort into timestamp order
        warnings.sort()
        return warnings
mblighf4e04152008-02-21 16:05:53 +0000481
482
mblighdab39662008-02-27 16:47:55 +0000483 def _render_record(self, status_code, subdir, operation, status='',
484 epoch_time=None, record_prefix=None):
mblighf4e04152008-02-21 16:05:53 +0000485 """
mblighdab39662008-02-27 16:47:55 +0000486 Internal Function to generate a record to be written into a
487 status log. For use by server_job.* classes only.
mblighf1c52842007-10-16 15:21:38 +0000488 """
mblighf1c52842007-10-16 15:21:38 +0000489 if subdir:
490 if re.match(r'[\n\t]', subdir):
mbligh6437ff52008-04-17 15:24:38 +0000491 raise ValueError(
492 'Invalid character in subdir string')
mblighf1c52842007-10-16 15:21:38 +0000493 substr = subdir
494 else:
495 substr = '----'
mbligh6437ff52008-04-17 15:24:38 +0000496
mbligh302482e2008-05-01 20:06:16 +0000497 if not logging.is_valid_status(status_code):
mbligh6437ff52008-04-17 15:24:38 +0000498 raise ValueError('Invalid status code supplied: %s' %
499 status_code)
mblighe25fd5b2008-01-22 17:23:37 +0000500 if not operation:
501 operation = '----'
mblighf1c52842007-10-16 15:21:38 +0000502 if re.match(r'[\n\t]', operation):
mbligh6437ff52008-04-17 15:24:38 +0000503 raise ValueError(
504 'Invalid character in operation string')
mblighf1c52842007-10-16 15:21:38 +0000505 operation = operation.rstrip()
506 status = status.rstrip()
507 status = re.sub(r"\t", " ", status)
508 # Ensure any continuation lines are marked so we can
509 # detect them in the status file to ensure it is parsable.
510 status = re.sub(r"\n", "\n" + self.record_prefix + " ", status)
511
mbligh30270302007-11-05 20:33:52 +0000512 # Generate timestamps for inclusion in the logs
mblighf4e04152008-02-21 16:05:53 +0000513 if epoch_time is None:
514 epoch_time = int(time.time())
mbligh30270302007-11-05 20:33:52 +0000515 local_time = time.localtime(epoch_time)
516 epoch_time_str = "timestamp=%d" % (epoch_time,)
517 local_time_str = time.strftime("localtime=%b %d %H:%M:%S",
518 local_time)
519
mblighdab39662008-02-27 16:47:55 +0000520 if record_prefix is None:
521 record_prefix = self.record_prefix
522
mbligh30270302007-11-05 20:33:52 +0000523 msg = '\t'.join(str(x) for x in (status_code, substr, operation,
524 epoch_time_str, local_time_str,
525 status))
mblighdab39662008-02-27 16:47:55 +0000526 return record_prefix + msg + '\n'
527
528
    def _record_prerendered(self, msg):
        """
        Record a pre-rendered msg into the status logs. The only
        change this makes to the message is to add on the local
        indentation. Should not be called outside of server_job.*
        classes. Unlike __record, this does not write the message
        to standard output.
        """
        lines = []
        status_file = os.path.join(self.resultdir, 'status.log')
        status_log = open(status_file, 'a')
        for line in msg.splitlines():
            line = self.record_prefix + line + '\n'
            lines.append(line)
            status_log.write(line)
        status_log.close()
        # feed the newly written lines to the continuous parser
        # (no-op when continuous parsing is disabled)
        self.__parse_status(lines)
mblighdab39662008-02-27 16:47:55 +0000546
547
    def __record(self, status_code, subdir, operation, status='',
                 epoch_time=None):
        """
        Actual function for recording a single line into the status
        logs. Should never be called directly, only by job.record as
        this would bypass the console monitor logging.
        """

        msg = self._render_record(status_code, subdir, operation,
                                  status, epoch_time)


        # write the line to stdout, the job-level status.log, and (if a
        # subdir was given) the per-test status file as well
        status_file = os.path.join(self.resultdir, 'status.log')
        sys.stdout.write(msg)
        open(status_file, "a").write(msg)
        if subdir:
            test_dir = os.path.join(self.resultdir, subdir)
            if not os.path.exists(test_dir):
                os.mkdir(test_dir)
            status_file = os.path.join(test_dir, 'status')
            open(status_file, "a").write(msg)
        self.__parse_status([msg])
mblighb03ba642008-03-13 17:37:17 +0000570
571
    def __parse_status(self, new_lines):
        """Feed newly written status-log lines to the continuous parser
        and push any newly completed tests into the results database.
        No-op unless continuous parsing is enabled."""
        if not self.using_parser:
            return
        new_tests = self.parser.process_lines(new_lines)
        for test in new_tests:
            self.results_db.insert_test(self.job_model, test)
mblighdab39662008-02-27 16:47:55 +0000578
579
# a file-like object for catching stderr from an autotest client and
# extracting status logs from it
class client_logger(object):
    """Partial file object to write to both stdout and
    the status log file. We only implement those methods
    utils.run() actually calls.
    """
    # matches lines the client quoted as "AUTOTEST_STATUS:<tag>:<line>"
    parser = re.compile(r"^AUTOTEST_STATUS:([^:]*):(.*)$")
    # captures the leading tab indentation of a status line
    extract_indent = re.compile(r"^(\t*).*$")

    def __init__(self, job):
        self.job = job
        self.leftover = ""      # trailing partial line from the last write()
        self.last_line = ""     # most recent fully-processed status line
        self.logs = {}          # tag-keyed tree of buffered log lines
595
596
597 def _process_log_dict(self, log_dict):
598 log_list = log_dict.pop("logs", [])
599 for key in sorted(log_dict.iterkeys()):
600 log_list += self._process_log_dict(log_dict.pop(key))
601 return log_list
602
603
    def _process_logs(self):
        """Go through the accumulated logs in self.logs and print them
        out to stdout and the status log. Note that this processes
        logs in an ordering where:

        1) logs to different tags are never interleaved
        2) logs to x.y come before logs to x.y.z for all z
        3) logs to x.y come before x.z whenever y < z

        Note that this will in general not be the same as the
        chronological ordering of the logs. However, if a chronological
        ordering is desired that one can be reconstructed from the
        status log by looking at timestamp lines."""
        log_list = self._process_log_dict(self.logs)
        for line in log_list:
            self.job._record_prerendered(line + '\n')
        if log_list:
            # remember the last flushed line; used to pick the
            # indentation for any warnings that arrive later
            self.last_line = log_list[-1]
622
623
    def _process_quoted_line(self, tag, line):
        """Process a line quoted with an AUTOTEST_STATUS flag. If the
        tag is blank then we want to push out all the data we've been
        building up in self.logs, and then the newest line. If the
        tag is not blank, then push the line into the logs for handling
        later."""
        print line
        if tag == "":
            # top-level line: flush everything buffered, then this line
            self._process_logs()
            self.job._record_prerendered(line + '\n')
            self.last_line = line
        else:
            # file the line under its dotted tag path (e.g. "1.2.3")
            # in the buffered log tree
            tag_parts = [int(x) for x in tag.split(".")]
            log_dict = self.logs
            for part in tag_parts:
                log_dict = log_dict.setdefault(part, {})
            log_list = log_dict.setdefault("logs", [])
            log_list.append(line)
642
643
    def _process_line(self, line):
        """Write out a line of data to the appropriate stream. Status
        lines sent by autotest will be prepended with
        "AUTOTEST_STATUS", and all other lines are ssh error
        messages."""
        match = self.parser.search(line)
        if match:
            tag, line = match.groups()
            self._process_quoted_line(tag, line)
        else:
            # not a quoted status line; just echo it (e.g. ssh noise)
            print line
mblighdab39662008-02-27 16:47:55 +0000655
656
    def _format_warnings(self, last_line, warnings):
        """Render (timestamp, msg) warning tuples into WARN status-log
        lines indented to match last_line."""
        # use the indentation of whatever the last log line was
        indent = self.extract_indent.match(last_line).group(1)
        # if the last line starts a new group, add an extra indent
        if last_line.lstrip('\t').startswith("START\t"):
            indent += '\t'
        return [self.job._render_record("WARN", None, None, msg,
                                        timestamp, indent).rstrip('\n')
                for timestamp, msg in warnings]
666
667
    def _process_warnings(self, last_line, log_dict, warnings):
        """Insert rendered warnings into the deepest active sub-jobs of
        the buffered log tree, so they appear inside the group that was
        running when they occurred."""
        # NOTE(review): this comparison relies on Python 2's keys()
        # returning a plain list
        if log_dict.keys() in ([], ["logs"]):
            # there are no sub-jobs, just append the warnings here
            warnings = self._format_warnings(last_line, warnings)
            log_list = log_dict.setdefault("logs", [])
            log_list += warnings
            for warning in warnings:
                sys.stdout.write(warning + '\n')
        else:
            # there are sub-jobs, so put the warnings in there
            log_list = log_dict.get("logs", [])
            if log_list:
                last_line = log_list[-1]
            for key in sorted(log_dict.iterkeys()):
                if key != "logs":
                    self._process_warnings(last_line,
                                           log_dict[key],
                                           warnings)
686
687
    def write(self, data):
        """Accept a chunk of client stderr output, interleaving any
        pending console warnings and processing every complete line.
        A trailing partial line is held in self.leftover until more
        data (or close()) arrives."""
        # first check for any new console warnings
        warnings = self.job._read_warnings()
        self._process_warnings(self.last_line, self.logs, warnings)
        # now process the newest data written out
        data = self.leftover + data
        lines = data.split("\n")
        # process every line but the last one
        for line in lines[:-1]:
            self._process_line(line)
        # save the last line for later processing
        # since we may not have the whole line yet
        self.leftover = lines[-1]
701
702
    def flush(self):
        """Flush the underlying stdout stream; we do no buffering of
        our own beyond the partial-line leftover."""
        sys.stdout.flush()
mblighdab39662008-02-27 16:47:55 +0000705
706
    def close(self):
        """Flush out any remaining partial line and all buffered logs,
        then flush stdout."""
        if self.leftover:
            self._process_line(self.leftover)
        self._process_logs()
        self.flush()
mblighcaa62c22008-04-07 21:51:17 +0000712
# site_server_job.py may be non-existent or empty; make sure that an
# appropriate site_server_job class is created nevertheless
try:
    from autotest_lib.server.site_server_job import site_server_job
except ImportError:
    class site_server_job(base_server_job):
        pass

# the class callers should instantiate: base behaviour plus any
# site-specific specialization layered in between
class server_job(site_server_job):
    pass