# pylint: disable=missing-docstring

import cPickle as pickle
import copy
import errno
import fcntl
import logging
import os
import re
import tempfile
import time
import traceback
import weakref
from autotest_lib.client.common_lib import autotemp, error, log


class job_directory(object):
    """Represents a job.*dir directory."""


    class JobDirectoryException(error.AutotestError):
        """Generic job_directory exception superclass."""


    class MissingDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job does not exist."""
        def __init__(self, path):
            Exception.__init__(self, 'Directory %s does not exist' % path)


    class UncreatableDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job is missing and cannot
        be created."""
        def __init__(self, path, error):
            msg = 'Creation of directory %s failed with exception %s'
            msg %= (path, error)
            Exception.__init__(self, msg)


    class UnwritableDirectoryException(JobDirectoryException):
        """Raised when a writable directory required by the job exists
        but is not writable."""
        def __init__(self, path):
            msg = 'Directory %s exists but is not writable' % path
            Exception.__init__(self, msg)


    def __init__(self, path, is_writable=False):
        """
        Instantiate a job directory.

        @param path: The path of the directory. If None a temporary directory
            will be created instead.
        @param is_writable: If True, expect the directory to be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist.
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory exists but is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        if path is None:
            if is_writable:
                self._tempdir = autotemp.tempdir(unique_id='autotest')
                self.path = self._tempdir.name
            else:
                raise self.MissingDirectoryException(path)
        else:
            self._tempdir = None
            self.path = path
        self._ensure_valid(is_writable)


    def _ensure_valid(self, is_writable):
        """
        Ensure that self.path is a valid directory.

        Will check that the directory exists, and can optionally also enforce
        that it be writable. It can optionally create it if necessary. Creation
        will still fail if the path is rooted in a non-writable directory, or
        if a file already exists at the given location.

        @param is_writable A boolean indicating that the directory should
            not only exist, but also be writable.

        @raises MissingDirectoryException raised if is_writable=False and the
            directory does not exist.
        @raises UnwritableDirectoryException raised if is_writable=True and
            the directory is not writable.
        @raises UncreatableDirectoryException raised if is_writable=True, the
            directory does not exist and it cannot be created
        """
        # ensure the directory exists
        if is_writable:
            try:
                os.makedirs(self.path)
            except OSError, e:
                if e.errno != errno.EEXIST or not os.path.isdir(self.path):
                    raise self.UncreatableDirectoryException(self.path, e)
        elif not os.path.isdir(self.path):
            raise self.MissingDirectoryException(self.path)

        # if is_writable=True, also check that the directory is writable
        if is_writable and not os.access(self.path, os.W_OK):
            raise self.UnwritableDirectoryException(self.path)


    @staticmethod
    def property_factory(attribute):
        """
        Create a job.*dir -> job._*dir.path property accessor.

        @param attribute A string with the name of the attribute this is
            exposed as. '_'+attribute must then be an attribute that holds
            either None or a job_directory-like object.

        @returns A read-only property object that exposes a job_directory path
        """
        @property
        def dir_property(self):
            underlying_attribute = getattr(self, '_' + attribute)
            if underlying_attribute is None:
                return None
            else:
                return underlying_attribute.path
        return dir_property


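# Usage sketch (illustrative comment only, not part of the original module):
# a job_directory wraps a path, validating it and, when is_writable=True,
# creating it on demand. The paths below are hypothetical.
#
#     results = job_directory('/tmp/my_job/results', is_writable=True)
#     results.path     # -> '/tmp/my_job/results', created if it was missing
#     scratch = job_directory(None, is_writable=True)
#     scratch.path     # -> path of an autotemp-backed temporary directory

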
# decorator for use with job_state methods
def with_backing_lock(method):
    """A decorator to perform a lock-*-unlock cycle.

    When applied to a method, this decorator will automatically acquire a
    lock on the backing file before the call and release it again once the
    call completes.
    """
    def wrapped_method(self, *args, **dargs):
        already_have_lock = self._backing_file_lock is not None
        if not already_have_lock:
            self._lock_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            if not already_have_lock:
                self._unlock_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method


# decorator for use with job_state methods
def with_backing_file(method):
    """A decorator to perform a lock-read-*-write-unlock cycle.

    When applied to a method, this decorator will automatically wrap
    calls to the method in a lock-and-read before the call followed by a
    write-and-unlock. Any operation that is reading or writing state
    should be decorated with this method to ensure that backing file
    state is consistently maintained.
    """
    @with_backing_lock
    def wrapped_method(self, *args, **dargs):
        self._read_from_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            self._write_to_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method



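# Illustrative expansion (comments only): a job_state method decorated with
# @with_backing_file behaves roughly like the hand-written version below,
# assuming the backing-file lock is not already held by an outer call.
#
#     def set(self, namespace, name, value):
#         self._lock_backing_file()
#         try:
#             self._read_from_backing_file()
#             try:
#                 ...  # the undecorated method body runs here
#             finally:
#                 self._write_to_backing_file()
#         finally:
#             self._unlock_backing_file()

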
class job_state(object):
    """A class for managing explicit job and user state, optionally persistent.

    The class allows you to save state by name (like a dictionary). Any state
    stored in this class should be picklable and deep copyable. While this is
    not enforced it is recommended that only valid python identifiers be used
    as names. Additionally, the namespace 'stateful_property' is used for
    storing the values associated with properties constructed using the
    property_factory method.
    """

    NO_DEFAULT = object()
    PICKLE_PROTOCOL = 2  # highest protocol available in python 2.4


    def __init__(self):
        """Initialize the job state."""
        self._state = {}
        self._backing_file = None
        self._backing_file_initialized = False
        self._backing_file_lock = None


    def _lock_backing_file(self):
        """Acquire a lock on the backing file."""
        if self._backing_file:
            self._backing_file_lock = open(self._backing_file, 'a')
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_EX)


    def _unlock_backing_file(self):
        """Release a lock on the backing file."""
        if self._backing_file_lock:
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_UN)
            self._backing_file_lock.close()
            self._backing_file_lock = None


    def read_from_file(self, file_path, merge=True):
        """Read in any state from the file at file_path.

        When merge=True, any state specified only in-memory will be preserved.
        Any state specified on-disk will be set in-memory, even if an in-memory
        setting already exists.

        @param file_path: The path where the state should be read from. It must
            exist but it can be empty.
        @param merge: If true, merge the on-disk state with the in-memory
            state. If false, replace the in-memory state with the on-disk
            state.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """

        # we can assume that the file exists
        if os.path.getsize(file_path) == 0:
            on_disk_state = {}
        else:
            on_disk_state = pickle.load(open(file_path))

        if merge:
            # merge the on-disk state with the in-memory state
            for namespace, namespace_dict in on_disk_state.iteritems():
                in_memory_namespace = self._state.setdefault(namespace, {})
                for name, value in namespace_dict.iteritems():
                    if name in in_memory_namespace:
                        if in_memory_namespace[name] != value:
                            logging.info('Persistent value of %s.%s from %s '
                                         'overriding existing in-memory '
                                         'value', namespace, name, file_path)
                            in_memory_namespace[name] = value
                        else:
                            logging.debug('Value of %s.%s is unchanged, '
                                          'skipping import', namespace, name)
                    else:
                        logging.debug('Importing %s.%s from state file %s',
                                      namespace, name, file_path)
                        in_memory_namespace[name] = value
        else:
            # just replace the in-memory state with the on-disk state
            self._state = on_disk_state

        # lock the backing file before we refresh it
        with_backing_lock(self.__class__._write_to_backing_file)(self)


    def write_to_file(self, file_path):
        """Write out the current state to the given path.

        @param file_path: The path where the state should be written out to.
            Must be writable.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """
        outfile = open(file_path, 'w')
        try:
            pickle.dump(self._state, outfile, self.PICKLE_PROTOCOL)
        finally:
            outfile.close()


    def _read_from_backing_file(self):
        """Refresh the current state from the backing file.

        If the backing file has never been read before (indicated by checking
        self._backing_file_initialized) it will merge the file with the
        in-memory state, rather than overwriting it.
        """
        if self._backing_file:
            merge_backing_file = not self._backing_file_initialized
            self.read_from_file(self._backing_file, merge=merge_backing_file)
            self._backing_file_initialized = True


    def _write_to_backing_file(self):
        """Flush the current state to the backing file."""
        if self._backing_file:
            self.write_to_file(self._backing_file)


    @with_backing_file
    def _synchronize_backing_file(self):
        """Synchronizes the contents of the in-memory and on-disk state."""
        # state is implicitly synchronized in _with_backing_file methods
        pass


    def set_backing_file(self, file_path):
        """Change the path used as the backing file for the persistent state.

        When a new backing file is specified, if a file already exists then
        its contents will be added into the current state, with conflicts
        between the file and memory being resolved in favor of the file
        contents. The file will then be kept in sync with the (combined)
        in-memory state. The syncing can be disabled by setting this to None.

        @param file_path: A path on the filesystem that can be read from and
            written to, or None to turn off the backing store.
        """
        self._synchronize_backing_file()
        self._backing_file = file_path
        self._backing_file_initialized = False
        self._synchronize_backing_file()


    @with_backing_file
    def get(self, namespace, name, default=NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        if self.has(namespace, name):
            return copy.deepcopy(self._state[namespace][name])
        elif default is self.NO_DEFAULT:
            raise KeyError('No key %s in namespace %s' % (name, namespace))
        else:
            return default


    @with_backing_file
    def set(self, namespace, name, value):
        """Saves the value given with the provided name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        namespace_dict = self._state.setdefault(namespace, {})
        namespace_dict[name] = copy.deepcopy(value)
        logging.debug('Persistent state %s.%s now set to %r', namespace,
                      name, value)


    @with_backing_file
    def has(self, namespace, name):
        """Return a boolean indicating if namespace.name is defined.

        @param namespace: The namespace to check for a definition.
        @param name: The name to check for a definition.

        @return: True if the given name is defined in the given namespace and
            False otherwise.
        """
        return namespace in self._state and name in self._state[namespace]


    @with_backing_file
    def discard(self, namespace, name):
        """If namespace.name is a defined value, deletes it.

        @param namespace: The namespace that the property is stored in.
        @param name: The name the value is saved with.
        """
        if self.has(namespace, name):
            del self._state[namespace][name]
            if len(self._state[namespace]) == 0:
                del self._state[namespace]
            logging.debug('Persistent state %s.%s deleted', namespace, name)
        else:
            logging.debug(
                'Persistent state %s.%s not defined so nothing is discarded',
                namespace, name)


    @with_backing_file
    def discard_namespace(self, namespace):
        """Delete all defined namespace.* names.

        @param namespace: The namespace to be cleared.
        """
        if namespace in self._state:
            del self._state[namespace]
            logging.debug('Persistent state %s.* deleted', namespace)


    @staticmethod
    def property_factory(state_attribute, property_attribute, default,
                         namespace='global_properties'):
        """
        Create a property object for an attribute using self.get and self.set.

        @param state_attribute: A string with the name of the attribute on
            job that contains the job_state instance.
        @param property_attribute: A string with the name of the attribute
            this property is exposed as.
        @param default: A default value that should be used for this property
            if it is not set.
        @param namespace: The namespace to store the attribute value in.

        @return: A read-write property object that performs self.get calls
            to read the value and self.set calls to set it.
        """
        def getter(job):
            state = getattr(job, state_attribute)
            return state.get(namespace, property_attribute, default)
        def setter(job, value):
            state = getattr(job, state_attribute)
            state.set(namespace, property_attribute, value)
        return property(getter, setter)


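# Usage sketch (illustrative comment only; the backing-file path is
# hypothetical): values are namespaced, deep-copied on get/set, and mirrored
# to the backing file once one is configured.
#
#     state = job_state()
#     state.set_backing_file('/tmp/job_state.pickle')
#     state.set('client', 'version', 42)
#     state.get('client', 'version')           # -> 42
#     state.get('client', 'missing', None)     # -> None instead of KeyError
#     state.discard_namespace('client')

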
class status_log_entry(object):
    """Represents a single status log entry."""

    RENDERED_NONE_VALUE = '----'
    TIMESTAMP_FIELD = 'timestamp'
    LOCALTIME_FIELD = 'localtime'

    # non-space whitespace is forbidden in any fields
    BAD_CHAR_REGEX = re.compile(r'[\t\n\r\v\f]')

    def _init_message(self, message):
        """Handle the message which describes the event to be recorded.

        Break the message into a single-line message that goes into the
        database, and a block of additional lines that goes into the status
        log but will never be parsed. When a bad character is detected in the
        message it is replaced with a space instead of raising an exception,
        so that the resulting line can still be parsed by the tko parser.

        The filtered single-line message is stored in self.message and the
        remaining lines in self.extra_message_lines.

        @param message: the input message.
        """
        message_lines = message.splitlines()
        if message_lines:
            self.message = message_lines[0]
            self.extra_message_lines = message_lines[1:]
        else:
            self.message = ''
            self.extra_message_lines = []

        self.message = self.message.replace('\t', ' ' * 8)
        self.message = self.BAD_CHAR_REGEX.sub(' ', self.message)


    def __init__(self, status_code, subdir, operation, message, fields,
                 timestamp=None):
        """Construct a status.log entry.

        @param status_code: A message status code. Must match the codes
            accepted by autotest_lib.common_lib.log.is_valid_status.
        @param subdir: A valid job subdirectory, or None.
        @param operation: Description of the operation, or None.
        @param message: A printable string describing the event to be recorded.
        @param fields: A dictionary of arbitrary alphanumeric key=value pairs
            to be included in the log, or None.
        @param timestamp: An optional integer timestamp, in the same format
            as a time.time() timestamp. If unspecified, the current time is
            used.

        @raise ValueError: if any of the parameters are invalid
        """
        if not log.is_valid_status(status_code):
            raise ValueError('status code %r is not valid' % status_code)
        self.status_code = status_code

        if subdir and self.BAD_CHAR_REGEX.search(subdir):
            raise ValueError('Invalid character in subdir string')
        self.subdir = subdir

        if operation and self.BAD_CHAR_REGEX.search(operation):
            raise ValueError('Invalid character in operation string')
        self.operation = operation

        self._init_message(message)

        if not fields:
            self.fields = {}
        else:
            self.fields = fields.copy()
            for key, value in self.fields.iteritems():
                if type(value) is int:
                    value = str(value)
                if self.BAD_CHAR_REGEX.search(key + value):
                    raise ValueError('Invalid character in %r=%r field'
                                     % (key, value))

        # build up the timestamp
        if timestamp is None:
            timestamp = int(time.time())
        self.fields[self.TIMESTAMP_FIELD] = str(timestamp)
        self.fields[self.LOCALTIME_FIELD] = time.strftime(
            '%b %d %H:%M:%S', time.localtime(timestamp))


    def is_start(self):
        """Indicates if this status log is the start of a new nested block.

        @return: A boolean indicating if this entry starts a new nested block.
        """
        return self.status_code == 'START'


    def is_end(self):
        """Indicates if this status log is the end of a nested block.

        @return: A boolean indicating if this entry ends a nested block.
        """
        return self.status_code.startswith('END ')


    def render(self):
        """Render the status log entry into a text string.

        @return: A text string suitable for writing into a status log file.
        """
        # combine all the log line data into a tab-delimited string
        subdir = self.subdir or self.RENDERED_NONE_VALUE
        operation = self.operation or self.RENDERED_NONE_VALUE
        extra_fields = ['%s=%s' % field for field in self.fields.iteritems()]
        line_items = [self.status_code, subdir, operation]
        line_items += extra_fields + [self.message]
        first_line = '\t'.join(line_items)

        # append the extra unparsable lines, two-space indented
        all_lines = [first_line]
        all_lines += ['  ' + line for line in self.extra_message_lines]
        return '\n'.join(all_lines)


    @classmethod
    def parse(cls, line):
        """Parse a status log entry from a text string.

        This method is the inverse of render; it should always be true that
        parse(entry.render()) produces a new status_log_entry equivalent to
        entry.

        @return: A new status_log_entry instance with fields extracted from the
            given status line. If the line is an extra message line then None
            is returned.
        """
        # extra message lines are always prepended with two spaces
        if line.startswith('  '):
            return None

        line = line.lstrip('\t')  # ignore indentation
        entry_parts = line.split('\t')
        if len(entry_parts) < 4:
            raise ValueError('%r is not a valid status line' % line)
        status_code, subdir, operation = entry_parts[:3]
        if subdir == cls.RENDERED_NONE_VALUE:
            subdir = None
        if operation == cls.RENDERED_NONE_VALUE:
            operation = None
        message = entry_parts[-1]
        fields = dict(part.split('=', 1) for part in entry_parts[3:-1])
        if cls.TIMESTAMP_FIELD in fields:
            timestamp = int(fields[cls.TIMESTAMP_FIELD])
        else:
            timestamp = None
        return cls(status_code, subdir, operation, message, fields, timestamp)


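# Round-trip sketch (illustrative values only): render() produces a
# tab-delimited status line and parse() is its inverse.
#
#     entry = status_log_entry('START', None, 'dummy_Pass', '', None)
#     line = entry.render()
#     # e.g. 'START\t----\tdummy_Pass\ttimestamp=...\tlocaltime=...\t'
#     same = status_log_entry.parse(line)       # equivalent entry
#     status_log_entry.parse('  extra detail')  # -> None (extra message line)

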
class status_indenter(object):
    """Abstract interface that a status log indenter should use."""

    @property
    def indent(self):
        raise NotImplementedError


    def increment(self):
        """Increase indentation by one level."""
        raise NotImplementedError


    def decrement(self):
        """Decrease indentation by one level."""
        raise NotImplementedError


class status_logger(object):
    """Represents a status log file. Responsible for translating messages
    into on-disk status log lines.

    @property global_filename: The filename to write top-level logs to.
    @property subdir_filename: The filename to write subdir-level logs to.
    """
    def __init__(self, job, indenter, global_filename='status',
                 subdir_filename='status', record_hook=None):
        """Construct a logger instance.

        @param job: A reference to the job object this is logging for. Only a
            weak reference to the job is held, to avoid a
            status_logger <-> job circular reference.
        @param indenter: A status_indenter instance, for tracking the
            indentation level.
        @param global_filename: An optional filename to initialize the
            self.global_filename attribute.
        @param subdir_filename: An optional filename to initialize the
            self.subdir_filename attribute.
        @param record_hook: An optional function to be called before an entry
            is logged. The function should expect a single parameter, a
            copy of the status_log_entry object.
        """
        self._jobref = weakref.ref(job)
        self._indenter = indenter
        self.global_filename = global_filename
        self.subdir_filename = subdir_filename
        self._record_hook = record_hook


    def render_entry(self, log_entry):
        """Render a status_log_entry as it would be written to a log file.

        @param log_entry: A status_log_entry instance to be rendered.

        @return: The status log entry, rendered as it would be written to the
            logs (including indentation).
        """
        if log_entry.is_end():
            indent = self._indenter.indent - 1
        else:
            indent = self._indenter.indent
        return '\t' * indent + log_entry.render().rstrip('\n')


    def record_entry(self, log_entry, log_in_subdir=True):
        """Record a status_log_entry into the appropriate status log files.

        @param log_entry: A status_log_entry instance to be recorded into the
            status logs.
        @param log_in_subdir: A boolean that indicates (when true) that subdir
            logs should be written into the subdirectory status log file.
        """
        # acquire a strong reference for the duration of the method
        job = self._jobref()
        if job is None:
            logging.warning('Something attempted to write a status log entry '
                            'after its job terminated, ignoring the attempt.')
            logging.warning(traceback.format_stack())
            return

        # call the record hook if one was given
        if self._record_hook:
            self._record_hook(log_entry)

        # figure out where we need to log to
        log_files = [os.path.join(job.resultdir, self.global_filename)]
        if log_in_subdir and log_entry.subdir:
            log_files.append(os.path.join(job.resultdir, log_entry.subdir,
                                          self.subdir_filename))

        # write out the entry to the log files
        log_text = self.render_entry(log_entry)
        for log_file in log_files:
            fileobj = open(log_file, 'a')
            try:
                print >> fileobj, log_text
            finally:
                fileobj.close()

        # adjust the indentation if this was a START or END entry
        if log_entry.is_start():
            self._indenter.increment()
        elif log_entry.is_end():
            self._indenter.decrement()


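# Minimal sketch of a concrete indenter (hypothetical, for illustration): the
# logger only requires the indent/increment/decrement interface plus a job
# object that exposes a resultdir.
#
#     class simple_indenter(status_indenter):
#         def __init__(self):
#             self._indent = 0
#         @property
#         def indent(self):
#             return self._indent
#         def increment(self):
#             self._indent += 1
#         def decrement(self):
#             self._indent = max(0, self._indent - 1)
#
#     logger = status_logger(job, simple_indenter())
#     logger.record_entry(status_log_entry('START', None, 'my_op', '', None))

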
class base_job(object):
    """An abstract base class for the various autotest job classes.

    @property autodir: The top level autotest directory.
    @property clientdir: The autotest client directory.
    @property serverdir: The autotest server directory. [OPTIONAL]
    @property resultdir: The directory where results should be written out.
        [WRITABLE]

    @property pkgdir: The job packages directory. [WRITABLE]
    @property tmpdir: The job temporary directory. [WRITABLE]
    @property testdir: The job test directory. [WRITABLE]
    @property site_testdir: The job site test directory. [WRITABLE]

    @property bindir: The client bin/ directory.
    @property profdir: The client profilers/ directory.
    @property toolsdir: The client tools/ directory.

    @property control: A path to the control file to be executed. [OPTIONAL]
    @property hosts: A set of all live Host objects currently in use by the
        job. Code running in the context of a local client can safely assume
        that this set contains only a single entry.
    @property machines: A list of the machine names associated with the job.
    @property user: The user executing the job.
    @property tag: A tag identifying the job. Often used by the scheduler to
        give a name of the form NUMBER-USERNAME/HOSTNAME.
    @property args: A list of additional miscellaneous command-line arguments
        provided when starting the job.

    @property automatic_test_tag: A string which, if set, will be automatically
        added to the test name when running tests.

    @property default_profile_only: A boolean indicating the default value of
        profile_only used by test.execute. [PERSISTENT]
    @property drop_caches: A boolean indicating if caches should be dropped
        before each test is executed.
    @property drop_caches_between_iterations: A boolean indicating if caches
        should be dropped before each test iteration is executed.
    @property run_test_cleanup: A boolean indicating if test.cleanup should be
        run by default after a test completes, if the run_cleanup argument is
        not specified. [PERSISTENT]

    @property num_tests_run: The number of tests run during the job. [OPTIONAL]
    @property num_tests_failed: The number of tests failed during the job.
        [OPTIONAL]

    @property harness: An instance of the client test harness. Only available
        in contexts where client test execution happens. [OPTIONAL]
    @property logging: An instance of the logging manager associated with the
        job.
    @property profilers: An instance of the profiler manager associated with
        the job.
    @property sysinfo: An instance of the sysinfo object. Only available in
        contexts where it's possible to collect sysinfo.
    @property warning_manager: A class for managing which types of WARN
        messages should be logged and which should be suppressed. [OPTIONAL]
    @property warning_loggers: A set of readable streams that will be monitored
        for WARN messages to be logged. [OPTIONAL]
    @property max_result_size_KB: Maximum size, in KB, of test results to be
        collected. [OPTIONAL]

    Abstract methods:
        _find_base_directories [CLASSMETHOD]
            Returns the location of autodir, clientdir and serverdir

        _find_resultdir
            Returns the location of resultdir. Gets a copy of any parameters
            passed into base_job.__init__. Can return None to indicate that
            no resultdir is to be used.

        _get_status_logger
            Returns a status_logger instance for recording job status logs.
    """

    # capture the dependency on several helper classes with factories
    _job_directory = job_directory
    _job_state = job_state


    # all the job directory attributes
    autodir = _job_directory.property_factory('autodir')
    clientdir = _job_directory.property_factory('clientdir')
    serverdir = _job_directory.property_factory('serverdir')
    resultdir = _job_directory.property_factory('resultdir')
    pkgdir = _job_directory.property_factory('pkgdir')
    tmpdir = _job_directory.property_factory('tmpdir')
    testdir = _job_directory.property_factory('testdir')
    site_testdir = _job_directory.property_factory('site_testdir')
    bindir = _job_directory.property_factory('bindir')
    profdir = _job_directory.property_factory('profdir')
    toolsdir = _job_directory.property_factory('toolsdir')


    # all the generic persistent properties
    tag = _job_state.property_factory('_state', 'tag', '')
    default_profile_only = _job_state.property_factory(
        '_state', 'default_profile_only', False)
    run_test_cleanup = _job_state.property_factory(
        '_state', 'run_test_cleanup', True)
    automatic_test_tag = _job_state.property_factory(
        '_state', 'automatic_test_tag', None)
    max_result_size_KB = _job_state.property_factory(
        '_state', 'max_result_size_KB', 0)
    fast = _job_state.property_factory(
        '_state', 'fast', False)

    # the use_sequence_number property
    _sequence_number = _job_state.property_factory(
        '_state', '_sequence_number', None)
    def _get_use_sequence_number(self):
        return bool(self._sequence_number)
    def _set_use_sequence_number(self, value):
        if value:
            self._sequence_number = 1
        else:
            self._sequence_number = None
    use_sequence_number = property(_get_use_sequence_number,
                                   _set_use_sequence_number)

    # parent_job_id is passed in from the autoserv command line. It is only
    # used by the server job. The property is added here so that the unittest
    # (base_job_unittest.py) can consistently validate the public properties
    # of a base_job object.
    parent_job_id = None

    def __init__(self, *args, **dargs):
        # initialize the base directories, all others are relative to these
        autodir, clientdir, serverdir = self._find_base_directories()
        self._autodir = self._job_directory(autodir)
        self._clientdir = self._job_directory(clientdir)
        # TODO(scottz): crosbug.com/38259, needed to pass unittests for now.
        self.label = None
        if serverdir:
            self._serverdir = self._job_directory(serverdir)
        else:
            self._serverdir = None

        # initialize all the other directories relative to the base ones
        self._initialize_dir_properties()
        self._resultdir = self._job_directory(
            self._find_resultdir(*args, **dargs), True)
        self._execution_contexts = []

        # initialize all the job state
        self._state = self._job_state()


    @classmethod
    def _find_base_directories(cls):
        raise NotImplementedError()


    def _initialize_dir_properties(self):
        """
        Initializes all the secondary self.*dir properties. Requires autodir,
        clientdir and serverdir to already be initialized.
        """
        # create some stubs for use as shortcuts
        def readonly_dir(*args):
            return self._job_directory(os.path.join(*args))
        def readwrite_dir(*args):
            return self._job_directory(os.path.join(*args), True)

        # various client-specific directories
        self._bindir = readonly_dir(self.clientdir, 'bin')
        self._profdir = readonly_dir(self.clientdir, 'profilers')
        self._pkgdir = readwrite_dir(self.clientdir, 'packages')
        self._toolsdir = readonly_dir(self.clientdir, 'tools')

        # directories which are in serverdir on a server, clientdir on a
        # client: tmp, tests, and site_tests need to be read-write for the
        # client, but only read-only for the server.
        if self.serverdir:
            root = self.serverdir
            r_or_rw_dir = readonly_dir
        else:
            root = self.clientdir
            r_or_rw_dir = readwrite_dir
        self._testdir = r_or_rw_dir(root, 'tests')
        self._site_testdir = r_or_rw_dir(root, 'site_tests')

        # various server-specific directories
        if self.serverdir:
            self._tmpdir = readwrite_dir(tempfile.gettempdir())
        else:
            self._tmpdir = readwrite_dir(root, 'tmp')


    def _find_resultdir(self, *args, **dargs):
        raise NotImplementedError()


    def push_execution_context(self, resultdir):
        """
        Save off the current context of the job and change to the given one.

        In practice this method just changes the resultdir, but it may become
        more extensive in the future. The expected use case is for when a child
        job needs to be executed in some sort of nested context (for example
        the way parallel_simple does). The original context can be restored
        with a pop_execution_context call.

        @param resultdir: The new resultdir, relative to the current one.
        """
        new_dir = self._job_directory(
            os.path.join(self.resultdir, resultdir), True)
        self._execution_contexts.append(self._resultdir)
        self._resultdir = new_dir


    def pop_execution_context(self):
        """
        Reverse the effects of the previous push_execution_context call.

        @raise IndexError: raised when the stack of contexts is empty.
        """
        if not self._execution_contexts:
            raise IndexError('No old execution context to restore')
        self._resultdir = self._execution_contexts.pop()


    def get_state(self, name, default=_job_state.NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        try:
            return self._state.get('public', name, default=default)
        except KeyError:
            raise KeyError(name)


    def set_state(self, name, value):
        """Saves the value given with the provided name.

        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        self._state.set('public', name, value)


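    # Usage sketch (illustrative comment only): job-level state lives in the
    # 'public' namespace of the underlying job_state instance.
    #
    #     job.set_state('storage_mount', '/mnt/stateful')
    #     job.get_state('storage_mount')           # -> '/mnt/stateful'
    #     job.get_state('not_set', default=None)   # -> None

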
    def _build_tagged_test_name(self, testname, dargs):
        """Builds the fully tagged testname and subdirectory for job.run_test.

        @param testname: The base name of the test
        @param dargs: The ** arguments passed to run_test. Any arguments
            consumed by this method will be removed from the dictionary.

        @return: A 3-tuple of the full name of the test, the subdirectory it
            should be stored in, and the full tag of the subdir.
        """
        tag_parts = []

        # build up the parts of the tag used for the test name
        master_testpath = dargs.get('master_testpath', "")
        base_tag = dargs.pop('tag', None)
        if base_tag:
            tag_parts.append(str(base_tag))
        if self.use_sequence_number:
            tag_parts.append('_%02d_' % self._sequence_number)
            self._sequence_number += 1
        if self.automatic_test_tag:
            tag_parts.append(self.automatic_test_tag)
        full_testname = '.'.join([testname] + tag_parts)

        # build up the subdir and tag as well
        subdir_tag = dargs.pop('subdir_tag', None)
        if subdir_tag:
            tag_parts.append(subdir_tag)
        subdir = '.'.join([testname] + tag_parts)
        subdir = os.path.join(master_testpath, subdir)
        tag = '.'.join(tag_parts)

        return full_testname, subdir, tag


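    # Worked example (hypothetical arguments): for testname 'dummy_Pass' with
    # dargs {'tag': 'foo', 'subdir_tag': 'bar'}, no sequence number and no
    # automatic_test_tag, _build_tagged_test_name returns:
    #     full_testname == 'dummy_Pass.foo'
    #     subdir        == 'dummy_Pass.foo.bar'
    #     tag           == 'foo.bar'

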
    def _make_test_outputdir(self, subdir):
        """Creates an output directory for a test to run in.

        @param subdir: The subdirectory of the test. Generally computed by
            _build_tagged_test_name.

        @return: A job_directory instance corresponding to the outputdir of
            the test.
        @raise TestError: If the output directory is invalid.
        """
        # explicitly check that this subdirectory is new
        path = os.path.join(self.resultdir, subdir)
        if os.path.exists(path):
            msg = ('%s already exists; multiple tests cannot run with the '
                   'same subdirectory' % subdir)
            raise error.TestError(msg)

        # create the outputdir and raise a TestError if it isn't valid
        try:
            outputdir = self._job_directory(path, True)
            return outputdir
        except self._job_directory.JobDirectoryException, e:
            logging.exception('%s directory creation failed with %s',
                              subdir, e)
            raise error.TestError('%s directory creation failed' % subdir)


    def record(self, status_code, subdir, operation, status='',
               optional_fields=None):
        """Record a job-level status event.

        Logs an event noteworthy to the Autotest job as a whole. Messages will
        be written into a global status log file, as well as a subdir-local
        status log file (if subdir is specified).

        @param status_code: A string status code describing the type of status
            entry being recorded. It must pass log.is_valid_status to be
            considered valid.
        @param subdir: A specific results subdirectory this also applies to, or
            None. If not None the subdirectory must exist.
        @param operation: A string describing the operation that was run.
        @param status: An optional human-readable message describing the status
            entry, for example an error message or "completed successfully".
        @param optional_fields: An optional dictionary of additional named
            fields to be included with the status message. Timestamp and
            localtime entries are always generated with the current time and
            added to this dictionary.
        """
        entry = status_log_entry(status_code, subdir, operation, status,
                                 optional_fields)
        self.record_entry(entry)


    def record_entry(self, entry, log_in_subdir=True):
        """Record a job-level status event, using a status_log_entry.

        This is the same as self.record but using an existing status log
        entry object rather than constructing one for you.

        @param entry: A status_log_entry object
        @param log_in_subdir: A boolean that indicates (when true) that subdir
            logs should be written into the subdirectory status log file.
        """
        self._get_status_logger().record_entry(entry, log_in_subdir)
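    # Usage sketch (illustrative status values): concrete job classes log
    # events through record(), which builds a status_log_entry and hands it
    # to the logger returned by _get_status_logger().
    #
    #     job.record('START', None, 'dummy_Pass')
    #     job.record('GOOD', None, 'dummy_Pass', 'completed successfully')
    #     job.record('END GOOD', None, 'dummy_Pass')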