# pylint: disable=missing-docstring

import os, copy, logging, errno, fcntl, time, re, weakref, traceback
import tarfile
import cPickle as pickle
import tempfile
from autotest_lib.client.common_lib import autotemp, error, log


class job_directory(object):
    """Represents a job.*dir directory."""


    class JobDirectoryException(error.AutotestError):
        """Generic job_directory exception superclass."""


    class MissingDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job does not exist."""
        def __init__(self, path):
            Exception.__init__(self, 'Directory %s does not exist' % path)


    class UncreatableDirectoryException(JobDirectoryException):
        """Raised when a directory required by the job is missing and cannot
        be created."""
        def __init__(self, path, error):
            msg = 'Creation of directory %s failed with exception %s'
            msg %= (path, error)
            Exception.__init__(self, msg)


    class UnwritableDirectoryException(JobDirectoryException):
        """Raised when a writable directory required by the job exists
        but is not writable."""
        def __init__(self, path):
            msg = 'Directory %s exists but is not writable' % path
            Exception.__init__(self, msg)


    def __init__(self, path, is_writable=False):
        """
        Instantiate a job directory.

        @param path: The path of the directory. If None a temporary directory
            will be created instead.
        @param is_writable: If True, expect the directory to be writable.

        @raise MissingDirectoryException: raised if is_writable=False and the
            directory does not exist.
        @raise UnwritableDirectoryException: raised if is_writable=True and
            the directory exists but is not writable.
        @raise UncreatableDirectoryException: raised if is_writable=True, the
            directory does not exist and it cannot be created.
        """
        if path is None:
            if is_writable:
                self._tempdir = autotemp.tempdir(unique_id='autotest')
                self.path = self._tempdir.name
            else:
                raise self.MissingDirectoryException(path)
        else:
            self._tempdir = None
            self.path = path
        self._ensure_valid(is_writable)


    def _ensure_valid(self, is_writable):
        """
        Ensure that this is a valid directory.

        Will check if a directory exists, can optionally also enforce that
        it be writable. It can optionally create it if necessary. Creation
        will still fail if the path is rooted in a non-writable directory, or
        if a file already exists at the given location.

        @param dir_path A path where a directory should be located
        @param is_writable A boolean indicating that the directory should
            not only exist, but also be writable.

        @raises MissingDirectoryException raised if is_writable=False and the
            directory does not exist.
        @raises UnwritableDirectoryException raised if is_writable=True and
            the directory is not writable.
        @raises UncreatableDirectoryException raised if is_writable=True, the
            directory does not exist and it cannot be created
        """
        # ensure the directory exists
        if is_writable:
            try:
                os.makedirs(self.path)
            except OSError, e:
                if e.errno != errno.EEXIST or not os.path.isdir(self.path):
                    raise self.UncreatableDirectoryException(self.path, e)
        elif not os.path.isdir(self.path):
            raise self.MissingDirectoryException(self.path)

        # if is_writable=True, also check that the directory is writable
        if is_writable and not os.access(self.path, os.W_OK):
            raise self.UnwritableDirectoryException(self.path)


    @staticmethod
    def property_factory(attribute):
        """
        Create a job.*dir -> job._*dir.path property accessor.

        @param attribute A string with the name of the attribute this is
            exposed as. '_'+attribute must then be an attribute that holds
            either None or a job_directory-like object.

        @returns A read-only property object that exposes a job_directory path
        """
        @property
        def dir_property(self):
            underlying_attribute = getattr(self, '_' + attribute)
            if underlying_attribute is None:
                return None
            else:
                return underlying_attribute.path
        return dir_property

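# Illustrative sketch of how job_directory.property_factory is meant to be
# used (the class and attribute names here are hypothetical; base_job further
# below shows the real usage):
#
#     class some_job(object):
#         resultdir = job_directory.property_factory('resultdir')
#
#         def __init__(self, path):
#             self._resultdir = job_directory(path, is_writable=True)
#
# Reading some_job(...).resultdir then returns self._resultdir.path, or None
# if self._resultdir is None.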

# decorator for use with job_state methods
def with_backing_lock(method):
    """A decorator to perform a lock-*-unlock cycle.

    When applied to a method, this decorator will automatically wrap
    calls to the method in a backing file lock before the call, followed
    by a backing file unlock afterwards.
    """
    def wrapped_method(self, *args, **dargs):
        already_have_lock = self._backing_file_lock is not None
        if not already_have_lock:
            self._lock_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            if not already_have_lock:
                self._unlock_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method


# decorator for use with job_state methods
def with_backing_file(method):
    """A decorator to perform a lock-read-*-write-unlock cycle.

    When applied to a method, this decorator will automatically wrap
    calls to the method in a lock-and-read before the call, followed by a
    write-and-unlock. Any operation that is reading or writing state
    should be decorated with this method to ensure that backing file
    state is consistently maintained.
    """
    @with_backing_lock
    def wrapped_method(self, *args, **dargs):
        self._read_from_backing_file()
        try:
            return method(self, *args, **dargs)
        finally:
            self._write_to_backing_file()
    wrapped_method.__name__ = method.__name__
    wrapped_method.__doc__ = method.__doc__
    return wrapped_method

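# A minimal sketch of the intended use of these decorators (job_state below
# is the real consumer; the class and method shown here are illustrative only):
#
#     class some_state(object):
#         @with_backing_file
#         def set(self, name, value):
#             self._state[name] = value
#
# Every call to set() is then bracketed by a read of the backing file before
# the body runs and a write of the backing file afterwards, with the whole
# cycle held under the backing file lock.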

class job_state(object):
    """A class for managing explicit job and user state, optionally persistent.

    The class allows you to save state by name (like a dictionary). Any state
    stored in this class should be picklable and deep copyable. While this is
    not enforced it is recommended that only valid python identifiers be used
    as names. Additionally, the namespace 'stateful_property' is used for
    storing the values associated with properties constructed using the
    property_factory method.
    """

    NO_DEFAULT = object()
    PICKLE_PROTOCOL = 2  # highest protocol available in python 2.4


    def __init__(self):
        """Initialize the job state."""
        self._state = {}
        self._backing_file = None
        self._backing_file_initialized = False
        self._backing_file_lock = None


    def _lock_backing_file(self):
        """Acquire a lock on the backing file."""
        if self._backing_file:
            self._backing_file_lock = open(self._backing_file, 'a')
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_EX)


    def _unlock_backing_file(self):
        """Release a lock on the backing file."""
        if self._backing_file_lock:
            fcntl.flock(self._backing_file_lock, fcntl.LOCK_UN)
            self._backing_file_lock.close()
            self._backing_file_lock = None


    def read_from_file(self, file_path, merge=True):
        """Read in any state from the file at file_path.

        When merge=True, any state specified only in-memory will be preserved.
        Any state specified on-disk will be set in-memory, even if an in-memory
        setting already exists.

        @param file_path: The path where the state should be read from. It must
            exist but it can be empty.
        @param merge: If true, merge the on-disk state with the in-memory
            state. If false, replace the in-memory state with the on-disk
            state.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """

        # we can assume that the file exists
        if os.path.getsize(file_path) == 0:
            on_disk_state = {}
        else:
            on_disk_state = pickle.load(open(file_path))

        if merge:
            # merge the on-disk state with the in-memory state
            for namespace, namespace_dict in on_disk_state.iteritems():
                in_memory_namespace = self._state.setdefault(namespace, {})
                for name, value in namespace_dict.iteritems():
                    if name in in_memory_namespace:
                        if in_memory_namespace[name] != value:
                            logging.info('Persistent value of %s.%s from %s '
                                         'overriding existing in-memory '
                                         'value', namespace, name, file_path)
                            in_memory_namespace[name] = value
                        else:
                            logging.debug('Value of %s.%s is unchanged, '
                                          'skipping import', namespace, name)
                    else:
                        logging.debug('Importing %s.%s from state file %s',
                                      namespace, name, file_path)
                        in_memory_namespace[name] = value
        else:
            # just replace the in-memory state with the on-disk state
            self._state = on_disk_state

        # lock the backing file before we refresh it
        with_backing_lock(self.__class__._write_to_backing_file)(self)


    def write_to_file(self, file_path):
        """Write out the current state to the given path.

        @param file_path: The path where the state should be written out to.
            Must be writable.

        @warning: This method is intentionally concurrency-unsafe. It makes no
            attempt to control concurrent access to the file at file_path.
        """
        outfile = open(file_path, 'w')
        try:
            pickle.dump(self._state, outfile, self.PICKLE_PROTOCOL)
        finally:
            outfile.close()


    def _read_from_backing_file(self):
        """Refresh the current state from the backing file.

        If the backing file has never been read before (indicated by checking
        self._backing_file_initialized) it will merge the file with the
        in-memory state, rather than overwriting it.
        """
        if self._backing_file:
            merge_backing_file = not self._backing_file_initialized
            self.read_from_file(self._backing_file, merge=merge_backing_file)
            self._backing_file_initialized = True


    def _write_to_backing_file(self):
        """Flush the current state to the backing file."""
        if self._backing_file:
            self.write_to_file(self._backing_file)


    @with_backing_file
    def _synchronize_backing_file(self):
        """Synchronizes the contents of the in-memory and on-disk state."""
        # state is implicitly synchronized in _with_backing_file methods
        pass


    def set_backing_file(self, file_path):
        """Change the path used as the backing file for the persistent state.

        When a new backing file is specified, if a file already exists then
        its contents will be added into the current state, with conflicts
        between the file and memory being resolved in favor of the file
        contents. The file will then be kept in sync with the (combined)
        in-memory state. The syncing can be disabled by setting this to None.

        @param file_path: A path on the filesystem that can be read from and
            written to, or None to turn off the backing store.
        """
        self._synchronize_backing_file()
        self._backing_file = file_path
        self._backing_file_initialized = False
        self._synchronize_backing_file()


    @with_backing_file
    def get(self, namespace, name, default=NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        if self.has(namespace, name):
            return copy.deepcopy(self._state[namespace][name])
        elif default is self.NO_DEFAULT:
            raise KeyError('No key %s in namespace %s' % (name, namespace))
        else:
            return default


    @with_backing_file
    def set(self, namespace, name, value):
        """Saves the value given with the provided name.

        @param namespace: The namespace that the property should be stored in.
        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        namespace_dict = self._state.setdefault(namespace, {})
        namespace_dict[name] = copy.deepcopy(value)
        logging.debug('Persistent state %s.%s now set to %r', namespace,
                      name, value)

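    # Illustrative behaviour of get()/set() (namespace, key and values are
    # hypothetical): because both methods deep-copy, later mutations of the
    # caller's object are not persisted:
    #
    #     state = job_state()
    #     settings = {'timeout': 30}
    #     state.set('client', 'settings', settings)
    #     settings['timeout'] = 60
    #     state.get('client', 'settings')   # -> {'timeout': 30}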

    @with_backing_file
    def has(self, namespace, name):
        """Return a boolean indicating if namespace.name is defined.

        @param namespace: The namespace to check for a definition.
        @param name: The name to check for a definition.

        @return: True if the given name is defined in the given namespace and
            False otherwise.
        """
        return namespace in self._state and name in self._state[namespace]


    @with_backing_file
    def discard(self, namespace, name):
        """If namespace.name is a defined value, deletes it.

        @param namespace: The namespace that the property is stored in.
        @param name: The name the value is saved with.
        """
        if self.has(namespace, name):
            del self._state[namespace][name]
            if len(self._state[namespace]) == 0:
                del self._state[namespace]
            logging.debug('Persistent state %s.%s deleted', namespace, name)
        else:
            logging.debug(
                'Persistent state %s.%s not defined so nothing is discarded',
                namespace, name)


    @with_backing_file
    def discard_namespace(self, namespace):
        """Delete all defined namespace.* names.

        @param namespace: The namespace to be cleared.
        """
        if namespace in self._state:
            del self._state[namespace]
        logging.debug('Persistent state %s.* deleted', namespace)


    @staticmethod
    def property_factory(state_attribute, property_attribute, default,
                         namespace='global_properties'):
        """
        Create a property object for an attribute using self.get and self.set.

        @param state_attribute: A string with the name of the attribute on
            job that contains the job_state instance.
        @param property_attribute: A string with the name of the attribute
            this property is exposed as.
        @param default: A default value that should be used for this property
            if it is not set.
        @param namespace: The namespace to store the attribute value in.

        @return: A read-write property object that performs self.get calls
            to read the value and self.set calls to set it.
        """
        def getter(job):
            state = getattr(job, state_attribute)
            return state.get(namespace, property_attribute, default)
        def setter(job, value):
            state = getattr(job, state_attribute)
            state.set(namespace, property_attribute, value)
        return property(getter, setter)


class status_log_entry(object):
    """Represents a single status log entry."""

    RENDERED_NONE_VALUE = '----'
    TIMESTAMP_FIELD = 'timestamp'
    LOCALTIME_FIELD = 'localtime'

    # non-space whitespace is forbidden in any fields
    BAD_CHAR_REGEX = re.compile(r'[\t\n\r\v\f]')

    def _init_message(self, message):
        """Handle the message which describes the event to be recorded.

        Break the message line into a single-line message that goes into the
        database, and a block of additional lines that goes into the status
        log but will never be parsed.
        When a bad character is detected in the message, it is replaced with
        a space instead of leaving a line that the tko parser cannot parse.

        @param message: the input message.

        @return: filtered message without bad characters.
        """
        message_lines = message.splitlines()
        if message_lines:
            self.message = message_lines[0]
            self.extra_message_lines = message_lines[1:]
        else:
            self.message = ''
            self.extra_message_lines = []

        self.message = self.message.replace('\t', ' ' * 8)
        self.message = self.BAD_CHAR_REGEX.sub(' ', self.message)


    def __init__(self, status_code, subdir, operation, message, fields,
                 timestamp=None):
        """Construct a status.log entry.

        @param status_code: A message status code. Must match the codes
            accepted by autotest_lib.common_lib.log.is_valid_status.
        @param subdir: A valid job subdirectory, or None.
        @param operation: Description of the operation, or None.
        @param message: A printable string describing the event to be recorded.
        @param fields: A dictionary of arbitrary alphanumeric key=value pairs
            to be included in the log, or None.
        @param timestamp: An optional integer timestamp, in the same format
            as a time.time() timestamp. If unspecified, the current time is
            used.

        @raise ValueError: if any of the parameters are invalid
        """
        if not log.is_valid_status(status_code):
            raise ValueError('status code %r is not valid' % status_code)
        self.status_code = status_code

        if subdir and self.BAD_CHAR_REGEX.search(subdir):
            raise ValueError('Invalid character in subdir string')
        self.subdir = subdir

        if operation and self.BAD_CHAR_REGEX.search(operation):
            raise ValueError('Invalid character in operation string')
        self.operation = operation

        self._init_message(message)

        if not fields:
            self.fields = {}
        else:
            self.fields = fields.copy()
        for key, value in self.fields.iteritems():
            if type(value) is int:
                value = str(value)
            if self.BAD_CHAR_REGEX.search(key + value):
                raise ValueError('Invalid character in %r=%r field'
                                 % (key, value))

        # build up the timestamp
        if timestamp is None:
            timestamp = int(time.time())
        self.fields[self.TIMESTAMP_FIELD] = str(timestamp)
        self.fields[self.LOCALTIME_FIELD] = time.strftime(
            '%b %d %H:%M:%S', time.localtime(timestamp))


    def is_start(self):
        """Indicates if this status log is the start of a new nested block.

        @return: A boolean indicating if this entry starts a new nested block.
        """
        return self.status_code == 'START'


    def is_end(self):
        """Indicates if this status log is the end of a nested block.

        @return: A boolean indicating if this entry ends a nested block.
        """
        return self.status_code.startswith('END ')


    def render(self):
        """Render the status log entry into a text string.

        @return: A text string suitable for writing into a status log file.
        """
        # combine all the log line data into a tab-delimited string
        subdir = self.subdir or self.RENDERED_NONE_VALUE
        operation = self.operation or self.RENDERED_NONE_VALUE
        extra_fields = ['%s=%s' % field for field in self.fields.iteritems()]
        line_items = [self.status_code, subdir, operation]
        line_items += extra_fields + [self.message]
        first_line = '\t'.join(line_items)

        # append the extra unparsable lines, two-space indented
        all_lines = [first_line]
        all_lines += ['  ' + line for line in self.extra_message_lines]
        return '\n'.join(all_lines)


    @classmethod
    def parse(cls, line):
        """Parse a status log entry from a text string.

        This method is the inverse of render; it should always be true that
        parse(entry.render()) produces a new status_log_entry equivalent to
        entry.

        @return: A new status_log_entry instance with fields extracted from the
            given status line. If the line is an extra message line then None
            is returned.
        """
        # extra message lines are always prepended with two spaces
        if line.startswith('  '):
            return None

        line = line.lstrip('\t')  # ignore indentation
        entry_parts = line.split('\t')
        if len(entry_parts) < 4:
            raise ValueError('%r is not a valid status line' % line)
        status_code, subdir, operation = entry_parts[:3]
        if subdir == cls.RENDERED_NONE_VALUE:
            subdir = None
        if operation == cls.RENDERED_NONE_VALUE:
            operation = None
        message = entry_parts[-1]
        fields = dict(part.split('=', 1) for part in entry_parts[3:-1])
        if cls.TIMESTAMP_FIELD in fields:
            timestamp = int(fields[cls.TIMESTAMP_FIELD])
        else:
            timestamp = None
        return cls(status_code, subdir, operation, message, fields, timestamp)

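    # Illustrative round trip (values are hypothetical): render() produces a
    # tab-delimited line such as
    #
    #     GOOD<TAB>dbench<TAB>dbench<TAB>timestamp=1234567890<TAB>localtime=Feb 13 23:31:30<TAB>completed successfully
    #
    # (shown with <TAB> standing in for the tab characters), and parse() of
    # that line rebuilds an equivalent status_log_entry.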

class status_indenter(object):
    """Abstract interface that a status log indenter should use."""

    @property
    def indent(self):
        raise NotImplementedError


    def increment(self):
        """Increase indentation by one level."""
        raise NotImplementedError

    def decrement(self):
        """Decrease indentation by one level."""
        raise NotImplementedError


class status_logger(object):
    """Represents a status log file. Responsible for translating messages
    into on-disk status log lines.

    @property global_filename: The filename to write top-level logs to.
    @property subdir_filename: The filename to write subdir-level logs to.
    """
    def __init__(self, job, indenter, global_filename='status',
                 subdir_filename='status', record_hook=None,
                 tap_writer=None):
        """Construct a logger instance.

        @param job: A reference to the job object this is logging for. Only a
            weak reference to the job is held, to avoid a
            status_logger <-> job circular reference.
        @param indenter: A status_indenter instance, for tracking the
            indentation level.
        @param global_filename: An optional filename to initialize the
            self.global_filename attribute.
        @param subdir_filename: An optional filename to initialize the
            self.subdir_filename attribute.
        @param record_hook: An optional function to be called before an entry
            is logged. The function should expect a single parameter, a
            copy of the status_log_entry object.
        @param tap_writer: An instance of the class TAPReport for additionally
            writing TAP files.
        """
        self._jobref = weakref.ref(job)
        self._indenter = indenter
        self.global_filename = global_filename
        self.subdir_filename = subdir_filename
        self._record_hook = record_hook
        if tap_writer is None:
            self._tap_writer = TAPReport(None)
        else:
            self._tap_writer = tap_writer


    def render_entry(self, log_entry):
        """Render a status_log_entry as it would be written to a log file.

        @param log_entry: A status_log_entry instance to be rendered.

        @return: The status log entry, rendered as it would be written to the
            logs (including indentation).
        """
        if log_entry.is_end():
            indent = self._indenter.indent - 1
        else:
            indent = self._indenter.indent
        return '\t' * indent + log_entry.render().rstrip('\n')


    def record_entry(self, log_entry, log_in_subdir=True):
        """Record a status_log_entry into the appropriate status log files.

        @param log_entry: A status_log_entry instance to be recorded into the
            status logs.
        @param log_in_subdir: A boolean that indicates (when true) that subdir
            logs should be written into the subdirectory status log file.
        """
        # acquire a strong reference for the duration of the method
        job = self._jobref()
        if job is None:
            logging.warning('Something attempted to write a status log entry '
                            'after its job terminated, ignoring the attempt.')
            logging.warning(traceback.format_stack())
            return

        # call the record hook if one was given
        if self._record_hook:
            self._record_hook(log_entry)

        # figure out where we need to log to
        log_files = [os.path.join(job.resultdir, self.global_filename)]
        if log_in_subdir and log_entry.subdir:
            log_files.append(os.path.join(job.resultdir, log_entry.subdir,
                                          self.subdir_filename))

        # write out the entry to the log files
        log_text = self.render_entry(log_entry)
        for log_file in log_files:
            fileobj = open(log_file, 'a')
            try:
                print >> fileobj, log_text
            finally:
                fileobj.close()

        # write to TAPRecord instance
        if log_entry.is_end() and self._tap_writer.do_tap_report:
            self._tap_writer.record(log_entry, self._indenter.indent, log_files)

        # adjust the indentation if this was a START or END entry
        if log_entry.is_start():
            self._indenter.increment()
        elif log_entry.is_end():
            self._indenter.decrement()


class TAPReport(object):
    """
    Deal with TAP reporting for the Autotest client.
    """

    job_statuses = {
        "TEST_NA": False,
        "ABORT": False,
        "ERROR": False,
        "FAIL": False,
        "WARN": False,
        "GOOD": True,
        "START": True,
        "END GOOD": True,
        "ALERT": False,
        "RUNNING": False,
        "NOSTATUS": False
    }


    def __init__(self, enable, resultdir=None, global_filename='status'):
        """
        @param enable: Set self.do_tap_report to trigger TAP reporting.
        @param resultdir: Path where the TAP report files will be written.
        @param global_filename: File name of the status files; .tap extensions
            will be appended.
        """
        self.do_tap_report = enable
        if resultdir is not None:
            self.resultdir = os.path.abspath(resultdir)
        self._reports_container = {}
        self._keyval_container = {}  # {'path1': [entries],}
        self.global_filename = global_filename


    @classmethod
    def tap_ok(self, success, counter, message):
        """
        Return a TAP message string.

        @param success: True for positive message string.
        @param counter: number of the TAP line in the plan.
        @param message: additional message to report in the TAP line.
        """
        if success:
            message = "ok %s - %s" % (counter, message)
        else:
            message = "not ok %s - %s" % (counter, message)
        return message

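    # For reference, two illustrative calls (values are hypothetical):
    #
    #     TAPReport.tap_ok(True, 1, "job")       # -> "ok 1 - job"
    #     TAPReport.tap_ok(False, 2, "unknown")  # -> "not ok 2 - unknown"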

    def record(self, log_entry, indent, log_files):
        """
        Append a job-level status event to self._reports_container. All
        events will be written to TAP log files at the end of the test run.
        Otherwise, it's impossible to determine the TAP plan.

        @param log_entry: The status_log_entry being recorded. Its status
            code must pass log.is_valid_status to be considered valid.
        @param indent: Level of the log_entry to determine the operation if
            log_entry.operation is not given.
        @param log_files: List of full path of files the TAP report will be
            written to at the end of the test.
        """
        for log_file in log_files:
            log_file_path = os.path.dirname(log_file)
            key = log_file_path.split(self.resultdir, 1)[1].strip(os.sep)
            if not key:
                key = 'root'

            if not self._reports_container.has_key(key):
                self._reports_container[key] = []

            if log_entry.operation:
                operation = log_entry.operation
            elif indent == 1:
                operation = "job"
            else:
                operation = "unknown"
            entry = self.tap_ok(
                self.job_statuses.get(log_entry.status_code, False),
                len(self._reports_container[key]) + 1, operation + "\n"
            )
            self._reports_container[key].append(entry)


    def record_keyval(self, path, dictionary, type_tag=None):
        """
        Append the key-value pairs of dictionary to self._keyval_container in
        TAP format. Once finished write out the keyval.tap file to the file
        system.

        If type_tag is None, then the key must be composed of alphanumeric
        characters (or dashes + underscores). However, if type_tag is not
        None then the keys must also have "{type_tag}" as a suffix. At
        the moment the only valid values of type_tag are "attr" and "perf".

        @param path: The full path of the keyval.tap file to be created
        @param dictionary: The keys and values.
        @param type_tag: The type of the values
        """
        self._keyval_container.setdefault(path, [0, []])
        self._keyval_container[path][0] += 1

        if type_tag is None:
            key_regex = re.compile(r'^[-\.\w]+$')
        else:
            if type_tag not in ('attr', 'perf'):
                raise ValueError('Invalid type tag: %s' % type_tag)
            escaped_tag = re.escape(type_tag)
            key_regex = re.compile(r'^[-\.\w]+\{%s\}$' % escaped_tag)
        self._keyval_container[path][1].extend([
            self.tap_ok(True, self._keyval_container[path][0], "results"),
            "\n  ---\n",
        ])
        try:
            for key in sorted(dictionary.keys()):
                if not key_regex.search(key):
                    raise ValueError('Invalid key: %s' % key)
                self._keyval_container[path][1].append(
                    '  %s: %s\n' % (key.replace('{', '_').rstrip('}'),
                                    dictionary[key])
                )
        finally:
            self._keyval_container[path][1].append("  ...\n")
        self._write_keyval()


    def _write_reports(self):
        """
        Write TAP reports to file.
        """
        for key in self._reports_container.keys():
            if key == 'root':
                sub_dir = ''
            else:
                sub_dir = key
            tap_fh = open(os.sep.join(
                [self.resultdir, sub_dir, self.global_filename]
            ) + ".tap", 'w')
            tap_fh.write('1..' + str(len(self._reports_container[key])) + '\n')
            tap_fh.writelines(self._reports_container[key])
            tap_fh.close()


    def _write_keyval(self):
        """
        Write the self._keyval_container key values to a file.
        """
        for path in self._keyval_container.keys():
            tap_fh = open(path + ".tap", 'w')
            tap_fh.write('1..' + str(self._keyval_container[path][0]) + '\n')
            tap_fh.writelines(self._keyval_container[path][1])
            tap_fh.close()


    def write(self):
        """
        Write the TAP reports to files.
        """
        self._write_reports()


    def _write_tap_archive(self):
        """
        Write a tar archive containing all the TAP files and
        a meta.yml containing the file names.
        """
        os.chdir(self.resultdir)
        tap_files = []
        for rel_path, d, files in os.walk('.'):
            tap_files.extend(["/".join(
                [rel_path, f]) for f in files if f.endswith('.tap')])
        meta_yaml = open('meta.yml', 'w')
        meta_yaml.write('file_order:\n')
        tap_tar = tarfile.open(self.resultdir + '/tap.tar.gz', 'w:gz')
        for f in tap_files:
            meta_yaml.write("  - " + f.lstrip('./') + "\n")
            tap_tar.add(f)
        meta_yaml.close()
        tap_tar.add('meta.yml')
        tap_tar.close()


class base_job(object):
    """An abstract base class for the various autotest job classes.

    @property autodir: The top level autotest directory.
    @property clientdir: The autotest client directory.
    @property serverdir: The autotest server directory. [OPTIONAL]
    @property resultdir: The directory where results should be written out.
        [WRITABLE]

    @property pkgdir: The job packages directory. [WRITABLE]
    @property tmpdir: The job temporary directory. [WRITABLE]
    @property testdir: The job test directory. [WRITABLE]
    @property site_testdir: The job site test directory. [WRITABLE]

    @property bindir: The client bin/ directory.
    @property profdir: The client profilers/ directory.
    @property toolsdir: The client tools/ directory.

    @property control: A path to the control file to be executed. [OPTIONAL]
    @property hosts: A set of all live Host objects currently in use by the
        job. Code running in the context of a local client can safely assume
        that this set contains only a single entry.
    @property machines: A list of the machine names associated with the job.
    @property user: The user executing the job.
    @property tag: A tag identifying the job. Often used by the scheduler to
        give a name of the form NUMBER-USERNAME/HOSTNAME.
    @property test_retry: The number of times to retry a test if the test did
        not complete successfully.
    @property args: A list of additional miscellaneous command-line arguments
        provided when starting the job.

    @property automatic_test_tag: A string which, if set, will be automatically
        added to the test name when running tests.

    @property default_profile_only: A boolean indicating the default value of
        profile_only used by test.execute. [PERSISTENT]
    @property drop_caches: A boolean indicating if caches should be dropped
        before each test is executed.
    @property drop_caches_between_iterations: A boolean indicating if caches
        should be dropped before each test iteration is executed.
    @property run_test_cleanup: A boolean indicating if test.cleanup should be
        run by default after a test completes, if the run_cleanup argument is
        not specified. [PERSISTENT]

    @property num_tests_run: The number of tests run during the job. [OPTIONAL]
    @property num_tests_failed: The number of tests failed during the job.
        [OPTIONAL]

    @property harness: An instance of the client test harness. Only available
        in contexts where client test execution happens. [OPTIONAL]
    @property logging: An instance of the logging manager associated with the
        job.
    @property profilers: An instance of the profiler manager associated with
        the job.
    @property sysinfo: An instance of the sysinfo object. Only available in
        contexts where it's possible to collect sysinfo.
    @property warning_manager: A class for managing which types of WARN
        messages should be logged and which should be suppressed. [OPTIONAL]
    @property warning_loggers: A set of readable streams that will be monitored
        for WARN messages to be logged. [OPTIONAL]

    Abstract methods:
        _find_base_directories [CLASSMETHOD]
            Returns the location of autodir, clientdir and serverdir

        _find_resultdir
            Returns the location of resultdir. Gets a copy of any parameters
            passed into base_job.__init__. Can return None to indicate that
            no resultdir is to be used.

        _get_status_logger
            Returns a status_logger instance for recording job status logs.
    """

    # capture the dependency on several helper classes with factories
    _job_directory = job_directory
    _job_state = job_state


    # all the job directory attributes
    autodir = _job_directory.property_factory('autodir')
    clientdir = _job_directory.property_factory('clientdir')
    serverdir = _job_directory.property_factory('serverdir')
    resultdir = _job_directory.property_factory('resultdir')
    pkgdir = _job_directory.property_factory('pkgdir')
    tmpdir = _job_directory.property_factory('tmpdir')
    testdir = _job_directory.property_factory('testdir')
    site_testdir = _job_directory.property_factory('site_testdir')
    bindir = _job_directory.property_factory('bindir')
    profdir = _job_directory.property_factory('profdir')
    toolsdir = _job_directory.property_factory('toolsdir')


    # all the generic persistent properties
    tag = _job_state.property_factory('_state', 'tag', '')
    test_retry = _job_state.property_factory('_state', 'test_retry', 0)
    default_profile_only = _job_state.property_factory(
        '_state', 'default_profile_only', False)
    run_test_cleanup = _job_state.property_factory(
        '_state', 'run_test_cleanup', True)
    automatic_test_tag = _job_state.property_factory(
        '_state', 'automatic_test_tag', None)

    # the use_sequence_number property
    _sequence_number = _job_state.property_factory(
        '_state', '_sequence_number', None)
    def _get_use_sequence_number(self):
        return bool(self._sequence_number)
    def _set_use_sequence_number(self, value):
        if value:
            self._sequence_number = 1
        else:
            self._sequence_number = None
    use_sequence_number = property(_get_use_sequence_number,
                                   _set_use_sequence_number)

    # parent job id is passed in from the autoserv command line. It's only used
    # in server job. The property is added here for the unittest
    # (base_job_unittest.py) to be consistent in validating public properties
    # of a base_job object.
    parent_job_id = None

    def __init__(self, *args, **dargs):
        # initialize the base directories, all others are relative to these
        autodir, clientdir, serverdir = self._find_base_directories()
        self._autodir = self._job_directory(autodir)
        self._clientdir = self._job_directory(clientdir)
        # TODO(scottz): crosbug.com/38259, needed to pass unittests for now.
        self.label = None
        if serverdir:
            self._serverdir = self._job_directory(serverdir)
        else:
            self._serverdir = None

        # initialize all the other directories relative to the base ones
        self._initialize_dir_properties()
        self._resultdir = self._job_directory(
            self._find_resultdir(*args, **dargs), True)
        self._execution_contexts = []

        # initialize all the job state
        self._state = self._job_state()

        # initialize tap reporting
        if dargs.has_key('options'):
            self._tap = self._tap_init(dargs['options'].tap_report)
        else:
            self._tap = self._tap_init(False)

    @classmethod
    def _find_base_directories(cls):
        raise NotImplementedError()


    def _initialize_dir_properties(self):
        """
        Initializes all the secondary self.*dir properties. Requires autodir,
        clientdir and serverdir to already be initialized.
        """
        # create some stubs for use as shortcuts
        def readonly_dir(*args):
            return self._job_directory(os.path.join(*args))
        def readwrite_dir(*args):
            return self._job_directory(os.path.join(*args), True)

        # various client-specific directories
        self._bindir = readonly_dir(self.clientdir, 'bin')
        self._profdir = readonly_dir(self.clientdir, 'profilers')
        self._pkgdir = readwrite_dir(self.clientdir, 'packages')
        self._toolsdir = readonly_dir(self.clientdir, 'tools')

        # directories which are in serverdir on a server, clientdir on a client
        # tmp, tests, and site_tests need to be read_write for client, but only
        # read for server.
        if self.serverdir:
            root = self.serverdir
            r_or_rw_dir = readonly_dir
        else:
            root = self.clientdir
            r_or_rw_dir = readwrite_dir
        self._testdir = r_or_rw_dir(root, 'tests')
        self._site_testdir = r_or_rw_dir(root, 'site_tests')

        # various server-specific directories
        if self.serverdir:
            self._tmpdir = readwrite_dir(tempfile.gettempdir())
        else:
            self._tmpdir = readwrite_dir(root, 'tmp')


    def _find_resultdir(self, *args, **dargs):
        raise NotImplementedError()


    def push_execution_context(self, resultdir):
        """
        Save off the current context of the job and change to the given one.

        In practice this method just changes the resultdir, but it may become
        more extensive in the future. The expected use case is for when a
        child job needs to be executed in some sort of nested context (for
        example the way parallel_simple does). The original context can be
        restored with a pop_execution_context call.

        @param resultdir: The new resultdir, relative to the current one.
        """
        new_dir = self._job_directory(
            os.path.join(self.resultdir, resultdir), True)
        self._execution_contexts.append(self._resultdir)
        self._resultdir = new_dir


    def pop_execution_context(self):
        """
        Reverse the effects of the previous push_execution_context call.

        @raise IndexError: raised when the stack of contexts is empty.
        """
        if not self._execution_contexts:
            raise IndexError('No old execution context to restore')
        self._resultdir = self._execution_contexts.pop()

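    # Typical pairing of the two calls (illustrative; the nested directory
    # name is hypothetical):
    #
    #     job.push_execution_context('subjob.0')
    #     try:
    #         pass  # work whose results land in <resultdir>/subjob.0
    #     finally:
    #         job.pop_execution_context()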

    def get_state(self, name, default=_job_state.NO_DEFAULT):
        """Returns the value associated with a particular name.

        @param name: The name the value was saved with.
        @param default: A default value to return if no state is currently
            associated with name.

        @return: A deep copy of the value associated with name. Note that this
            explicitly returns a deep copy to avoid problems with mutable
            values; mutations are not persisted or shared.
        @raise KeyError: raised when no state is associated with name and a
            default value is not provided.
        """
        try:
            return self._state.get('public', name, default=default)
        except KeyError:
            raise KeyError(name)


    def set_state(self, name, value):
        """Saves the value given with the provided name.

        @param name: The name the value should be saved with.
        @param value: The value to save.
        """
        self._state.set('public', name, value)

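    # Illustrative use of the public state API (key and values are
    # hypothetical); values are stored in the 'public' namespace of the
    # underlying job_state:
    #
    #     job.set_state('my_key', {'attempt': 1})
    #     job.get_state('my_key')                  # -> {'attempt': 1}
    #     job.get_state('missing', default=None)   # -> None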

    def _build_tagged_test_name(self, testname, dargs):
        """Builds the fully tagged testname and subdirectory for job.run_test.

        @param testname: The base name of the test
        @param dargs: The ** arguments passed to run_test. Any arguments
            consumed by this method will be removed from the dictionary.

        @return: A 3-tuple of the full name of the test, the subdirectory it
            should be stored in, and the full tag of the subdir.
        """
        tag_parts = []

        # build up the parts of the tag used for the test name
        master_testpath = dargs.get('master_testpath', "")
        base_tag = dargs.pop('tag', None)
        if base_tag:
            tag_parts.append(str(base_tag))
        if self.use_sequence_number:
            tag_parts.append('_%02d_' % self._sequence_number)
            self._sequence_number += 1
        if self.automatic_test_tag:
            tag_parts.append(self.automatic_test_tag)
        full_testname = '.'.join([testname] + tag_parts)

        # build up the subdir and tag as well
        subdir_tag = dargs.pop('subdir_tag', None)
        if subdir_tag:
            tag_parts.append(subdir_tag)
        subdir = '.'.join([testname] + tag_parts)
        subdir = os.path.join(master_testpath, subdir)
        tag = '.'.join(tag_parts)

        return full_testname, subdir, tag

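    # Worked example (hypothetical arguments): with testname='dbench',
    # dargs={'tag': 'max', 'subdir_tag': 'run0'}, no sequence numbering, no
    # automatic_test_tag and no master_testpath, this returns
    #
    #     ('dbench.max', 'dbench.max.run0', 'max.run0')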

    def _make_test_outputdir(self, subdir):
        """Creates an output directory for a test to run in.

        @param subdir: The subdirectory of the test. Generally computed by
            _build_tagged_test_name.

        @return: A job_directory instance corresponding to the outputdir of
            the test.
        @raise TestError: If the output directory is invalid.
        """
        # explicitly check that this subdirectory is new
        path = os.path.join(self.resultdir, subdir)
        if os.path.exists(path):
            msg = ('%s already exists; multiple tests cannot run with the '
                   'same subdirectory' % subdir)
            raise error.TestError(msg)

        # create the outputdir and raise a TestError if it isn't valid
        try:
            outputdir = self._job_directory(path, True)
            return outputdir
        except self._job_directory.JobDirectoryException, e:
            logging.exception('%s directory creation failed with %s',
                              subdir, e)
            raise error.TestError('%s directory creation failed' % subdir)

    def _tap_init(self, enable):
        """Initialize TAP reporting
        """
        return TAPReport(enable, resultdir=self.resultdir)


    def record(self, status_code, subdir, operation, status='',
               optional_fields=None):
        """Record a job-level status event.

        Logs an event noteworthy to the Autotest job as a whole. Messages will
        be written into a global status log file, as well as a subdir-local
        status log file (if subdir is specified).

        @param status_code: A string status code describing the type of status
            entry being recorded. It must pass log.is_valid_status to be
            considered valid.
        @param subdir: A specific results subdirectory this also applies to, or
            None. If not None the subdirectory must exist.
        @param operation: A string describing the operation that was run.
        @param status: An optional human-readable message describing the status
            entry, for example an error message or "completed successfully".
        @param optional_fields: An optional dictionary of additional named
            fields to be included with the status message. Timestamp and
            localtime entries are always generated with the current time and
            added to this dictionary.
        """
        entry = status_log_entry(status_code, subdir, operation, status,
                                 optional_fields)
        self.record_entry(entry)

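    # A minimal illustrative call (the operation name is hypothetical):
    #
    #     job.record('GOOD', None, 'reboot.verify', 'completed successfully')
    #
    # writes one line to the global status log; passing a subdir instead of
    # None would also write the line to that subdirectory's status log.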

    def record_entry(self, entry, log_in_subdir=True):
        """Record a job-level status event, using a status_log_entry.

        This is the same as self.record but using an existing status log
        entry object rather than constructing one for you.

        @param entry: A status_log_entry object
        @param log_in_subdir: A boolean that indicates (when true) that subdir
            logs should be written into the subdirectory status log file.
        """
        self._get_status_logger().record_entry(entry, log_in_subdir)
1230 self._get_status_logger().record_entry(entry, log_in_subdir)