blob: b8626aa1273fde13a1c5b66fd0fb2eef1a7bf9fb [file] [log] [blame]
Sergei Trofimov4e6afe92015-10-09 09:30:04 +01001# Copyright 2013-2015 ARM Limited
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14#
15
16
17"""
18Miscellaneous functions that don't fit anywhere else.
19
20"""
from __future__ import division
import os
import sys
import re
import string
import threading
import signal
import subprocess
import pkgutil
import logging
import random
import ctypes
from operator import itemgetter
from itertools import groupby
from functools import partial, reduce  # reduce: builtin on Python 2, functools on Python 3

import wrapt

from devlib.exception import HostError, TimeoutError
41
# ABI --> architectures list
ABI_MAP = {
    'armeabi': ['armeabi', 'armv7', 'armv7l', 'armv7el', 'armv7lh'],
    'arm64': ['arm64', 'armv8', 'arm64-v8a', 'aarch64'],
}

# Vendor ID --> CPU part ID --> CPU variant ID --> Core Name
# None means variant is not used.
# Consumed by get_cpu_name() below.
CPU_PART_MAP = {
    0x41: {  # ARM
        0x926: {None: 'ARM926'},
        0x946: {None: 'ARM946'},
        0x966: {None: 'ARM966'},
        0xb02: {None: 'ARM11MPCore'},
        0xb36: {None: 'ARM1136'},
        0xb56: {None: 'ARM1156'},
        0xb76: {None: 'ARM1176'},
        0xc05: {None: 'A5'},
        0xc07: {None: 'A7'},
        0xc08: {None: 'A8'},
        0xc09: {None: 'A9'},
        0xc0e: {None: 'A17'},
        0xc0f: {None: 'A15'},
        0xc14: {None: 'R4'},
        0xc15: {None: 'R5'},
        0xc17: {None: 'R7'},
        0xc18: {None: 'R8'},
        0xc20: {None: 'M0'},
        0xc60: {None: 'M0+'},
        0xc21: {None: 'M1'},
        0xc23: {None: 'M3'},
        0xc24: {None: 'M4'},
        0xc27: {None: 'M7'},
        0xd01: {None: 'A32'},
        0xd03: {None: 'A53'},
        0xd04: {None: 'A35'},
        0xd07: {None: 'A57'},
        0xd08: {None: 'A72'},
        0xd09: {None: 'A73'},
    },
    0x4e: {  # Nvidia
        0x0: {None: 'Denver'},
    },
    0x51: {  # Qualcomm
        0x02d: {None: 'Scorpion'},
        0x04d: {None: 'MSM8960'},
        0x06f: {  # Krait
            0x2: 'Krait400',
            0x3: 'Krait450',
        },
        0x205: {0x1: 'KryoSilver'},
        0x211: {0x1: 'KryoGold'},
    },
    0x56: {  # Marvell
        0x131: {
            0x2: 'Feroceon 88F6281',
        }
    },
}
101
102
def get_cpu_name(implementer, part, variant):
    """Look up a human-readable core name in CPU_PART_MAP from the
    implementer (vendor), part and variant IDs. Returns ``None`` if the
    combination is not known."""
    variants = CPU_PART_MAP.get(implementer, {}).get(part, {})
    if None in variants:
        # The variant does not determine the core name for this vendor/part.
        return variants[None]
    return variants.get(variant)
110
111
def preexec_function():
    """Pre-exec hook for subprocess.Popen (see check_output below): make the
    child ignore SIGINT and place it in its own process group."""
    # Ignore the SIGINT signal by setting the handler to the standard
    # signal handler SIG_IGN.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    # Change process group in case we have to kill the subprocess and all of
    # its children later.
    # TODO: this is Unix-specific; would be good to find an OS-agnostic way
    # to do this in case we wanna port WA to Windows.
    os.setpgrp()
121
122
123check_output_logger = logging.getLogger('check_output')
124
125
def check_output(command, timeout=None, ignore=None, inputtext=None, **kwargs):
    """This is a version of subprocess.check_output that adds a timeout parameter to kill
    the subprocess if it does not return within the specified time.

    :param command: command to run (passed straight to subprocess.Popen).
    :param timeout: seconds to wait before SIGKILL-ing the child's process
                    group; ``None`` disables the timeout.
    :param ignore: exit code(s) that should not raise -- an int, a list of
                   ints, or the string ``'all'`` to ignore any non-zero code.
    :param inputtext: data written to the child's stdin.
    :returns: ``(stdout, stderr)`` of the child.
    :raises TimeoutError: if the child was killed by the timeout callback.
    :raises subprocess.CalledProcessError: on a non-ignored non-zero exit.
    :raises ValueError: for a bad ``ignore`` value or a ``stdout`` kwarg.
    """
    # pylint: disable=too-many-branches
    if ignore is None:
        ignore = []
    elif isinstance(ignore, int):
        ignore = [ignore]
    elif not isinstance(ignore, list) and ignore != 'all':
        message = 'Invalid value for ignore parameter: "{}"; must be an int or a list'
        raise ValueError(message.format(ignore))
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')

    def callback(pid):
        # Timer callback: kill the child's entire process group (the child
        # is a group leader thanks to preexec_function's setpgrp()).
        try:
            check_output_logger.debug('{} timed out; sending SIGKILL'.format(pid))
            os.killpg(pid, signal.SIGKILL)
        except OSError:
            pass  # process may have already terminated.

    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,
                               preexec_fn=preexec_function, **kwargs)

    if timeout:
        timer = threading.Timer(timeout, callback, [process.pid, ])
        timer.start()

    try:
        output, error = process.communicate(inputtext)
    finally:
        # Cancel the timer even if communicate() raised, so the callback
        # cannot fire against a reused pid later.
        if timeout:
            timer.cancel()

    retcode = process.poll()
    if retcode:
        if retcode == -9:  # killed, assume due to timeout callback
            raise TimeoutError(command, output='\n'.join([output, error]))
        elif ignore != 'all' and retcode not in ignore:
            raise subprocess.CalledProcessError(retcode, command, output='\n'.join([output, error]))
    return output, error
168
169
def walk_modules(path):
    """
    Given package name, return a list of all modules (including submodules, etc)
    in that package.

    :raises HostError: if an exception is raised while trying to import one of the
                       modules under ``path``. The exception will have additional
                       attributes set: ``module`` will be set to the qualified name
                       of the originating module, and ``orig_exc`` will contain
                       the original exception.

    """

    def __try_import(path):
        # Import the module, wrapping any failure in a HostError that records
        # which module was being imported (``module``), the original exception
        # (``orig_exc``) and the full traceback (``exc_info``).
        try:
            return __import__(path, {}, {}, [''])
        except Exception as e:
            he = HostError('Could not load {}: {}'.format(path, str(e)))
            he.module = path
            he.exc_info = sys.exc_info()
            he.orig_exc = e
            raise he

    root_mod = __try_import(path)
    mods = [root_mod]
    if not hasattr(root_mod, '__path__'):
        # root is a module not a package -- nothing to walk
        return mods
    for _, name, ispkg in pkgutil.iter_modules(root_mod.__path__):
        submod_path = '.'.join([path, name])
        if ispkg:
            # Recurse so that arbitrarily nested sub-packages are included.
            mods.extend(walk_modules(submod_path))
        else:
            submod = __try_import(submod_path)
            mods.append(submod)
    return mods
206
207
def ensure_directory_exists(dirpath):
    """A filter for directory paths to ensure they exist.

    Creates ``dirpath`` (including intermediate directories) if it does not
    already exist, and returns ``dirpath`` unchanged.
    """
    if not os.path.isdir(dirpath):
        try:
            os.makedirs(dirpath)
        except OSError:
            # Another process/thread may have created the directory between
            # the isdir() check and makedirs() (TOCTOU); only re-raise if
            # the directory is genuinely still missing.
            if not os.path.isdir(dirpath):
                raise
    return dirpath
213
214
def ensure_file_directory_exists(filepath):
    """
    A filter for file paths: make sure the directory that is to contain the
    file exists. The file itself is *not* created if it is missing.

    """
    parent = os.path.dirname(filepath)
    ensure_directory_exists(parent)
    return filepath
225
226
def merge_dicts(*args, **kwargs):
    """Merge two or more dicts, left to right, via _merge_two_dicts;
    keyword arguments are forwarded to it."""
    if len(args) < 2:
        raise ValueError('Must specify at least two dicts to merge.')
    merge_pair = partial(_merge_two_dicts, **kwargs)
    return reduce(merge_pair, args)
232
233
def _merge_two_dicts(base, other, list_duplicates='all', match_types=False,  # pylint: disable=R0912,R0914
                     dict_type=dict, should_normalize=True, should_merge_lists=True):
    """Merge dicts normalizing their keys.

    ``other`` is merged on top of ``base`` into a new ``dict_type``:
    nested dicts are merged recursively, lists via _merge_two_lists, sets
    by union; for any other value type ``other`` wins. With ``match_types``
    set, differing value types for a shared key raise ValueError (unless
    one side is None).
    """
    merged = dict_type()
    base_keys = base.keys()
    other_keys = other.keys()
    # Identity fallback keeps the same (value, dict_type) call signature.
    norm = normalize if should_normalize else lambda x, y: x

    # Partition keys into base-only, other-only and shared; ``union``
    # preserves base's order first, then other's.
    base_only = []
    other_only = []
    both = []
    union = []
    for k in base_keys:
        if k in other_keys:
            both.append(k)
        else:
            base_only.append(k)
        union.append(k)
    for k in other_keys:
        if k in base_keys:
            # NOTE(review): shared keys end up in ``union`` twice, so the
            # loop below recomputes their merge a second time (same result).
            union.append(k)
        else:
            union.append(k)
            other_only.append(k)

    for k in union:
        if k in base_only:
            merged[k] = norm(base[k], dict_type)
        elif k in other_only:
            merged[k] = norm(other[k], dict_type)
        elif k in both:
            base_value = base[k]
            other_value = other[k]
            base_type = type(base_value)
            other_type = type(other_value)
            if (match_types and (base_type != other_type) and
                (base_value is not None) and (other_value is not None)):
                raise ValueError('Type mismatch for {} got {} ({}) and {} ({})'.format(k, base_value, base_type,
                                                                                      other_value, other_type))
            if isinstance(base_value, dict):
                # Merge nested dictionaries recursively.
                merged[k] = _merge_two_dicts(base_value, other_value, list_duplicates, match_types, dict_type)
            elif isinstance(base_value, list):
                if should_merge_lists:
                    merged[k] = _merge_two_lists(base_value, other_value, list_duplicates, dict_type)
                else:
                    # Discard base's entries; still run other's through the
                    # list-merge machinery (normalization, "~" removals).
                    merged[k] = _merge_two_lists([], other_value, list_duplicates, dict_type)

            elif isinstance(base_value, set):
                merged[k] = norm(base_value.union(other_value), dict_type)
            else:
                # Scalar values: other overrides base.
                merged[k] = norm(other_value, dict_type)
        else:  # Should never get here
            raise AssertionError('Unexpected merge key: {}'.format(k))

    return merged
289
290
def merge_lists(*args, **kwargs):
    """Merge two or more lists, left to right, via _merge_two_lists;
    keyword arguments are forwarded to it."""
    if len(args) < 2:
        raise ValueError('Must specify at least two lists to merge.')
    merge_pair = partial(_merge_two_lists, **kwargs)
    return reduce(merge_pair, args)
296
297
def _merge_two_lists(base, other, duplicates='all', dict_type=dict):  # pylint: disable=R0912
    """
    Merge lists, normalizing their entries.

    parameters:

        :base, other: the two lists to be merged. ``other`` will be merged on
                      top of base.
        :duplicates: Indicates the strategy of handling entries that appear
                     in both lists. ``all`` will keep occurrences from both
                     lists; ``first`` will only keep occurrences from
                     ``base``; ``last`` will only keep occurrences from
                     ``other``;

                     .. note:: duplicate entries that appear in the *same* list
                               will never be removed.

    """
    # Scalars are promoted to one-element lists, so callers may pass either
    # a list or a bare value.
    if not isiterable(base):
        base = [base]
    if not isiterable(other):
        other = [other]
    if duplicates == 'all':
        merged_list = []
        # _check_remove_item consumes "~entry" removal markers; items it
        # handles are not appended to the result.
        for v in normalize(base, dict_type) + normalize(other, dict_type):
            if not _check_remove_item(merged_list, v):
                merged_list.append(v)
        return merged_list
    elif duplicates == 'first':
        base_norm = normalize(base, dict_type)
        merged_list = normalize(base, dict_type)
        # Apply base's own removal markers to the copy first.
        for v in base_norm:
            _check_remove_item(merged_list, v)
        for v in normalize(other, dict_type):
            if not _check_remove_item(merged_list, v):
                # Skip entries already contributed by base.
                if v not in base_norm:
                    merged_list.append(v)  # pylint: disable=no-member
        return merged_list
    elif duplicates == 'last':
        other_norm = normalize(other, dict_type)
        merged_list = []
        for v in normalize(base, dict_type):
            if not _check_remove_item(merged_list, v):
                # Defer to other's occurrence of the same entry.
                if v not in other_norm:
                    merged_list.append(v)
        for v in other_norm:
            if not _check_remove_item(merged_list, v):
                merged_list.append(v)
        return merged_list
    else:
        raise ValueError('Unexpected value for list duplicates argument: {}. '.format(duplicates) +
                         'Must be in {"all", "first", "last"}.')
350
351
def _check_remove_item(the_list, item):
    """Helper for merge_lists: interpret string items of the form ``~entry``
    as removal markers. If ``item`` is such a marker, remove ``entry`` from
    ``the_list`` (when present) and return ``True``; otherwise return
    ``False`` to signal the item should be kept."""
    if not isinstance(item, basestring) or not item.startswith('~'):
        return False
    target = item[1:]
    if target in the_list:
        # Removes the first occurrence, matching index()-based deletion.
        the_list.remove(target)
    return True
364
365
def normalize(value, dict_type=dict):
    """Normalize values. Recursively normalizes dict keys to be lower case,
    no surrounding whitespace, underscore-delimited strings.

    Dicts are rebuilt as ``dict_type``; lists and tuples are rebuilt with
    their elements normalized; any other value is returned unchanged.
    """
    if isinstance(value, dict):
        normalized = dict_type()
        # items() instead of the Python-2-only iteritems(): identical
        # behaviour on Python 2 and also works on Python 3.
        for k, v in value.items():
            key = k.strip().lower().replace(' ', '_')
            normalized[key] = normalize(v, dict_type)
        return normalized
    elif isinstance(value, list):
        return [normalize(v, dict_type) for v in value]
    elif isinstance(value, tuple):
        return tuple(normalize(v, dict_type) for v in value)
    else:
        return value
381
382
def convert_new_lines(text):
    """Normalize line endings: CRLF first, then any remaining bare CR,
    both become LF."""
    without_crlf = text.replace('\r\n', '\n')
    return without_crlf.replace('\r', '\n')
386
387
def escape_quotes(text):
    """Escape quotes, and escaped quotes, in the specified text."""
    # Double-up any pre-existing backslash-escaped quote, then
    # backslash-escape every remaining quote of either kind.
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    return doubled.replace("'", "\\'").replace('"', '\\"')
391
392
def escape_single_quotes(text):
    """Escape single quotes, and escaped single quotes, in the specified text.

    Uses the shell idiom: close the quoted string, emit an escaped quote,
    and reopen (``'`` becomes ``'\\''``)."""
    prepared = re.sub(r'\\("|\')', r'\\\\\1', text)
    return prepared.replace("'", "'\\''")
396
397
def escape_double_quotes(text):
    """Escape double quotes, and escaped double quotes, in the specified text."""
    prepared = re.sub(r'\\("|\')', r'\\\\\1', text)
    return prepared.replace('"', '\\"')
401
402
def getch(count=1):
    """Read ``count`` characters from standard input. On Unix the terminal
    is temporarily switched to raw mode so the read returns without waiting
    for Enter; on Windows msvcrt.getch() is used."""
    if os.name == 'nt':
        import msvcrt  # pylint: disable=F0401
        # One keypress per getch() call.
        return ''.join([msvcrt.getch() for _ in xrange(count)])
    else:  # assume Unix
        import tty  # NOQA
        import termios  # NOQA
        fd = sys.stdin.fileno()
        # Remember current terminal settings so they can be restored.
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(count)
        finally:
            # Always restore the terminal, even if the read raised.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
419
420
def isiterable(obj):
    """Returns ``True`` if the specified object is iterable and
    *is not a string type*, ``False`` otherwise."""
    if isinstance(obj, basestring):
        return False
    return hasattr(obj, '__iter__')
425
426
def as_relative(path):
    """Convert path to relative by stripping away the leading '/' on UNIX or
    the equivalent (drive letter plus separator) on other platforms."""
    _, tail = os.path.splitdrive(path)
    return tail.lstrip(os.sep)
432
433
def get_cpu_mask(cores):
    """Return a string with the hex for the cpu mask for the specified core numbers."""
    bits = 0
    for core in cores:
        bits |= 1 << core
    return '0x{0:x}'.format(bits)
440
441
def which(name):
    """Platform-independent version of UNIX which utility."""
    if os.name == 'nt':
        # Windows: walk PATH ourselves, trying the bare name first and then
        # each PATHEXT extension in turn.
        for directory in os.getenv('PATH').split(os.pathsep):
            candidate = os.path.join(directory, name)
            if os.path.isfile(candidate):
                return candidate
            for extension in os.getenv('PATHEXT').split(os.pathsep):
                candidate_ext = candidate + extension
                if os.path.isfile(candidate_ext):
                    return candidate_ext
        return None
    else:  # assume UNIX-like
        try:
            return check_output(['which', name])[0].strip()  # pylint: disable=E1103
        except subprocess.CalledProcessError:
            return None
461
462
# Matches ANSI SGR (color) escape sequences, e.g. "\x1b[1;31m".
_bash_color_regex = re.compile('\x1b\\[[0-9;]+m')


def strip_bash_colors(text):
    """Return ``text`` with any ANSI (bash) color escape sequences removed."""
    return re.sub(_bash_color_regex, '', text)
468
469
def get_random_string(length):
    """Returns a random ASCII string of the specified length, drawn from
    ASCII letters and digits."""
    # range() instead of the Python-2-only xrange(): identical behaviour
    # here on both Python versions.
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
473
474
class LoadSyntaxError(Exception):
    """Raised when loading/parsing a file fails; carries the offending file
    path and line number alongside the message."""

    def __init__(self, message, filepath, lineno):
        super(LoadSyntaxError, self).__init__(message)
        # Store the message explicitly: __str__ below reads self.message,
        # but the implicit BaseException.message attribute is deprecated on
        # Python 2.6+ and absent on Python 3.
        self.message = message
        self.filepath = filepath
        self.lineno = lineno

    def __str__(self):
        message = 'Syntax Error in {}, line {}:\n\t{}'
        return message.format(self.filepath, self.lineno, self.message)
485
486
RAND_MOD_NAME_LEN = 30
# Characters that are not valid inside a Python identifier.
BAD_CHARS = string.punctuation + string.whitespace
# string.maketrans() only exists on Python 2; str.maketrans() is the
# equivalent way to build the translation table on Python 3.
try:
    TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))
except AttributeError:
    TRANS_TABLE = str.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))


def to_identifier(text):
    """Converts text to a valid Python identifier by replacing all
    whitespace and punctuation, collapsing runs of the resulting
    underscores into one."""
    return re.sub('_+', '_', text.translate(TRANS_TABLE))
496
497
def unique(alist):
    """
    Returns a list containing only unique elements from the input list (but preserves
    order, unlike sets).

    """
    seen = []
    for element in alist:
        if element in seen:
            continue
        seen.append(element)
    return seen
509
510
def ranges_to_list(ranges_string):
    """Converts a sysfs-style ranges string, e.g. ``"0,2-4"``, into a list, e.g. ``[0, 2, 3, 4]``"""
    values = []
    for rg in ranges_string.split(','):
        if '-' in rg:
            # range() instead of the Python-2-only xrange(); behaviour is
            # identical when used with list.extend().
            first, last = map(int, rg.split('-'))
            values.extend(range(first, last + 1))
        else:
            values.append(int(rg))
    return values
521
522
def list_to_ranges(values):
    """Converts a list, e.g ``[0,2,3,4]``, into a sysfs-style ranges string, e.g. ``"0,2-4"``"""
    # For a run of consecutive integers, index - value is constant, so
    # groupby splits the enumeration at each gap. The original used a
    # Python-2-only tuple-unpacking lambda and indexed the result of map()
    # (a one-shot iterator on Python 3); both are avoided here.
    range_groups = []
    for _, group_iter in groupby(enumerate(values), lambda pair: pair[0] - pair[1]):
        range_groups.append([value for _, value in group_iter])
    range_strings = []
    for group in range_groups:
        if len(group) == 1:
            range_strings.append(str(group[0]))
        else:
            range_strings.append('{}-{}'.format(group[0], group[-1]))
    return ','.join(range_strings)
535
536
def list_to_mask(values, base=0x0):
    """Converts the specified list of integer values into
    a bit mask for those values. Optionally, the list can be
    applied on top of an existing ``base`` mask."""
    mask = base
    for bit in values:
        mask |= 1 << bit
    return mask
544
545
def mask_to_list(mask):
    """Converts the specified integer bitmask into a list of
    indexes of bits that are set in the mask, highest index first."""
    size = len(bin(mask)) - 2  # because of the "0b" prefix
    # range() instead of the Python-2-only xrange(); walking the bit
    # indexes in descending order directly is equivalent to the original
    # size - i - 1 arithmetic.
    return [i for i in range(size - 1, -1, -1) if mask & (1 << i)]
552
553
# Module-level cache backing the memoized() decorator below.
__memo_cache = {}


def reset_memo_cache():
    """Discard all cached results held for memoized() functions."""
    __memo_cache.clear()
559
560
def __get_memo_id(obj):
    """
    An object's id() may be re-used after an object is freed, so it's not
    sufficiently unique to identify params for the memo cache (two different
    params may end up with the same id). this attempts to generate a more unique
    ID string.
    """
    obj_id = id(obj)
    try:
        # For hashable objects, id plus hash is taken as unique enough.
        return '{}/{}'.format(obj_id, hash(obj))
    except TypeError:  # obj is not hashable
        # Fall back to mixing in the first few raw bytes of the object's
        # in-memory representation.
        obj_pyobj = ctypes.cast(obj_id, ctypes.py_object)
        # TODO: Note: there is still a possibility of a clash here. If Two
        # different objects get assigned the same ID, an are large and are
        # identical in the first thirty two bytes. This shouldn't be much of an
        # issue in the current application of memoizing Target calls, as it's very
        # unlikely that a target will get passed large params; but may cause
        # problems in other applications, e.g. when memoizing results of operations
        # on large arrays. I can't really think of a good way around that apart
        # form, e.g., md5 hashing the entire raw object, which will have an
        # undesirable impact on performance.
        num_bytes = min(ctypes.sizeof(obj_pyobj), 32)
        obj_bytes = ctypes.string_at(ctypes.addressof(obj_pyobj), num_bytes)
        return '{}/{}'.format(obj_id, obj_bytes)
Sergei Trofimov6d854fd2016-09-06 09:57:58 +0100585
586
@wrapt.decorator
def memoized(wrapped, instance, args, kwargs):
    """A decorator for memoizing functions and methods.

    Results are keyed on repr(wrapped) plus __get_memo_id() of each
    positional argument and the formatted keyword arguments, and stored in
    the module-level __memo_cache (cleared via reset_memo_cache())."""
    func_id = repr(wrapped)

    def memoize_wrapper(*args, **kwargs):
        id_string = func_id + ','.join([__get_memo_id(a) for a in args])
        # NOTE(review): kwargs.iteritems() is Python-2-only; items() would
        # be required for Python 3 support.
        id_string += ','.join('{}={}'.format(k, v)
                              for k, v in kwargs.iteritems())
        if id_string not in __memo_cache:
            # Cache miss: invoke the wrapped callable and remember its result.
            __memo_cache[id_string] = wrapped(*args, **kwargs)
        return __memo_cache[id_string]

    return memoize_wrapper(*args, **kwargs)
Sergei Trofimov4e6afe92015-10-09 09:30:04 +0100601