blob: 95c93e10eee944b8687681eed9ea7e260d921596 [file] [log] [blame]
Sergei Trofimov4e6afe92015-10-09 09:30:04 +01001# Copyright 2013-2015 ARM Limited
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14#
15
16
17"""
18Miscellaneous functions that don't fit anywhere else.
19
20"""
21from __future__ import division
22import os
23import sys
24import re
25import string
26import threading
27import signal
28import subprocess
29import pkgutil
30import logging
31import random
32from operator import itemgetter
33from itertools import groupby
34from functools import partial
35
Michele Di Giorgio539e9b32016-06-22 17:54:59 +010036import wrapt
Sergei Trofimov4e6afe92015-10-09 09:30:04 +010037
# ABI --> architectures list
# Maps a generic ABI name to the list of architecture strings that are
# considered to belong to that ABI (presumably as reported by the target,
# e.g. via uname or Android build properties — confirm against callers).
ABI_MAP = {
    'armeabi': ['armeabi', 'armv7', 'armv7l', 'armv7el', 'armv7lh'],
    'arm64': ['arm64', 'armv8', 'arm64-v8a', 'aarch64'],
}
43
# Vendor ID --> CPU part ID --> CPU variant ID --> Core Name
# None means variant is not used.
# NOTE(review): the IDs appear to correspond to the implementer/part/variant
# fields of the ARM MIDR register — confirm against the code that reads them.
CPU_PART_MAP = {
    0x41: {  # ARM
        0x926: {None: 'ARM926'},
        0x946: {None: 'ARM946'},
        0x966: {None: 'ARM966'},
        0xb02: {None: 'ARM11MPCore'},
        0xb36: {None: 'ARM1136'},
        0xb56: {None: 'ARM1156'},
        0xb76: {None: 'ARM1176'},
        0xc05: {None: 'A5'},
        0xc07: {None: 'A7'},
        0xc08: {None: 'A8'},
        0xc09: {None: 'A9'},
        0xc0f: {None: 'A15'},
        0xc14: {None: 'R4'},
        0xc15: {None: 'R5'},
        0xc20: {None: 'M0'},
        0xc21: {None: 'M1'},
        0xc23: {None: 'M3'},
        0xc24: {None: 'M4'},
        0xc27: {None: 'M7'},
        0xd03: {None: 'A53'},
        0xd07: {None: 'A57'},
        0xd08: {None: 'A72'},
    },
    0x4e: {  # Nvidia
        0x0: {None: 'Denver'},
    },
    0x51: {  # Qualcomm
        0x02d: {None: 'Scorpion'},
        0x04d: {None: 'MSM8960'},
        0x06f: {  # Krait
            0x2: 'Krait400',
            0x3: 'Krait450',
        },
    },
    0x56: {  # Marvell
        0x131: {
            0x2: 'Feroceon 88F6281',
        }
    },
}
88
89
def get_cpu_name(implementer, part, variant):
    """Look up a human-readable core name in ``CPU_PART_MAP``.

    Returns ``None`` when the implementer/part (or, where it matters,
    the variant) is not present in the map.
    """
    variants = CPU_PART_MAP.get(implementer, {}).get(part, {})
    # A None key means the variant does not determine the core name
    # for this vendor/part.
    if None in variants:
        return variants[None]
    return variants.get(variant)
97
98
def preexec_function():
    """Run in a freshly forked child (as a ``subprocess`` ``preexec_fn``):
    detach it from SIGINT and give it its own process group so the whole
    group can later be killed with ``os.killpg`` (see ``check_output``)."""
    # Ignore the SIGINT signal by setting the handler to the standard
    # signal handler SIG_IGN.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    # Change process group in case we have to kill the subprocess and all of
    # its children later.
    # TODO: this is Unix-specific; would be good to find an OS-agnostic way
    # to do this in case we wanna port WA to Windows.
    os.setpgrp()
108
109
# Module-level logger used by check_output() below.
check_output_logger = logging.getLogger('check_output')
111
112
# Defined here rather than in devlib.exceptions due to module load dependencies
class TimeoutError(Exception):
    """Raised when a subprocess command times out. This is basically a ``WAError``-derived version
    of ``subprocess.CalledProcessError``, the thinking being that while a timeout could be due to
    programming error (e.g. not setting long enough timers), it is often due to some failure in the
    environment, and therefore should be classed as a "user error".

    :param command: the command that timed out.
    :param output: whatever the command produced before being killed (may be ``None``).
    """

    def __init__(self, command, output):
        super(TimeoutError, self).__init__('Timed out: {}'.format(command))
        # Store the message explicitly rather than relying on the inherited
        # BaseException.message attribute, which has been deprecated since
        # Python 2.6 and no longer exists in Python 3; __str__ below needs it.
        self.message = 'Timed out: {}'.format(command)
        self.command = command
        self.output = output

    def __str__(self):
        return '\n'.join([self.message, 'OUTPUT:', self.output or ''])
127
128
def check_output(command, timeout=None, ignore=None, inputtext=None, **kwargs):
    """This is a version of subprocess.check_output that adds a timeout parameter to kill
    the subprocess if it does not return within the specified time.

    :param command: command to run (passed straight to ``subprocess.Popen``).
    :param timeout: seconds after which the whole process group is SIGKILLed;
                    ``None`` (or 0) disables the timeout.
    :param ignore: an int, a list of ints, or the string ``'all'`` naming
                   non-zero return codes that should NOT raise.
    :param inputtext: passed to the process's stdin via ``communicate()``.
    :returns: ``(stdout, stderr)`` of the process.
    :raises TimeoutError: if the process was killed by the timeout callback.
    :raises subprocess.CalledProcessError: on any other non-ignored non-zero
                                           return code.
    :raises ValueError: on a bad ``ignore`` value or if ``stdout`` is passed
                        in ``kwargs``.
    """
    # pylint: disable=too-many-branches
    # Normalize `ignore` to a list (or the sentinel string 'all').
    if ignore is None:
        ignore = []
    elif isinstance(ignore, int):
        ignore = [ignore]
    elif not isinstance(ignore, list) and ignore != 'all':
        message = 'Invalid value for ignore parameter: "{}"; must be an int or a list'
        raise ValueError(message.format(ignore))
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')

    def callback(pid):
        # Timer callback: kill the child's entire process group (created by
        # preexec_function) so grandchildren die too.
        try:
            check_output_logger.debug('{} timed out; sending SIGKILL'.format(pid))
            os.killpg(pid, signal.SIGKILL)
        except OSError:
            pass  # process may have already terminated.

    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,
                               preexec_fn=preexec_function, **kwargs)

    if timeout:
        timer = threading.Timer(timeout, callback, [process.pid, ])
        timer.start()

    try:
        output, error = process.communicate(inputtext)
    finally:
        # Always cancel the timer, even if communicate() raised, so a stray
        # callback cannot kill an unrelated process group later.
        if timeout:
            timer.cancel()

    retcode = process.poll()
    if retcode:
        if retcode == -9:  # killed, assume due to timeout callback (-9 == -SIGKILL)
            raise TimeoutError(command, output='\n'.join([output, error]))
        elif ignore != 'all' and retcode not in ignore:
            raise subprocess.CalledProcessError(retcode, command, output='\n'.join([output, error]))
    return output, error
171
172
def walk_modules(path):
    """
    Given a package name, return a list of all modules (including submodules
    of nested packages) inside that package, importing each one along the way.

    """
    root = __import__(path, {}, {}, [''])
    modules = [root]
    for _, submod_name, is_package in pkgutil.iter_modules(root.__path__):
        qualified = '.'.join([path, submod_name])
        if is_package:
            # Recurse into sub-packages.
            modules += walk_modules(qualified)
        else:
            modules.append(__import__(qualified, {}, {}, ['']))
    return modules
189
190
def ensure_directory_exists(dirpath):
    """A filter for directory paths: create ``dirpath`` (including missing
    parents) if it does not already exist, then return it unchanged."""
    if os.path.isdir(dirpath):
        return dirpath
    os.makedirs(dirpath)
    return dirpath
196
197
def ensure_file_directory_exists(filepath):
    """
    A filter for file paths: make sure the directory that would contain
    ``filepath`` exists (creating it if necessary) and return ``filepath``
    unchanged. The file itself is *not* created.

    """
    dirname = os.path.dirname(filepath)
    ensure_directory_exists(dirname)
    return filepath
208
209
def merge_dicts(*args, **kwargs):
    """Merge two or more dictionaries by folding ``_merge_two_dicts`` over
    them left-to-right; keyword arguments are forwarded to each pairwise
    merge."""
    if len(args) < 2:
        raise ValueError('Must specify at least two dicts to merge.')
    merge_two = partial(_merge_two_dicts, **kwargs)
    return reduce(merge_two, args)
215
216
def _merge_two_dicts(base, other, list_duplicates='all', match_types=False,  # pylint: disable=R0912,R0914
                     dict_type=dict, should_normalize=True, should_merge_lists=True):
    """Merge dicts normalizing their keys.

    ``other`` wins over ``base`` for scalar values; dicts, lists and sets
    are merged recursively. ``dict_type`` is the mapping class used for the
    result; ``should_normalize`` controls whether values pass through
    ``normalize()``. With ``match_types``, a type mismatch between the two
    values of a shared key raises ``ValueError`` (unless one is ``None``).
    """
    merged = dict_type()
    base_keys = base.keys()
    other_keys = other.keys()
    # When normalization is disabled, use an identity function with the
    # same (value, dict_type) signature.
    norm = normalize if should_normalize else lambda x, y: x

    # Partition the keys; `union` ends up holding every key exactly once,
    # base-only keys first, then the rest in `other`'s order.
    base_only = []
    other_only = []
    both = []
    union = []
    for k in base_keys:
        if k in other_keys:
            both.append(k)
        else:
            base_only.append(k)
            union.append(k)
    for k in other_keys:
        if k in base_keys:
            union.append(k)
        else:
            union.append(k)
            other_only.append(k)

    for k in union:
        if k in base_only:
            merged[k] = norm(base[k], dict_type)
        elif k in other_only:
            merged[k] = norm(other[k], dict_type)
        elif k in both:
            base_value = base[k]
            other_value = other[k]
            base_type = type(base_value)
            other_type = type(other_value)
            if (match_types and (base_type != other_type) and
                    (base_value is not None) and (other_value is not None)):
                raise ValueError('Type mismatch for {} got {} ({}) and {} ({})'.format(k, base_value, base_type,
                                                                                      other_value, other_type))
            if isinstance(base_value, dict):
                merged[k] = _merge_two_dicts(base_value, other_value, list_duplicates, match_types, dict_type)
            elif isinstance(base_value, list):
                if should_merge_lists:
                    merged[k] = _merge_two_lists(base_value, other_value, list_duplicates, dict_type)
                else:
                    # Keep only `other`'s list (still subject to the
                    # duplicate/removal-marker handling).
                    merged[k] = _merge_two_lists([], other_value, list_duplicates, dict_type)

            elif isinstance(base_value, set):
                merged[k] = norm(base_value.union(other_value), dict_type)
            else:
                merged[k] = norm(other_value, dict_type)
        else:  # Should never get here
            raise AssertionError('Unexpected merge key: {}'.format(k))

    return merged
272
273
def merge_lists(*args, **kwargs):
    """Merge two or more lists by folding ``_merge_two_lists`` over them
    left-to-right; keyword arguments are forwarded to each pairwise merge."""
    if len(args) < 2:
        raise ValueError('Must specify at least two lists to merge.')
    merge_two = partial(_merge_two_lists, **kwargs)
    return reduce(merge_two, args)
279
280
def _merge_two_lists(base, other, duplicates='all', dict_type=dict):  # pylint: disable=R0912
    """
    Merge lists, normalizing their entries.

    parameters:

        :base, other: the two lists to be merged. ``other`` will be merged on
                      top of base.
        :duplicates: Indicates the strategy of handling entries that appear
                     in both lists. ``all`` will keep occurrences from both
                     lists; ``first`` will only keep occurrences from
                     ``base``; ``last`` will only keep occurrences from
                     ``other``;

                     .. note:: duplicate entries that appear in the *same* list
                               will never be removed.

    """
    # Entries that are strings starting with '~' are removal markers,
    # handled by _check_remove_item throughout.
    if not isiterable(base):
        base = [base]
    if not isiterable(other):
        other = [other]
    if duplicates == 'all':
        merged_list = []
        for v in normalize(base, dict_type) + normalize(other, dict_type):
            if not _check_remove_item(merged_list, v):
                merged_list.append(v)
        return merged_list
    elif duplicates == 'first':
        base_norm = normalize(base, dict_type)
        # Start from a *separate* normalized copy so that applying removal
        # markers below does not mutate base_norm, which is still used for
        # the membership test in the second loop.
        merged_list = normalize(base, dict_type)
        for v in base_norm:
            _check_remove_item(merged_list, v)
        for v in normalize(other, dict_type):
            if not _check_remove_item(merged_list, v):
                if v not in base_norm:
                    merged_list.append(v)  # pylint: disable=no-member
        return merged_list
    elif duplicates == 'last':
        other_norm = normalize(other, dict_type)
        merged_list = []
        # Keep a base entry only if `other` does not supply it again later.
        for v in normalize(base, dict_type):
            if not _check_remove_item(merged_list, v):
                if v not in other_norm:
                    merged_list.append(v)
        for v in other_norm:
            if not _check_remove_item(merged_list, v):
                merged_list.append(v)
        return merged_list
    else:
        raise ValueError('Unexpected value for list duplicates argument: {}. '.format(duplicates) +
                         'Must be in {"all", "first", "last"}.')
333
334
def _check_remove_item(the_list, item):
    """Helper for the list-merging functions: a string entry beginning with
    ``'~'`` is a removal marker rather than a value. If ``item`` is such a
    marker, delete the first occurrence of the marked value from ``the_list``
    (when present) and return ``True``; otherwise leave the list untouched
    and return ``False``."""
    if isinstance(item, basestring) and item.startswith('~'):
        target = item[1:]
        if target in the_list:
            the_list.remove(target)
        return True
    return False
347
348
def normalize(value, dict_type=dict):
    """Normalize values. Recursively normalizes dict keys to be lower case,
    no surrounding whitespace, underscore-delimited strings."""
    if isinstance(value, dict):
        result = dict_type()
        for key, val in value.iteritems():
            canonical = key.strip().lower().replace(' ', '_')
            result[canonical] = normalize(val, dict_type)
        return result
    if isinstance(value, list):
        return [normalize(item, dict_type) for item in value]
    if isinstance(value, tuple):
        return tuple(normalize(item, dict_type) for item in value)
    # Scalars (and anything else) pass through untouched.
    return value
364
365
def convert_new_lines(text):
    """Normalize line endings: both Windows (CRLF) and bare-CR endings
    become plain LF."""
    without_crlf = text.replace('\r\n', '\n')
    return without_crlf.replace('\r', '\n')
369
370
def escape_quotes(text):
    """Escape quotes, and escaped quotes, in the specified text."""
    # First double any backslash that already precedes a quote, then
    # backslash-escape every remaining quote character.
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    return doubled.replace('\'', '\\\'').replace('\"', '\\\"')
374
375
def escape_single_quotes(text):
    """Escape single quotes, and escaped single quotes, in the specified text
    (shell style: each ``'`` becomes ``'\\''``)."""
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    return doubled.replace('\'', '\'\\\'\'')
379
380
def escape_double_quotes(text):
    """Escape double quotes, and escaped double quotes, in the specified text."""
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    return doubled.replace('\"', '\\\"')
384
385
def getch(count=1):
    """Read ``count`` characters from standard input without waiting for a
    newline (raw, unbuffered input). Works on both Windows and Unix."""
    if os.name == 'nt':
        import msvcrt  # pylint: disable=F0401
        # msvcrt.getch() returns one keypress at a time with no echo.
        return ''.join([msvcrt.getch() for _ in xrange(count)])
    else:  # assume Unix
        import tty  # NOQA
        import termios  # NOQA
        fd = sys.stdin.fileno()
        # Save current terminal settings so they can be restored afterwards.
        old_settings = termios.tcgetattr(fd)
        try:
            # Raw mode: characters are delivered immediately, no echo.
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(count)
        finally:
            # Always restore the terminal, even if the read was interrupted.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
402
403
def isiterable(obj):
    """Returns ``True`` if the specified object is iterable and
    *is not a string type*, ``False`` otherwise."""
    if isinstance(obj, basestring):
        return False
    return hasattr(obj, '__iter__')
408
409
def as_relative(path):
    """Convert path to relative by stripping away the leading '/' on UNIX or
    the equivalent (drive letter plus separator) on other platforms."""
    _, tail = os.path.splitdrive(path)
    return tail.lstrip(os.sep)
415
416
def get_cpu_mask(cores):
    """Return a string with the hex for the cpu mask for the specified
    core numbers (one bit set per listed core)."""
    mask = 0
    for core in cores:
        mask = mask | (1 << core)
    return '0x{0:x}'.format(mask)
423
424
def which(name):
    """Platform-independent version of UNIX which utility: return the full
    path of the executable ``name``, or ``None`` if it cannot be found."""
    if os.name != 'nt':  # assume UNIX-like: defer to the system's which
        try:
            return check_output(['which', name])[0].strip()  # pylint: disable=E1103
        except subprocess.CalledProcessError:
            return None
    # Windows: walk PATH, trying the bare name and then each PATHEXT suffix.
    paths = os.getenv('PATH').split(os.pathsep)
    exts = os.getenv('PATHEXT').split(os.pathsep)
    for path in paths:
        candidate = os.path.join(path, name)
        if os.path.isfile(candidate):
            return candidate
        for ext in exts:
            if os.path.isfile(candidate + ext):
                return candidate + ext
    return None
444
445
# Pre-compiled pattern matching ANSI SGR ("bash color") escape sequences.
_bash_color_regex = re.compile('\x1b\\[[0-9;]+m')


def strip_bash_colors(text):
    """Return ``text`` with all bash/ANSI color escape sequences removed."""
    stripped = _bash_color_regex.sub('', text)
    return stripped
451
452
def get_random_string(length):
    """Returns a random ASCII string of the specified length, drawn from
    letters and digits. Not cryptographically secure (uses ``random``)."""
    # xrange is Python 2-only; range iterates identically here and also
    # works under Python 3. The alphabet is hoisted out of the loop.
    charset = string.ascii_letters + string.digits
    return ''.join(random.choice(charset) for _ in range(length))
456
457
class LoadSyntaxError(Exception):
    """Raised when loading/parsing a file fails due to a syntax error;
    carries the path of the offending file and the line number.

    :param message: description of the syntax problem.
    :param filepath: path of the file that failed to load.
    :param lineno: line number at which the error occurred.
    """

    def __init__(self, message, filepath, lineno):
        super(LoadSyntaxError, self).__init__(message)
        # Store the message explicitly rather than relying on the inherited
        # BaseException.message attribute, which has been deprecated since
        # Python 2.6 and no longer exists in Python 3; __str__ below needs it.
        self.message = message
        self.filepath = filepath
        self.lineno = lineno

    def __str__(self):
        message = 'Syntax Error in {}, line {}:\n\t{}'
        return message.format(self.filepath, self.lineno, self.message)
468
469
RAND_MOD_NAME_LEN = 30
BAD_CHARS = string.punctuation + string.whitespace
# Translation table mapping every "bad" character to an underscore.
# NOTE: string.maketrans is Python 2-only (Python 3 uses str.maketrans).
TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))


def to_identifier(text):
    """Converts text to a valid Python identifier by replacing all
    whitespace and punctuation with underscores."""
    translated = text.translate(TRANS_TABLE)
    # Collapse runs of underscores produced by adjacent bad characters.
    return re.sub('_+', '_', translated)
479
480
def unique(alist):
    """
    Returns a list containing only unique elements from the input list,
    keeping the first occurrence of each and preserving order (unlike sets).

    """
    result = []
    for item in alist:
        if item in result:
            continue
        result.append(item)
    return result
492
493
def ranges_to_list(ranges_string):
    """Converts a sysfs-style ranges string, e.g. ``"0,2-4"``, into a list,
    e.g. ``[0, 2, 3, 4]``. Ranges are inclusive at both ends."""
    values = []
    for rg in ranges_string.split(','):
        if '-' in rg:
            first, last = map(int, rg.split('-'))
            # xrange was Python 2-only; range iterates identically here and
            # also works under Python 3.
            values.extend(range(first, last + 1))
        else:
            values.append(int(rg))
    return values
504
505
def list_to_ranges(values):
    """Converts a list, e.g ``[0,2,3,4]``, into a sysfs-style ranges string,
    e.g. ``"0,2-4"``. Assumes ``values`` is in ascending order."""
    range_groups = []
    # Consecutive values share a constant (index - value) difference, so
    # grouping on that difference splits the input into runs.
    # NOTE: the original used a tuple-parameter lambda (removed by PEP 3113)
    # and fed a bare map() into len(); both are Python 2-only forms, replaced
    # here with equivalents that behave identically on Python 2 and also
    # work on Python 3.
    for _, group in groupby(enumerate(values), lambda pair: pair[0] - pair[1]):
        range_groups.append([value for _, value in group])
    range_strings = []
    for group in range_groups:
        if len(group) == 1:
            range_strings.append(str(group[0]))
        else:
            range_strings.append('{}-{}'.format(group[0], group[-1]))
    return ','.join(range_strings)
518
519
def list_to_mask(values, base=0x0):
    """Converts the specified list of integer values into
    a bit mask for those values. Optionally, the bits can be
    OR-ed into an existing mask passed as ``base``."""
    mask = base
    for value in values:
        mask |= 1 << value
    return mask
527
528
def mask_to_list(mask):
    """Converts the specified integer bitmask into a list of
    indexes of bits that are set in the mask, highest index first.

    ``xrange`` (Python 2-only) is replaced with ``range``, which iterates
    identically here and also works under Python 3.
    """
    size = len(bin(mask)) - 2  # because of the "0b" prefix
    return [size - i - 1 for i in range(size)
            if mask & (1 << size - i - 1)]
535
536
# Module-level cache shared by all functions decorated with @memoized below.
# Keys are strings derived from the wrapped function and its arguments.
__memo_cache = {}


def reset_memo_cache():
    """Discard all results cached by @memoized-decorated functions."""
    __memo_cache.clear()
542
543
Michele Di Giorgio539e9b32016-06-22 17:54:59 +0100544@wrapt.decorator
545def memoized(wrapped, instance, args, kwargs):
Sergei Trofimov4e6afe92015-10-09 09:30:04 +0100546 """A decorator for memoizing functions and methods."""
Michele Di Giorgio539e9b32016-06-22 17:54:59 +0100547 func_id = repr(wrapped)
Sergei Trofimov4e6afe92015-10-09 09:30:04 +0100548
549 def memoize_wrapper(*args, **kwargs):
550 id_string = func_id + ','.join([str(id(a)) for a in args])
551 id_string += ','.join('{}={}'.format(k, v)
552 for k, v in kwargs.iteritems())
553 if id_string not in __memo_cache:
Michele Di Giorgio539e9b32016-06-22 17:54:59 +0100554 __memo_cache[id_string] = wrapped(*args, **kwargs)
Sergei Trofimov4e6afe92015-10-09 09:30:04 +0100555 return __memo_cache[id_string]
556
Michele Di Giorgio539e9b32016-06-22 17:54:59 +0100557 return memoize_wrapper(*args, **kwargs)
Sergei Trofimov4e6afe92015-10-09 09:30:04 +0100558