From b231e3ec42bb966832a7eb488cc9bc8eef67dcea Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Tue, 25 Jul 2023 19:08:18 +0100 Subject: [PATCH 01/24] lisa._generic: Add OneOf FEATURE Allow checking that a value is part of a specific list of allowed values. --- lisa/_generic.py | 78 +++++++++++++++++++++++++++++++++--------------- 1 file changed, 54 insertions(+), 24 deletions(-) diff --git a/lisa/_generic.py b/lisa/_generic.py index 3b1fae86b..1649f8a43 100644 --- a/lisa/_generic.py +++ b/lisa/_generic.py @@ -47,16 +47,7 @@ def _isinstance(x, type_): else: raise TypeError(f'Cannot handle type hint: {type_}') - -class GenericContainerMetaBase(type): - """ - Base class for the metaclass of generic containers. - - They are parameterized with the ``type_`` class attribute, and classes can - also be created by indexing on classes with :class:`GenericContainerBase` - metaclass. The ``type_`` class attribute will be set with what is passed as - the key. - """ +class MetaBase(type): def __instancecheck__(cls, instance): try: cls.instancecheck(instance) @@ -69,6 +60,29 @@ class GenericContainerMetaBase(type): # assert Container[Foo] is Container[Foo] @functools.lru_cache(maxsize=None, typed=True) def __getitem__(cls, type_): + NewClass = cls.getitem(type_) + + NewClass.__module__ = cls.__module__ + + # Since this type name is not resolvable, avoid cross reference + # warnings from Sphinx + sphinx_register_nitpick_ignore(NewClass) + return NewClass + + +class GenericContainerMetaBase(MetaBase): + """ + Base class for the metaclass of generic containers. + + They are parameterized with the ``type_`` class attribute, and classes can + also be created by indexing on classes with :class:`GenericBase` + metaclass. The ``type_`` class attribute will be set with what is passed as + the key. + """ + + # Fully memoize the function so that this always holds: + # assert Container[Foo] is Container[Foo] + def getitem(cls, type_): class NewClass(cls): _type = type_ @@ -104,23 +118,16 @@ class GenericContainerMetaBase(type): attrgetter('__qualname__'), type_param_name, ) - NewClass.__module__ = cls.__module__ - - # Since this type name is not resolvable, avoid cross reference - # warnings from Sphinx - sphinx_register_nitpick_ignore(NewClass) - return NewClass -class GenericContainerBase: +class GenericBase: """ Base class for generic containers. """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - type(self).instancecheck(self) + def __new__(cls, obj): + cls.instancecheck(obj) + return obj class GenericMappingMeta(GenericContainerMetaBase, type(Mapping)): @@ -144,7 +151,7 @@ class GenericMappingMeta(GenericContainerMetaBase, type(Mapping)): raise TypeError(f'Value of {type(v).__qualname__} key "{k}" should be of type {v_type.__qualname__}', k) -class TypedDict(GenericContainerBase, dict, metaclass=GenericMappingMeta): +class TypedDict(GenericBase, dict, metaclass=GenericMappingMeta): """ Subclass of dict providing keys and values type check. """ @@ -169,14 +176,37 @@ class GenericSortedSequenceMeta(GenericSequenceMeta): raise TypeError(f'Item #{i} "{x}" is higher than the next item "{y}", but the list must be sorted') -class TypedList(GenericContainerBase, list, metaclass=GenericSequenceMeta): +class TypedList(GenericBase, list, metaclass=GenericSequenceMeta): """ Subclass of list providing keys and values type check. 
""" -class SortedTypedList(GenericContainerBase, list, metaclass=GenericSortedSequenceMeta): +class SortedTypedList(GenericBase, list, metaclass=GenericSortedSequenceMeta): """ Subclass of list providing keys and values type check, and also check the list is sorted in ascending order. """ + + +class OneOfMeta(MetaBase): + def getitem(cls, allowed): + class NewClass(cls): + _allowed = allowed + + NewClass.__qualname__ = f'{cls.__qualname__}[{", ".join(map(repr, allowed))}]' + return NewClass + + def instancecheck(cls, instance): + allowed = cls._allowed + if instance not in allowed: + raise ValueError(f'Value {repr(instance)} is not allowed. It must be one of: {", ".join(map(repr, allowed))}') + + +class OneOf(GenericBase, metaclass=OneOfMeta): + """ + Check that the provided value is part of a specific set of allowed values. + """ + def __new__(cls, obj): + cls.instancecheck(obj) + return obj -- GitLab From 3444e336a813f986c5acaaffe10892c4270f2540 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Tue, 25 Jul 2023 18:39:37 +0100 Subject: [PATCH 02/24] lsia.trace: Fix events-namespaces FtraceConf key type FIX Fix wrong bracketing. --- lisa/trace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lisa/trace.py b/lisa/trace.py index 63e04e8f2..17a4324fc 100644 --- a/lisa/trace.py +++ b/lisa/trace.py @@ -5628,7 +5628,7 @@ class FtraceConf(SimpleMultiSrcConf, HideExekallID): """ STRUCTURE = TopLevelKeyDesc('ftrace-conf', 'FTrace configuration', ( KeyDesc('events', 'FTrace events to trace', [TypedList[str], TraceEventCheckerBase]), - KeyDesc('events-namespaces', 'FTrace events namespaces to use. See Trace namespace constructor parameter.', [TypedList[Union[str, None], None]]), + KeyDesc('events-namespaces', 'FTrace events namespaces to use. See Trace namespace constructor parameter.', [TypedList[Union[str, None]], None]), KeyDesc('functions', 'FTrace functions to trace', [TypedList[str]]), KeyDesc('buffer-size', 'FTrace buffer size', [int]), KeyDesc('trace-clock', 'Clock used while tracing (see "trace_clock" in ftrace.txt kernel doc)', [str, None]), -- GitLab From 92b31e0075067592bbb15663436f65b6c4634733 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Fri, 21 Jul 2023 14:42:43 +0100 Subject: [PATCH 03/24] lisa.utils: Make set_nested_key() return mapping FEATURE set_nested_key() now returns the passed mapping instead of returning None. --- lisa/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lisa/utils.py b/lisa/utils.py index 8032cc3c9..e11d2f0db 100644 --- a/lisa/utils.py +++ b/lisa/utils.py @@ -1649,6 +1649,7 @@ def set_nested_key(mapping, key_path, val, level=None): :type level: collections.abc.Callable """ assert key_path + input_mapping = mapping if level is None: # This should work for dict and most basic structures @@ -1663,6 +1664,7 @@ def set_nested_key(mapping, key_path, val, level=None): mapping = new_level mapping[key_path[-1]] = val + return input_mapping def loopify(items): -- GitLab From eaa69a3649e1eb89f2cac4623b0bbd3161015632 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Mon, 24 Jul 2023 11:50:47 +0100 Subject: [PATCH 04/24] lisa.utils: Normalize DirCache keys FEATURE Normalize cache keys passed to DirCache to ease the burden on the caller. 
--- lisa/utils.py | 43 +++++++++++++++++++++++++++++++++++++++---- 1 file changed, 39 insertions(+), 4 deletions(-) diff --git a/lisa/utils.py b/lisa/utils.py index e11d2f0db..4f5130286 100644 --- a/lisa/utils.py +++ b/lisa/utils.py @@ -3681,10 +3681,33 @@ class DirCache(Loggable): """ Return the token associated with the given ``key``. """ + def normalize(x): + def with_typ(key): + return ( + x.__class__.__module__, + x.__class__.__qualname__, + key, + ) + + if isinstance(x, str): + return x + elif isinstance(x, Mapping): + return with_typ(sorted( + (normalize(k), normalize(v)) + for k, v in x.items() + )) + elif isinstance(x, Iterable): + return with_typ(tuple(map(normalize, x))) + else: + return with_typ(repr(x)) + + key = normalize(key) + key = repr(key).encode('utf-8') + h = hashlib.sha256() - for x in key: - h.update(repr(x).encode('utf-8')) + h.update(key) token = h.hexdigest() + return token def _get_path(self, key): @@ -3715,8 +3738,20 @@ class DirCache(Loggable): :param key: Key of the cache entry. All the components of the key must be isomorphic to their ``repr()``, otherwise the cache will be hit - in cases where it should not. - :type key: tuple(str) + in cases where it should not. For convenience, some types are + normalized: + + * :class:`~collections.abc.Mapping` is only considered for its keys + and values and type name. Keys are sorted are sorted. If the + passed object contains other relevant metadata, it should be + rendered to a string first by the caller. + + * :class:`~collections.abc.Iterable` keys are normalized and the + object is only considered as an iterable. If other relevant + metadata is contained in the object, it should be rendered to a + string by the caller. + + :type key: object .. note:: The return folder must never be modified, as it would lead to races. -- GitLab From 352f402c743c321c927e05756c49ab2a46b398ed Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Mon, 24 Jul 2023 12:33:56 +0100 Subject: [PATCH 05/24] lisa.conf: Add DelegatedLevelKeyDesc FEATURE Add a way of delegating a sub-tree of configuration to another configuration class. This allows splitting an existing configuration into other configuration classes while retaining backward compatibility. --- lisa/conf.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/lisa/conf.py b/lisa/conf.py index 685b7f933..2693c7335 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -654,6 +654,38 @@ class LevelKeyDesc(KeyDescBase, Mapping): return help_ +class DelegatedLevelKeyDesc(LevelKeyDesc): + """ + Level key descriptor that imports the keys from another + :class:`~lisa.conf.MultiSrcConfABC` subclass. + + :param conf: Configuration class to extract keys from. + :type conf: MultiSrcConfABC + + :Variable keyword arguments: Forwarded to :class:`lisa.conf.LevelKeyDesc`. + + This allows embedding a configuration inside another one, mostly to be able + to split a configuration class while preserving backward compatibility. + + .. note:: Only the children keys are taken from the passed level, other + information such as ``value_path`` are ignored and must be set + explicitly. + """ + + def __init__(self, name, help, conf, **kwargs): + # Make a deepcopy to ensure we will not change the parent attribute of + # an existing structure. 
+ level = copy.deepcopy(conf.STRUCTURE) + + children = level.values() + super().__init__( + name=name, + help=help, + children=children, + **kwargs + ) + + class TopLevelKeyDescBase(LevelKeyDesc): """ Top-level key descriptor, which defines the top-level key to use in the -- GitLab From e76c2175c27e24be60066b56bd7064027f55d5ff Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Mon, 17 Jul 2023 15:20:49 +0100 Subject: [PATCH 06/24] lisa.conf: Allow LevelKeyDesc to accept leaf values FEATURE LevelKeyDesc gained a value_path parameter that specifies the relative path to a leaf key that will accept the value if a non-mapping value is set for the level in input configuration. This allows turning a leaf setting into a level in order to expand the configuration options, while retaining backward compatibility on the configuration file format. Note that this change will modify the layout of the configuration so consuming code will need to be updated. --- lisa/conf.py | 157 ++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 110 insertions(+), 47 deletions(-) diff --git a/lisa/conf.py b/lisa/conf.py index 2693c7335..8771ff17a 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -564,12 +564,25 @@ class LevelKeyDesc(KeyDescBase, Mapping): under that level :type children: collections.abc.Sequence + :param value_path: Relative path to a sub-key that will receive assignment + to that level for non-mapping types. This allows turning a leaf key into a + level while preserving backward compatibility, as long as: + + * The key did not accept mapping values, otherwise it would be + ambiguous and is therefore rejected. + + * The old leaf key has a matching new leaf key, that is a sub-key + of the new level key. + + In practice, that allows turning a single knob into a tree of settings. + :type value_path: list(str) or None + Children keys will get this key assigned as a parent when passed to the constructor. """ - def __init__(self, name, help, children): + def __init__(self, name, help, children, value_path=None): # pylint: disable=redefined-builtin super().__init__(name=name, help=help) self.children = children @@ -578,6 +591,29 @@ class LevelKeyDesc(KeyDescBase, Mapping): for key_desc in self.children: key_desc.parent = self + self.value_path = value_path + + @property + def key_desc(self): + path = self.value_path + if path is None: + raise AttributeError(f'{self} does not define a value path for direct assignment') + else: + return get_nested_key(self, path) + + def __getattr__(self, attr): + # If the property raised an exception, __getattr__ is tried so we need + # to fail explicitly in order to avoid infinite recursion + if attr == 'key_desc': + raise AttributeError('recursive key_desc lookup') + else: + try: + key_desc = self.key_desc + except Exception as e: + raise AttributeError(str(e)) + else: + return getattr(key_desc, attr) + @property def _key_map(self): return { @@ -1082,6 +1118,14 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): .. note:: Since the dosctring is interpreted as a template, "{" and "}" characters must be doubled to appear in the final output. + + .. attention:: The layout of the configuration is typically guaranteed to + be backward-compatible in terms of accepted shape of input, but layout + of the configuration might change. This means that the path to a given + key could change as long as old input is still accepted. 
Types of + values can also be widened, so third party code re-using config classes + from :mod:`lisa` might have to evolve along the changes of + configuration. """ @abc.abstractmethod @@ -1274,24 +1318,26 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): return self def format_conf(conf): - conf = conf or {} - # Make sure that mappings won't be too long - max_mapping_len = 10 - key_val = sorted(conf.items()) - if len(key_val) > max_mapping_len: - key_val = key_val[:max_mapping_len] - key_val.append((PlaceHolder(), PlaceHolder())) - - def format_val(val): - if isinstance(val, Mapping): - return format_conf(val) - else: - return NonEscapedValue(val) + if isinstance(conf, Mapping): + # Make sure that mappings won't be too long + max_mapping_len = 10 + key_val = sorted(conf.items()) + if len(key_val) > max_mapping_len: + key_val = key_val[:max_mapping_len] + key_val.append((PlaceHolder(), PlaceHolder())) + + def format_val(val): + if isinstance(val, Mapping): + return format_conf(val) + else: + return NonEscapedValue(val) - return { - key: format_val(val) - for key, val in key_val - } + return { + key: format_val(val) + for key, val in key_val + } + else: + return conf logger = self.logger if logger.isEnabledFor(logging.DEBUG): @@ -1313,37 +1359,50 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): ) def _add_src(self, src, conf, filter_none=False, fallback=False): - conf = conf or {} - # Filter-out None values, so they won't override actual data from - # another source - if filter_none: - conf = { + conf = {} if conf is None else conf + + if isinstance(conf, Mapping): + # Filter-out None values, so they won't override actual data from + # another source + if filter_none: + conf = { + k: v for k, v in conf.items() + if v is not None + } + + # only validate at that level, since sublevel will take care of + # filtering then validating their own level + validated_conf = { k: v for k, v in conf.items() - if v is not None + if not isinstance(self._structure[k], LevelKeyDesc) } - - # only validate at that level, since sublevel will take care of - # filtering then validating their own level - validated_conf = { - k: v for k, v in conf.items() - if not isinstance(self._structure[k], LevelKeyDesc) - } - self._structure.validate_val(validated_conf) - - for key, val in conf.items(): - key_desc = self._structure[key] - # Dispatch the nested mapping to the right sublevel - if isinstance(key_desc, LevelKeyDesc): - # sublevels have already been initialized when the root object - # was created. - self._sublevel_map[key]._add_src(src, val, filter_none=filter_none, fallback=fallback) - # Derived keys cannot be set, since they are purely derived from - # other keys - elif isinstance(key_desc, DerivedKeyDesc): - raise ValueError(f'Cannot set a value for a derived key "{key_desc.qualname}"', key_desc.qualname) - # Otherwise that is a leaf value that we store at that level + self._structure.validate_val(validated_conf) + + for key, val in conf.items(): + key_desc = self._structure[key] + # Dispatch the nested mapping to the right sublevel + if isinstance(key_desc, LevelKeyDesc): + # sublevels have already been initialized when the root object + # was created. 
+ self._sublevel_map[key]._add_src(src, val, filter_none=filter_none, fallback=fallback) + # Derived keys cannot be set, since they are purely derived from + # other keys + elif isinstance(key_desc, DerivedKeyDesc): + raise ValueError(f'Cannot set a value for a derived key "{key_desc.qualname}"', key_desc.qualname) + # Otherwise that is a leaf value that we store at that level + else: + self._key_map.setdefault(key, {})[src] = val + else: + # Non-mapping value are allowed if the level defines a subkey + # to assign to. We then craft a conf that sets that specific + # value. + key_desc = self._structure + value_path = key_desc.value_path + if value_path is None: + raise ValueError(f'Cannot set a value for the key level "{key_desc.qualname}"', key_desc.qualname) else: - self._key_map.setdefault(key, {})[src] = val + conf = set_nested_key({}, list(value_path), conf) + self._add_src(src, conf, filter_none=filter_none, fallback=fallback) if src not in self._src_prio: if fallback: @@ -2034,7 +2093,11 @@ class Configurable(abc.ABC): ':param {param}: {help}\n:type {param}: {type}\n'.format( param=param, help=key_desc.help, - type=' or '.join(get_cls_name(t) for t in key_desc.classinfo), + type=( + 'collections.abc.Mapping' + if isinstance(key_desc, LevelKeyDesc) else + ' or '.join(get_cls_name(t) for t in key_desc.classinfo) + ), ) for param, key_desc in cls._get_param_key_desc_map().items() -- GitLab From 9eef3b5016756dc76b264bc529887e94c28d6d99 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Tue, 25 Jul 2023 11:59:55 +0100 Subject: [PATCH 07/24] lisa.conf: Simplify recursion detection in DerivedKeyDesc Remove the use of thread-local storage. --- lisa/conf.py | 46 +++++++++++++++++----------------------------- 1 file changed, 17 insertions(+), 29 deletions(-) diff --git a/lisa/conf.py b/lisa/conf.py index 8771ff17a..8ef28dc04 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -69,8 +69,8 @@ class DeferredValue: key = key_desc.qualname if key_desc else '' raise KeyComputationRecursionError(f'Recursion error while computing deferred value for key: {key}', key) - self._is_computing = True try: + self._is_computing = True return self.callback(*self.args, **self.kwargs) finally: self._is_computing = False @@ -402,17 +402,7 @@ class DerivedKeyDesc(KeyDesc): super().__init__(name=name, help=help, classinfo=classinfo, newtype=newtype) self._base_key_paths = base_key_paths self._compute = compute - self._compute_stack_tls = threading.local() - - def _get_compute_stack(self, conf): - try: - stack = self._compute_stack_tls.stack - except AttributeError: - stack = weakref.WeakKeyDictionary() - self._compute_stack_tls.stack = stack - - key = conf._as_hashable - return stack.setdefault(key, []) + self._is_computing_in = set() @property def help(self): @@ -520,27 +510,25 @@ class DerivedKeyDesc(KeyDesc): return True def compute_val(self, conf, eval_deferred=True): - stack = self._get_compute_stack(conf) - - if stack: + conf_id = id(conf) + if conf_id in self._is_computing_in: key = self.qualname raise KeyComputationRecursionError(f'Recursion error while computing derived key: {key}', key) else: - stack.append(self) - - try: - # If there is non evaluated base, transitively return a closure rather - # than computing now. 
- if not eval_deferred and self.get_non_evaluated_base_keys(conf): - val = DeferredValue(self.compute_val, conf=conf, eval_deferred=True) - else: - base_conf = self._get_base_conf(conf) - val = self._compute(base_conf) - self.validate_val(val) - finally: - stack.pop() + try: + self._is_computing_in.add(conf_id) + # If there is non evaluated base, transitively return a closure rather + # than computing now. + if not eval_deferred and self.get_non_evaluated_base_keys(conf): + val = DeferredValue(self.compute_val, conf=conf, eval_deferred=True) + else: + base_conf = self._get_base_conf(conf) + val = self._compute(base_conf) + self.validate_val(val) + finally: + self._is_computing_in.remove(conf_id) - return val + return val def get_src(self, conf): return ','.join( -- GitLab From afd13c54f783eddc70029357c4dbd485fb7724fa Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Tue, 25 Jul 2023 16:51:59 +0100 Subject: [PATCH 08/24] lisa.conf: Add MultiSrcConf.add_src(inplace=True) parameter FEATURE Allow adding a configuration source without mutating the original configuration by returning a modified copy. --- lisa/conf.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/lisa/conf.py b/lisa/conf.py index 8ef28dc04..a28a314c3 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -1258,7 +1258,7 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): conf.force_src_nested(src_override) return conf - def add_src(self, src, conf, filter_none=False, fallback=False): + def add_src(self, src, conf, filter_none=False, fallback=False, inplace=True): """ Add a source of configuration. @@ -1280,6 +1280,10 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): priority override is setup. :type fallback: bool + :param inplace: If ``True``, the object is modified. If ``False``, a + mutated copy is returned and the original object is left unmodified. + :type inplace: bool + This method provides a way to update the configuration, by importing a mapping as a new source. """ @@ -1341,6 +1345,8 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): filename=filename if filename else '', lineno=lineno if lineno else '', )) + + self = self if inplace else copy.copy(self) return self._add_src( src, conf, filter_none=filter_none, fallback=fallback @@ -1398,6 +1404,8 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): else: self._src_prio.insert(0, src) + return self + def set_default_src(self, src_prio): """ Set the default source priority list. -- GitLab From 469cce56121165db3c8898ee4c6036a9a94b7601 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Tue, 25 Jul 2023 18:05:19 +0100 Subject: [PATCH 09/24] lisa.conf: Use unicode for MultiSrcConf display FEATURE Use unicode box characters to pretty-print MultiSrcConf. --- lisa/conf.py | 41 ++++++++++++++++++++++++----------------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/lisa/conf.py b/lisa/conf.py index a28a314c3..ac0e1f20e 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -158,12 +158,15 @@ class KeyDescBase(abc.ABC): return self.parent.path + curr @abc.abstractmethod - def get_help(self, style=None): + def get_help(self, style=None, last=False): """ Get a help message describing the key. :param style: When "rst", ResStructuredText formatting may be applied :param style: str + + :param last: ``True`` if this is the last item in a list. 
+ :type last: bool """ @abc.abstractmethod @@ -281,7 +284,7 @@ class KeyDesc(KeyDescBase): if not isinstance(val, DeferredValue): checkinstance(key, val, classinfo) - def get_help(self, style=None): + def get_help(self, style=None, last=False): base_fmt = '{prefix}{key} ({classinfo}){prefixed_help}.' if style == 'rst': prefix = '* ' @@ -292,7 +295,7 @@ class KeyDesc(KeyDescBase): key = '' fmt = '{key}{help}\ntype: {classinfo}' else: - prefix = '|- ' + prefix = ('└' if last else '├') + ' ' key = self.name fmt = base_fmt @@ -652,9 +655,9 @@ class LevelKeyDesc(KeyDescBase, Mapping): for key, val in conf.items(): self[key].validate_val(val) - def get_help(self, style=None): + def get_help(self, style=None, last=False): idt = self.INDENTATION - prefix = '*' if style == 'rst' else '+-' + prefix = '*' if style == 'rst' else ('└' if last else '├') # Nasty hack: adding an empty ResStructuredText comment between levels # of nested list avoids getting extra blank line between list items. # That prevents ResStructuredText from thinking each item must be a @@ -668,9 +671,13 @@ class LevelKeyDesc(KeyDescBase, Mapping): help=' ' + self.help if self.help else '', ) nl = '\n' + idt + last = len(self.children) - 1 help_ += nl.join( - key_desc.get_help(style=style).replace('\n', nl) - for key_desc in self.children + key_desc.get_help( + style=style, + last=i == last, + ).replace('\n', nl) + for i, key_desc in enumerate(self.children) ) if style == 'rst': help_ += '\n\n..\n' @@ -737,11 +744,11 @@ class TopLevelKeyDescBase(LevelKeyDesc): def _check_name(cls, name): pass - def get_help(self, style=None): + def get_help(self, style=None, **kwargs): if style == 'yaml': return self.help else: - return super().get_help(style=style) + return super().get_help(style=style, **kwargs) class TopLevelKeyDesc(TopLevelKeyDescBase): @@ -1778,10 +1785,14 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): yield key, val - for k, v in itertools.chain( + items = list(itertools.chain( self.items(eval_deferred=eval_deferred), derived_items() - ): + )) + _last = len(items) - 1 + + for i, (k, v) in enumerate(items): + last = i == _last v_cls = type(v) key_desc = self._structure[k] @@ -1800,12 +1811,8 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): else: v = ' ' + v - if is_sublevel: - k_str = '+- ' + k - v_prefix = ' ' - else: - k_str = '|- ' + k - v_prefix = '| ' + k_str = ('└' if last else '├') + ' ' + k + v_prefix = ' ' if is_sublevel else '| ' v = v.replace('\n', '\n' + v_prefix) -- GitLab From 0d8e6625a3ed944d6c14d782e82c2ab245a52ef8 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Mon, 17 Jul 2023 15:09:15 +0100 Subject: [PATCH 10/24] lisa._kmod: Add build-env settings FEATURE Allow settings for build-env, such as alpine version and package list to install. 
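
A minimal sketch of how the new settings can be provided through
lisa.target.Target (the host address, credentials and settings values
below are placeholders):

    from lisa.target import Target

    target = Target(
        kind='linux',
        host='192.168.0.1',
        username='root',
        password='root',
        kernel_src='/path/to/kernel/tree/',
        # Build environment kind, as before.
        kmod_build_env='alpine',
        # New: settings specific to each build environment.
        kmod_build_env_settings={
            'alpine': {
                'version': '3.18.0',
                # Optionally replace the package list installed by LISA:
                # 'packages': ['bash', 'make', 'clang'],
            },
            # For the "host" build env, a folder can be prepended to PATH:
            # 'host': {'toolchain-path': '/opt/toolchain/bin'},
        },
    )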
--- lisa/_kmod.py | 318 ++++++++++++++++++++++++++++++++++--------------- lisa/target.py | 27 ++++- 2 files changed, 244 insertions(+), 101 deletions(-) diff --git a/lisa/_kmod.py b/lisa/_kmod.py index 7a5bbb262..98bc15103 100644 --- a/lisa/_kmod.py +++ b/lisa/_kmod.py @@ -131,6 +131,7 @@ import hashlib from operator import itemgetter from shlex import quote from io import BytesIO +from collections.abc import Mapping from elftools.elf.elffile import ELFFile @@ -138,7 +139,7 @@ from devlib.target import KernelVersion, TypedKernelConfig, KernelConfigTristate from devlib.host import LocalConnection from devlib.exception import TargetStableError -from lisa.utils import nullcontext, Loggable, LISA_CACHE_HOME, checksum, DirCache, chain_cm, memoized, LISA_HOST_ABI, subprocess_log, SerializeViaConstructor, destroyablecontextmanager, ContextManagerExit, ignore_exceps +from lisa.utils import nullcontext, Loggable, LISA_CACHE_HOME, checksum, DirCache, chain_cm, memoized, LISA_HOST_ABI, subprocess_log, SerializeViaConstructor, destroyablecontextmanager, ContextManagerExit, ignore_exceps, get_nested_key from lisa._assets import ASSETS_PATH, HOST_PATH, ABI_BINARIES_FOLDER from lisa._unshare import ensure_root import lisa._git as git @@ -200,12 +201,95 @@ def _kbuild_make_cmd(path, targets, cc, make_vars): @destroyablecontextmanager -def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overlay_backend=None): +def _make_build_chroot(cc, make_vars, bind_paths=None, version=None, overlay_backend=None, packages=None): """ Create a chroot folder ready to be used to build a kernel. """ logger = logging.getLogger(f'{__name__}.alpine_chroot') + def is_clang(cc): + return cc.startswith('clang') + + def default_packages(cc): + # Default packages needed to compile a linux kernel module + packages = [ + 'bash', + 'binutils', + 'coreutils', + 'diffutils', + 'make', + 'file', + 'gawk', + 'sed', + 'musl-dev', + 'elfutils-dev', + 'gmp-dev', + 'libffi-dev', + 'openssl-dev', + 'linux-headers', + 'musl', + 'bison', + 'flex', + 'python3', + ] + + if is_clang(cc): + try: + _, version = cc.split('-', 1) + except ValueError: + # apk understands "clang" even if there is no clang package + version = '' + + packages.extend([ + 'lld', + f'llvm{version}', + f'clang{version}', + ]) + else: + packages.append(cc) + + return packages + + if (version, packages) != (None, None) and None in (version, packages): + raise ValueError('Both version and packages need to be set or none of them') + else: + version = version or '3.18.0' + packages = default_packages(cc) if packages is None else packages + + make_vars = make_vars or {} + target_arch = make_vars.get('ARCH', LISA_HOST_ABI) + + use_qemu = ( + # Since clang binaries support cross compilation without issues, + # there is no need to use QEMU that will slow everything down. 
+ (not is_clang(cc)) and + target_arch != LISA_HOST_ABI + ) + + chroot_arch = target_arch if use_qemu else LISA_HOST_ABI + + bind_paths = { + **dict(bind_paths or {}), + ABI_BINARIES_FOLDER[chroot_arch]: '/usr/local/bin' + } + + with _make_alpine_chroot( + version=version, + arch=chroot_arch, + packages=packages, + bind_paths=bind_paths, + overlay_backend=overlay_backend, + ) as chroot: + try: + yield chroot + except ContextManagerExit: + pass + + +@destroyablecontextmanager +def _make_alpine_chroot(version, packages=None, arch=None, bind_paths=None, overlay_backend=None): + logger = logging.getLogger(f'{__name__}.alpine_chroot') + def mount_binds(chroot, bind_paths, mount=True): for src, dst in bind_paths.items(): dst = Path(dst).resolve() @@ -219,7 +303,7 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla _subprocess_log(cmd, logger=logger, level=logging.DEBUG) def populate(key, path, init_cache=True): - version, arch, packages, use_qemu = key + version, alpine_arch, packages = key path = path.resolve() # Packages have already been installed, so we can speed things up a @@ -228,10 +312,10 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla packages = packages.split(' ') _version = version.split('.') - minor = '.'.join(_version[:-1]) + minor = '.'.join(_version[:2]) url = _ALPINE_ROOTFS_URL.format( minor=minor, - arch=arch, + arch=alpine_arch, version=version, ) @@ -249,61 +333,23 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla shutil.copy('/etc/resolv.conf', path / 'etc' / 'resolv.conf') - if packages: - cmd = _make_chroot_cmd(path, ['apk', 'add', *packages]) - _subprocess_log(cmd, logger=logger, level=logging.DEBUG) + def install_packages(packages): + if packages: + cmd = _make_build_chroot_cmd(path, ['apk', 'add', *sorted(set(packages))]) + _subprocess_log(cmd, logger=logger, level=logging.DEBUG) - packages = [ - 'bash', - 'binutils', - 'coreutils', - 'diffutils', - 'make', - 'file', - 'gawk', - 'sed', - 'musl-dev', - 'elfutils-dev', - 'gmp-dev', - 'libffi-dev', - 'openssl-dev', - 'linux-headers', - 'musl', - 'bison', - 'flex', - 'python3', - ] - make_vars = make_vars or {} + install_packages(packages) - is_clang = cc.startswith('clang') - if is_clang: - try: - _, version = cc.split('-', 1) - except ValueError: - # apk understands "clang" even if there is no clang package - version = '' - - packages.extend([ - 'lld', - f'llvm{version}', - f'clang{version}', - ]) - else: - packages.append(cc) - - target_arch = make_vars.get('ARCH', LISA_HOST_ABI) + # Ensure we have a full version number with 3 components + version = version.split('.') + version = version + ['0' for _ in range(3 - len(version))] + version = '.'.join(version) - use_qemu = ( - # Since clang binaries support cross compilation without issues, - # there is no need to use QEMU that will slow everything down. 
- (not is_clang) and - target_arch != LISA_HOST_ABI - ) + arch = arch or LISA_HOST_ABI + use_qemu = arch != LISA_HOST_ABI - chroot_arch = target_arch if use_qemu else LISA_HOST_ABI - - qemu_msg = ' using QEMU userspace emulation' if use_qemu else '' - logger.debug(f'Using Alpine v{alpine_version} chroot with architecture {chroot_arch}{qemu_msg}.') + qemu_msg = f' using QEMU userspace emulation to emulate {arch} on {LISA_HOST_ABI}' if use_qemu else '' + logger.debug(f'Using Alpine v{version} chroot with architecture {arch}{qemu_msg}.') # Check that QEMU userspace emulation is setup if we need it if use_qemu: @@ -311,22 +357,15 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla 'arm64': 'aarch64', 'armeabi': 'arm', 'armv7': 'arm', - }.get(chroot_arch, chroot_arch) + }.get(arch, arch) binfmt_path = Path('/proc/sys/fs/binfmt_misc/', f'qemu-{qemu_arch}') if not binfmt_path.exists(): raise ValueError(f'Alpine chroot is setup for {qemu_arch} architecture but QEMU userspace emulation is not installed on the host (missing {binfmt_path})') - - # Add LISA static binaries inside the chroot - bind_paths = { - **dict(bind_paths or {}), - ABI_BINARIES_FOLDER[chroot_arch]: '/usr/local/bin' - } - alpine_arch = { 'arm64': 'aarch64', 'armeabi': 'armv7', - }.get(chroot_arch, chroot_arch) + }.get(arch, arch) dir_cache = DirCache( category='alpine_chroot', @@ -334,10 +373,9 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla ) key = ( - alpine_version, + version, alpine_arch, - ' '.join(sorted(packages)), - use_qemu, + ' '.join(sorted(packages or [])), ) cache_path = dir_cache.get_entry(key) with _overlay_folders([cache_path], backend=overlay_backend) as path: @@ -351,7 +389,7 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla mount_binds(path, bind_paths, mount=False) -def _make_chroot_cmd(chroot, cmd): +def _make_build_chroot_cmd(chroot, cmd): chroot = Path(chroot).resolve() cmd = ' '.join(map(quote, map(str, cmd))) # Source /etc/profile to get sane defaults for e.g. PATH. Otherwise, we @@ -687,6 +725,9 @@ class KernelTree(Loggable, SerializeViaConstructor): * ``None``: defaults to ``host``. :type build_env: str or None + :param build_env_settings: Settings for the chosen build environment. + :type build_env_settings: collections.abc.Mapping + :param overlay_backend: Backend used to create folder overlays. One of: * ``overlayfs``: Use overlayfs Linux filesystem. 
This is the fastest @@ -708,9 +749,9 @@ class KernelTree(Loggable, SerializeViaConstructor): # On top of that, the kernel does not handle clang < 10.0.1 _MIN_CLANG_VERSION = 11 - def __init__(self, path_cm, cc, make_vars, build_env=None, overlay_backend=None): + def __init__(self, path_cm, cc, make_vars, build_env=None, overlay_backend=None, build_env_settings=None): self._make_path_cm = path_cm - self.build_env = self._resolve_build_env(build_env) + self.build_env, self.build_env_settings = self._resolve_build_env(build_env, build_env_settings) self.make_vars = make_vars or {} self.overlay_backend = self._resolve_overlay_backend(overlay_backend) self._path_cm = None @@ -719,8 +760,11 @@ class KernelTree(Loggable, SerializeViaConstructor): self.cc = cc @staticmethod - def _resolve_build_env(build_env): - return build_env or 'host' + def _resolve_build_env(build_env, build_env_settings): + return ( + build_env or 'host', + build_env_settings or {} + ) @staticmethod def _resolve_overlay_backend(overlay_backend): @@ -849,7 +893,7 @@ class KernelTree(Loggable, SerializeViaConstructor): @classmethod - def _prepare_tree(cls, path, cc, make_vars, build_env, apply_overlays, overlay_backend): + def _prepare_tree(cls, path, cc, make_vars, build_env, apply_overlays, overlay_backend, build_env_settings): logger = cls.get_logger() path = Path(path) @@ -910,11 +954,22 @@ class KernelTree(Loggable, SerializeViaConstructor): if build_env == 'alpine': + settings = (build_env_settings or {}).get('alpine', {}) + version = settings.get('version', None) + alpine_packages = settings.get('packages', None) + @contextlib.contextmanager def cmd_cm(cmds): - with _make_chroot(cc=cc, bind_paths=bind_paths, make_vars=make_vars, overlay_backend=overlay_backend) as chroot: + with _make_build_chroot( + cc=cc, + bind_paths=bind_paths, + make_vars=make_vars, + overlay_backend=overlay_backend, + version=version, + packages=alpine_packages, + ) as chroot: yield [ - _make_chroot_cmd(chroot, cmd) if cmd else None + _make_build_chroot_cmd(chroot, cmd) if cmd else None for cmd in cmds ] else: @@ -941,7 +996,7 @@ class KernelTree(Loggable, SerializeViaConstructor): @classmethod - def _process_make_vars(cls, build_env, make_vars, abi=None): + def _process_make_vars(cls, build_env, build_env_settings, make_vars, abi=None): env = { k: str(v) for k, v in ( @@ -969,7 +1024,7 @@ class KernelTree(Loggable, SerializeViaConstructor): arch = _any_abi_to_kernel_arch(abi) make_vars['ARCH'] = arch - make_vars, cc = cls._resolve_toolchain(abi, make_vars, build_env) + make_vars, cc = cls._resolve_toolchain(abi, make_vars, build_env, build_env_settings) if build_env == 'alpine': if cc.startswith('clang'): @@ -991,9 +1046,34 @@ class KernelTree(Loggable, SerializeViaConstructor): return (make_vars, cc) @classmethod - def _check_cc_version(cls, cc): + def _make_toolchain_env(cls, toolchain_path, env=None): + env = env or os.environ + return { + **os.environ, + 'PATH': ':'.join(( + toolchain_path, + os.environ.get('PATH', '') + )) + } + + @classmethod + def _make_toolchain_env_from_settings(cls, build_env_settings, env=None): + build_env_settings = build_env_settings or {} + try: + toolchain_path = get_nested_key(build_env_settings, ['host', 'toolchain-path']) + except KeyError: + env = os.environ + else: + env = cls._make_toolchain_env(toolchain_path, env=env) + + return env + + + @classmethod + def _check_cc_version(cls, cc, toolchain_path): if cc == 'clang': - version = subprocess.check_output([cc, '--version']) + env = 
cls._make_toolchain_env(toolchain_path) + version = subprocess.check_output([cc, '--version'], env=env) m = re.match(rb'.*clang version ([0-9]+)\.', version) if m: major = int(m.group(1)) @@ -1005,9 +1085,10 @@ class KernelTree(Loggable, SerializeViaConstructor): return False @classmethod - def _resolve_toolchain(cls, abi, make_vars, build_env): + def _resolve_toolchain(cls, abi, make_vars, build_env, build_env_settings): logger = cls.get_logger() - build_env = KernelTree._resolve_build_env(build_env) + build_env, build_env_settings = KernelTree._resolve_build_env(build_env, build_env_settings) + env = cls._make_toolchain_env_from_settings(build_env_settings) if abi == LISA_HOST_ABI: toolchain = None @@ -1075,6 +1156,8 @@ class KernelTree(Loggable, SerializeViaConstructor): # Only run the check on host build env, as other build envs are # expected to be correctly configured. if build_env == 'host' and commands: + toolchain_path = build_env_settings.get('host', {}).get('toolchain-path', None) + for cc, cmd in commands.items(): pretty_cmd = ' '.join(cmd) try: @@ -1082,7 +1165,8 @@ class KernelTree(Loggable, SerializeViaConstructor): cmd, # Most basic compiler input that will not do anything. input=b';', - stderr=subprocess.STDOUT + stderr=subprocess.STDOUT, + env=env, ) except subprocess.CalledProcessError as e: logger.debug(f'Checking {cc} compiler: {pretty_cmd} failed with:\n{e.output.decode()}') @@ -1091,7 +1175,7 @@ class KernelTree(Loggable, SerializeViaConstructor): logger.debug(f'Checking {cc} compiler: {e}') continue else: - if cls._check_cc_version(cc): + if cls._check_cc_version(cc, toolchain_path): break else: raise ValueError(f'Could not find a working toolchain for CROSS_COMPILE={toolchain}') @@ -1114,7 +1198,7 @@ class KernelTree(Loggable, SerializeViaConstructor): @classmethod @SerializeViaConstructor.constructor - def from_target(cls, target, tree_path=None, make_vars=None, cache=True, build_env=None, overlay_backend=None): + def from_target(cls, target, tree_path=None, make_vars=None, cache=True, build_env=None, overlay_backend=None, build_env_settings=None): """ Build the tree from the given :class:`lisa.target.Target`. @@ -1160,11 +1244,15 @@ class KernelTree(Loggable, SerializeViaConstructor): :param overlay_backend: See :class:`lisa._kmod.KernelTree`. :type overlay_backend: str or None + + :param build_env_settings: See :class:`lisa._kmod.KernelTree`. 
+ :type build_env_settings: collections.abc.Mapping or None """ make_vars, cc = cls._process_make_vars( make_vars=make_vars, abi=target.plat_info['abi'], build_env=build_env, + build_env_settings=build_env_settings, ) kernel_info = target.plat_info['kernel'] @@ -1245,6 +1333,7 @@ class KernelTree(Loggable, SerializeViaConstructor): tree_path=tree_path, build_env=build_env, overlay_backend=overlay_backend, + build_env_settings=build_env_settings, ) as tree: yield tree._to_spec() @@ -1298,6 +1387,7 @@ class KernelTree(Loggable, SerializeViaConstructor): make_vars=make_vars, build_env=build_env, overlay_backend=overlay_backend, + build_env_settings=build_env_settings, ) as tree: yield tree._to_spec() @@ -1345,12 +1435,13 @@ class KernelTree(Loggable, SerializeViaConstructor): cc=cc, make_vars=make_vars, build_env=build_env, + build_env_settings=build_env_settings, overlay_backend=overlay_backend, ) @classmethod @SerializeViaConstructor.constructor - def from_path(cls, path, make_vars=None, cache=True, build_env=None): + def from_path(cls, path, make_vars=None, cache=True, build_env=None, build_env_settings=None): """ Build a tree from the given ``path`` to sources. """ @@ -1359,11 +1450,12 @@ class KernelTree(Loggable, SerializeViaConstructor): make_vars=make_vars, cache=cache, build_env=build_env, + build_env_settings=build_env_settings, ) @classmethod @SerializeViaConstructor.constructor - def from_overlays(cls, version=None, tree_path=None, overlays=None, make_vars=None, cache=True, build_env=None, overlay_backend=None): + def from_overlays(cls, version=None, tree_path=None, overlays=None, make_vars=None, cache=True, build_env=None, overlay_backend=None, build_env_settings=None): """ Build a tree from the given overlays, to be applied on a source tree. @@ -1378,8 +1470,9 @@ class KernelTree(Loggable, SerializeViaConstructor): make_vars, cc = cls._process_make_vars( make_vars=make_vars, build_env=build_env, + build_env_settings=build_env_settings, ) - build_env = KernelTree._resolve_build_env(build_env) + build_env, build_env_settings = KernelTree._resolve_build_env(build_env, build_env_settings) overlay_backend = KernelTree._resolve_overlay_backend(overlay_backend) if tree_path: @@ -1418,6 +1511,7 @@ class KernelTree(Loggable, SerializeViaConstructor): cc=cc, make_vars=make_vars, build_env=build_env, + build_env_settings=build_env_settings, apply_overlays=functools.partial(apply_overlays, path), overlay_backend=overlay_backend, ) @@ -1438,6 +1532,18 @@ class KernelTree(Loggable, SerializeViaConstructor): # * All the variables passed to "make". This is very important # as things such as a toolchain change can make a kernel tree # unsuitable for compiling a module. + def mapping_key(mapping): + if isinstance(mapping, Mapping): + return [ + # We need to take checksum the make variables + # as well, as it can influence the kernel tree + # a great deal (e.g. changing toolchain) + (mapping_key(k), mapping_key(v)) + for k, v in sorted((mapping or {}).items()) + ] + else: + return str(mapping) + key = ( sorted( overlay._get_checksum() @@ -1447,13 +1553,12 @@ class KernelTree(Loggable, SerializeViaConstructor): str(build_env), str(overlay_backend), str(cc), - ] + [ - # We need to take checksum the make variables - # as well, as it can influence the kernel tree - # a great deal (e.g. changing toolchain) - f'{k}={v}' - for k, v in sorted((make_vars or {}).items()) - ] + ] + + # We need to take checksum the make variables + # as well, as it can influence the kernel tree + # a great deal (e.g. 
changing toolchain) + mapping_key(make_vars) + + mapping_key(build_env_settings) ) def populate(key, path): @@ -1534,6 +1639,7 @@ class KernelTree(Loggable, SerializeViaConstructor): cc=cc, make_vars=make_vars, build_env=build_env, + build_env_settings=build_env_settings, ) @classmethod @@ -1675,6 +1781,7 @@ class KmodSrc(Loggable): # "inherit" the build env from the KernelTree as we must use the same # environment as what was used for "make modules_prepare" build_env = kernel_tree.build_env + build_env_settings = kernel_tree.build_env_settings bind_paths = {tree_path: tree_path} logger = self.logger @@ -1722,9 +1829,20 @@ class KmodSrc(Loggable): return filenames[0] if build_env == 'alpine': + settings = (build_env_settings or {}).get('alpine', {}) + alpine_version = settings.get('version', None) + alpine_packages = settings.get('packages', None) + @contextlib.contextmanager def cmd_cm(): - with _make_chroot(cc=cc, bind_paths=bind_paths, make_vars=make_vars, overlay_backend=overlay_backend) as chroot: + with _make_build_chroot( + cc=cc, + bind_paths=bind_paths, + make_vars=make_vars, + overlay_backend=overlay_backend, + version=alpine_version, + packages=alpine_packages, + ) as chroot: # Do not use a CM here to avoid choking on permission # issues. Since the chroot itself will be entirely # removed it's not a problem. @@ -1734,7 +1852,7 @@ class KmodSrc(Loggable): mod_path=f'/{mod_path.relative_to(chroot)}', make_vars=make_vars, ) - yield (mod_path, _make_chroot_cmd(chroot, cmd), {}) + yield (mod_path, _make_build_chroot_cmd(chroot, cmd), {}) else: @contextlib.contextmanager def cmd_cm(): @@ -1744,7 +1862,9 @@ class KmodSrc(Loggable): mod_path=mod_path, make_vars=make_vars, ) - yield (mod_path, cmd, {'PATH': HOST_PATH}) + + env = self._make_toolchain_env_from_settings(build_env_settings, env={'PATH': HOST_PATH}) + yield (mod_path, cmd, {'PATH': env['PATH']}) with cmd_cm() as (mod_path, cmd, env): mod_path = Path(mod_path) @@ -1896,7 +2016,7 @@ class DynamicKmod(Loggable): # * the kernel tree # * the make variables # * the module name - return (kernel_checksum, kernel_tree.build_env, src.checksum, *var_tokens) + return (kernel_checksum, kernel_tree.build_env, kernel_tree.build_env_settings, src.checksum, var_tokens) def get_bin(kernel_tree): return src.compile( diff --git a/lisa/target.py b/lisa/target.py index 4be9daacd..19c7cccb2 100644 --- a/lisa/target.py +++ b/lisa/target.py @@ -160,7 +160,27 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): LevelKeyDesc('kernel', 'kernel information', ( KeyDesc('src', 'Path to kernel source tree matching the kernel running on the target used to build modules', [str, None]), LevelKeyDesc('modules', 'kernel modules', ( - KeyDesc('build-env', 'Environment used to build modules. Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (host system)', [str]), + LevelKeyDesc( + 'build-env', 'Settings specific to a given build-env ', + ( + KeyDesc('kind', 'Environment used to build modules. Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [str]), + # At this level we have all the build-env specific + # parameters. Generic parameters go straight into the + # "modules" level. Most people will not need to use the + # "settings" level. 
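+                        # Thanks to value_path=('kind',) below, both of the
+                        # following shapes are accepted in a target
+                        # configuration (values are illustrative):
+                        #
+                        #   build-env: alpine
+                        #
+                        #   build-env:
+                        #       kind: alpine
+                        #       settings:
+                        #           alpine:
+                        #               version: 3.18.0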
+ LevelKeyDesc('settings', 'build-env settings', ( + LevelKeyDesc('host', 'Settings for host build-env', ( + KeyDesc('toolchain-path', 'Folder to prepend to PATH when executing toolchain command in the host build env', [str]), + )), + LevelKeyDesc('alpine', 'Settings for Alpine linux build-env', ( + KeyDesc('version', 'Alpine linux version, e.g. 3.18.0', [None, str]), + KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, TypedList[str]]), + )), + )), + ), + value_path=('kind',), + ), + # At this level we have generic parameters that apply regardless of the build environment KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [TypedDict[str, object]]), KeyDesc('overlay-backend', 'Backend to use for overlaying folders while building modules. Can be "overlayfs" (overlayfs filesystem, recommended) or "copy (plain folder copy)', [str]), )), @@ -260,7 +280,8 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): 'wait_boot_timeout': ['wait-boot', 'timeout'], 'kernel_src': ['kernel', 'src'], - 'kmod_build_env': ['kernel', 'modules', 'build-env'], + 'kmod_build_env': ['kernel', 'modules', 'build-env', 'kind'], + 'kmod_build_env_settings': ['kernel', 'modules', 'build-env', 'settings'], 'kmod_make_vars': ['kernel', 'modules', 'make-variables'], 'kmod_overlay_backend': ['kernel', 'modules', 'overlay-backend'], } @@ -271,6 +292,7 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, kernel_src=None, kmod_build_env=None, kmod_make_vars=None, kmod_overlay_backend=None, devlib_max_async=None, + kmod_build_env_settings=None, ): # Set it temporarily to avoid breaking __getattr__ self._devlib_loadable_modules = set() @@ -285,6 +307,7 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): self._kmod_tree_spec = dict( tree_path=kernel_src, build_env=kmod_build_env, + build_env_settings=kmod_build_env_settings, make_vars=kmod_make_vars, overlay_backend=kmod_overlay_backend, ) -- GitLab From 221f99abb34a2047d5d197b39b04f4c3265107e8 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Mon, 24 Jul 2023 11:29:54 +0100 Subject: [PATCH 11/24] lisa.target: Split module build conf FEATURE Split and enrich module build conf. Build environments can now take parameters such as Alpine linux version and a toolchain path for host environment. 
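
As a sketch, the build environment configuration handled by the new
internal _KernelBuildEnvConf class accepts either a bare string such as
'alpine' (kept for backward compatibility and treated as
{'build-env': 'alpine'}) or a mapping of the following shape (values are
illustrative):

    build_conf = {
        'build-env': 'alpine',
        'build-env-settings': {
            'alpine': {
                'version': '3.18.0',
                # 'packages': [...],
            },
            'host': {
                'toolchain-path': '/opt/toolchain/bin',
            },
        },
        'make-variables': {'CC': 'clang'},
        'overlay-backend': 'overlayfs',
    }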
--- lisa/_kmod.py | 487 ++++++++++++++++++++++++------------------------- lisa/conf.py | 5 +- lisa/target.py | 108 ++++++----- 3 files changed, 295 insertions(+), 305 deletions(-) diff --git a/lisa/_kmod.py b/lisa/_kmod.py index 98bc15103..bef07c807 100644 --- a/lisa/_kmod.py +++ b/lisa/_kmod.py @@ -38,7 +38,6 @@ Here is an example of such module:: lazy_platinfo=True, kernel_src='/path/to/kernel/tree/', kmod_build_env='alpine', - # kmod_make_vars={'CC': 'clang'}, ) # Example module from: https://tldp.org/LDP/lkmpg/2.6/html/x279.html @@ -143,17 +142,32 @@ from lisa.utils import nullcontext, Loggable, LISA_CACHE_HOME, checksum, DirCach from lisa._assets import ASSETS_PATH, HOST_PATH, ABI_BINARIES_FOLDER from lisa._unshare import ensure_root import lisa._git as git +from lisa.conf import SimpleMultiSrcConf, TopLevelKeyDesc, LevelKeyDesc, KeyDesc +from lisa._generic import TypedList, TypedDict, OneOf _ALPINE_ROOTFS_URL = 'https://dl-cdn.alpinelinux.org/alpine/v{minor}/releases/{arch}/alpine-minirootfs-{version}-{arch}.tar.gz' -def _any_abi_to_kernel_arch(abi): +def _abi_to_kernel_arch(abi): + """ + Convert a devlib ABI into a valid ARCH= for the kernel + """ return { 'armeabi': 'arm', - 'armv7': 'arm', - 'aarch64': 'arm64', }.get(abi, abi) +def _kernel_arch_to_abi(arch): + """ + Convert a kernel arch to a devlib ABI + """ + if arch == 'arm64': + return 'arm64' + elif 'arm' in arch: + return 'armeabi' + else: + return arch + + def _url_path(url): return PurePosixPath( urllib.parse.unquote( @@ -195,13 +209,13 @@ def _kbuild_make_cmd(path, targets, cc, make_vars): var_cc = make_vars.get('CC', cc) if var_cc != cc: pretty_cmd = ' '.join(map(quote, map(str, cmd))) - raise ValueError(f'The kernel tree was prepared using CC={cc} so the make command cannot be ran with CC={var_cc}: {pretty_cmd}') + raise ValueError(f'The kernel build env was prepared using CC={cc} so the make command cannot be ran with CC={var_cc}: {pretty_cmd}') return cmd @destroyablecontextmanager -def _make_build_chroot(cc, make_vars, bind_paths=None, version=None, overlay_backend=None, packages=None): +def _make_build_chroot(cc, abi, bind_paths=None, version=None, overlay_backend=None, packages=None): """ Create a chroot folder ready to be used to build a kernel. """ @@ -256,26 +270,23 @@ def _make_build_chroot(cc, make_vars, bind_paths=None, version=None, overlay_bac version = version or '3.18.0' packages = default_packages(cc) if packages is None else packages - make_vars = make_vars or {} - target_arch = make_vars.get('ARCH', LISA_HOST_ABI) - use_qemu = ( # Since clang binaries support cross compilation without issues, # there is no need to use QEMU that will slow everything down. 
(not is_clang(cc)) and - target_arch != LISA_HOST_ABI + abi != LISA_HOST_ABI ) - chroot_arch = target_arch if use_qemu else LISA_HOST_ABI + chroot_abi = abi if use_qemu else LISA_HOST_ABI bind_paths = { **dict(bind_paths or {}), - ABI_BINARIES_FOLDER[chroot_arch]: '/usr/local/bin' + ABI_BINARIES_FOLDER[chroot_abi]: '/usr/local/bin' } with _make_alpine_chroot( version=version, - arch=chroot_arch, + abi=chroot_abi, packages=packages, bind_paths=bind_paths, overlay_backend=overlay_backend, @@ -287,7 +298,7 @@ def _make_build_chroot(cc, make_vars, bind_paths=None, version=None, overlay_bac @destroyablecontextmanager -def _make_alpine_chroot(version, packages=None, arch=None, bind_paths=None, overlay_backend=None): +def _make_alpine_chroot(version, packages=None, abi=None, bind_paths=None, overlay_backend='overlayfs'): logger = logging.getLogger(f'{__name__}.alpine_chroot') def mount_binds(chroot, bind_paths, mount=True): @@ -309,8 +320,6 @@ def _make_alpine_chroot(version, packages=None, arch=None, bind_paths=None, over # Packages have already been installed, so we can speed things up a # bit if init_cache: - packages = packages.split(' ') - _version = version.split('.') minor = '.'.join(_version[:2]) url = _ALPINE_ROOTFS_URL.format( @@ -345,11 +354,11 @@ def _make_alpine_chroot(version, packages=None, arch=None, bind_paths=None, over version = version + ['0' for _ in range(3 - len(version))] version = '.'.join(version) - arch = arch or LISA_HOST_ABI - use_qemu = arch != LISA_HOST_ABI + abi = abi or LISA_HOST_ABI + use_qemu = abi != LISA_HOST_ABI - qemu_msg = f' using QEMU userspace emulation to emulate {arch} on {LISA_HOST_ABI}' if use_qemu else '' - logger.debug(f'Using Alpine v{version} chroot with architecture {arch}{qemu_msg}.') + qemu_msg = f' using QEMU userspace emulation to emulate {abi} on {LISA_HOST_ABI}' if use_qemu else '' + logger.debug(f'Using Alpine v{version} chroot with ABI {abi}{qemu_msg}.') # Check that QEMU userspace emulation is setup if we need it if use_qemu: @@ -357,7 +366,7 @@ def _make_alpine_chroot(version, packages=None, arch=None, bind_paths=None, over 'arm64': 'aarch64', 'armeabi': 'arm', 'armv7': 'arm', - }.get(arch, arch) + }.get(abi, abi) binfmt_path = Path('/proc/sys/fs/binfmt_misc/', f'qemu-{qemu_arch}') if not binfmt_path.exists(): raise ValueError(f'Alpine chroot is setup for {qemu_arch} architecture but QEMU userspace emulation is not installed on the host (missing {binfmt_path})') @@ -365,7 +374,7 @@ def _make_alpine_chroot(version, packages=None, arch=None, bind_paths=None, over alpine_arch = { 'arm64': 'aarch64', 'armeabi': 'armv7', - }.get(arch, arch) + }.get(abi, abi) dir_cache = DirCache( category='alpine_chroot', @@ -375,7 +384,7 @@ def _make_alpine_chroot(version, packages=None, arch=None, bind_paths=None, over key = ( version, alpine_arch, - ' '.join(sorted(packages or [])), + sorted(set(packages or [])), ) cache_path = dir_cache.get_entry(key) with _overlay_folders([cache_path], backend=overlay_backend) as path: @@ -399,7 +408,7 @@ def _make_build_chroot_cmd(chroot, cmd): @destroyablecontextmanager -def _overlay_folders(lowers, upper=None, backend=None, copy_filter=None): +def _overlay_folders(lowers, backend, upper=None, copy_filter=None): """ Overlay folders on top of each other. 
@@ -425,7 +434,6 @@ def _overlay_folders(lowers, upper=None, backend=None, copy_filter=None): :type backend: str or None """ logger = logging.getLogger(f'{__name__}.overlay') - backend = KernelTree._resolve_overlay_backend(backend) def make_dir(root, name): path = Path(root) / name @@ -702,43 +710,53 @@ class TarOverlay(_PathOverlayBase): tar.extractall(dst) -class KernelTree(Loggable, SerializeViaConstructor): +class _KernelBuildEnvConf(SimpleMultiSrcConf): + STRUCTURE = TopLevelKeyDesc('kernel-build-env-conf', 'Build environment settings', + ( + KeyDesc('build-env', 'Environment used to build modules. Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [OneOf['host', 'alpine']]), + LevelKeyDesc('build-env-settings', 'build-env settings', ( + LevelKeyDesc('host', 'Settings for host build-env', ( + KeyDesc('toolchain-path', 'Folder to prepend to PATH when executing toolchain command in the host build env', [str]), + )), + LevelKeyDesc('alpine', 'Settings for Alpine linux build-env', ( + KeyDesc('version', 'Alpine linux version, e.g. 3.18.0', [None, str]), + KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, TypedList[str]]), + )), + )), + + KeyDesc('overlay-backend', 'Backend to use for overlaying folders while building modules. Can be "overlayfs" (overlayfs filesystem, recommended and fastest) or "copy (plain folder copy)', [str]), + KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [TypedDict[str, object]]), + ), + ) + + DEFAULT_SRC = { + 'build-env': 'host', + 'overlay-backend': 'overlayfs', + } + + +class _KernelBuildEnv(Loggable, SerializeViaConstructor): """ :param path_cm: Context manager factory expected to return a path to a - prepared kernel tree. + prepared kernel build env. :type path_cm: collections.abc.Callable - :param cc: Compiler used to prepare the kernel tree. Can be e.g. ``"gcc"``, - ``"clang"``, ``"clang-14"`` etc. - :type cc: str or None - - :param make_vars: Variables passed on ``make`` command line when preparing - the kernel tree. - :type make_vars: dict(str, str) - - :param build_env: Build environment to use. Can be one of: + :param build_conf: Build environment configuration. If specified as a + string, it can be one of: * ``alpine``: Alpine linux chroot, providing a controlled environment * ``host``: No specific env is setup, whatever the host is using will be picked. * ``None``: defaults to ``host``. - :type build_env: str or None - - :param build_env_settings: Settings for the chosen build environment. - :type build_env_settings: collections.abc.Mapping - - :param overlay_backend: Backend used to create folder overlays. One of: - * ``overlayfs``: Use overlayfs Linux filesystem. This is the fastest - and the recommanded option. - * ``copy``: Use plain folder copies. This can be used as an alternative - if overlayfs cannot be used for some reason. - * ``None``: default to ``overlayfs``. + Otherwise, pass an instance of :class:`_KernelBuildEnvConf` of a mapping with + the same structure. 
+ :type build_conf: collections.abc.Mapping or str or None """ # Preserve checksum attribute when serializing, as it will allow hitting - # the module cache without actually setting up the kernel tree in many + # the module cache without actually setting up the kernel build env in many # cases. _SERIALIZE_PRESERVED_ATTRS = {'checksum'} @@ -749,32 +767,43 @@ class KernelTree(Loggable, SerializeViaConstructor): # On top of that, the kernel does not handle clang < 10.0.1 _MIN_CLANG_VERSION = 11 - def __init__(self, path_cm, cc, make_vars, build_env=None, overlay_backend=None, build_env_settings=None): + def __init__(self, path_cm, build_conf=None): self._make_path_cm = path_cm - self.build_env, self.build_env_settings = self._resolve_build_env(build_env, build_env_settings) - self.make_vars = make_vars or {} - self.overlay_backend = self._resolve_overlay_backend(overlay_backend) + self.conf, self.cc, self.abi = self._resolve_conf(build_conf) + self._path_cm = None self.path = None self.checksum = None - self.cc = cc - @staticmethod - def _resolve_build_env(build_env, build_env_settings): - return ( - build_env or 'host', - build_env_settings or {} - ) + @classmethod + def _resolve_conf(cls, conf, abi=None): + def make_conf(conf): + if isinstance(conf, _KernelBuildEnvConf): + return conf + elif conf is None: + return _KernelBuildEnvConf() + elif isinstance(conf, str): + return _KernelBuildEnvConf.from_map({ + 'build-env': conf + }) + elif isinstance(conf, Mapping): + return _KernelBuildEnvConf.from_map(conf) + else: + raise TypeError(f'Unsupported value type for build_conf: {conf}') + + conf = make_conf(conf) + make_vars, cc, abi = cls._process_make_vars(conf, abi=abi) + conf.add_src(src='processed make-variables', conf={'make-variables': make_vars}) - @staticmethod - def _resolve_overlay_backend(overlay_backend): - return overlay_backend or 'overlayfs' + return (conf, cc, abi) + + _SPEC_KEYS = ('path', 'checksum') def _to_spec(self): - return dict( - path=self.path, - checksum=self.checksum, - ) + return { + attr: getattr(self, attr) + for attr in self._SPEC_KEYS + } def _update_spec(self, spec): def update(x): @@ -782,7 +811,7 @@ class KernelTree(Loggable, SerializeViaConstructor): if val is not None: setattr(self, x, val) if spec: - for attr in ('path', 'checksum'): + for attr in self._SPEC_KEYS: update(attr) # It is expected that the same object can be used more than once, so @@ -893,7 +922,7 @@ class KernelTree(Loggable, SerializeViaConstructor): @classmethod - def _prepare_tree(cls, path, cc, make_vars, build_env, apply_overlays, overlay_backend, build_env_settings): + def _prepare_tree(cls, path, cc, abi, build_conf, apply_overlays): logger = cls.get_logger() path = Path(path) @@ -902,7 +931,7 @@ class KernelTree(Loggable, SerializeViaConstructor): path=path, targets=targets, cc=cc, - make_vars=make_vars, + make_vars=build_conf.get('make-variables', {}), ) cmds = [ @@ -932,7 +961,7 @@ class KernelTree(Loggable, SerializeViaConstructor): bind_paths = {path: path} - def fixup_kernel_tree(): + def fixup_kernel_build_env(): # TODO: re-assess # The headers in /sys/kheaders.tar.xz generated by @@ -953,17 +982,19 @@ class KernelTree(Loggable, SerializeViaConstructor): _path.write_bytes(content) - if build_env == 'alpine': - settings = (build_env_settings or {}).get('alpine', {}) + if build_conf['build-env'] == 'alpine': + settings = build_conf['build-env-settings']['alpine'] version = settings.get('version', None) alpine_packages = settings.get('packages', None) + make_vars = 
build_conf.get('make-variables', {}) + overlay_backend = build_conf['overlay-backend'] @contextlib.contextmanager def cmd_cm(cmds): with _make_build_chroot( cc=cc, + abi=abi, bind_paths=bind_paths, - make_vars=make_vars, overlay_backend=overlay_backend, version=version, packages=alpine_packages, @@ -985,18 +1016,18 @@ class KernelTree(Loggable, SerializeViaConstructor): # Apply the overlays before running make, so that it sees the # correct headers and conf etc apply_overlays() - fixup_kernel_tree() + fixup_kernel_build_env() _subprocess_log(post, logger=logger, level=logging.DEBUG) # Re-apply the overlays, since we could have overwritten important # things, such as include/linux/vermagic.h apply_overlays() - fixup_kernel_tree() + fixup_kernel_build_env() @classmethod - def _process_make_vars(cls, build_env, build_env_settings, make_vars, abi=None): + def _process_make_vars(cls, build_conf, abi): env = { k: str(v) for k, v in ( @@ -1011,7 +1042,7 @@ class KernelTree(Loggable, SerializeViaConstructor): make_vars = { **env, - **dict(make_vars or {}) + **dict(build_conf.get('make-variables', {})) } make_vars = { @@ -1019,14 +1050,27 @@ class KernelTree(Loggable, SerializeViaConstructor): for k, v in make_vars.items() } - if abi is None: - abi = make_vars.get('ARCH', LISA_HOST_ABI) + try: + arch = make_vars['ARCH'] + except KeyError: + if abi: + arch = _abi_to_kernel_arch(abi) + else: + raise ValueError('The ABI must be specified or the ARCH make variable') + + abi = abi or _kernel_arch_to_abi(arch) - arch = _any_abi_to_kernel_arch(abi) make_vars['ARCH'] = arch - make_vars, cc = cls._resolve_toolchain(abi, make_vars, build_env, build_env_settings) + build_conf = build_conf.add_src( + src='make-variables', + conf={ + 'make-variables': make_vars + }, + inplace=False, + ) + make_vars, cc = cls._resolve_toolchain(abi, build_conf) - if build_env == 'alpine': + if build_conf['build-env'] == 'alpine': if cc.startswith('clang'): make_vars['LLVM'] = '1' else: @@ -1043,31 +1087,28 @@ class KernelTree(Loggable, SerializeViaConstructor): # then be re-filtered right before invoking make to remove CC=gcc as it # can confuse KBuild. 
make_vars['CC'] = cc - return (make_vars, cc) + assert 'ARCH' in make_vars + return (make_vars, cc, arch) @classmethod - def _make_toolchain_env(cls, toolchain_path, env=None): + def _make_toolchain_env(cls, toolchain_path=None, env=None): env = env or os.environ - return { - **os.environ, - 'PATH': ':'.join(( - toolchain_path, - os.environ.get('PATH', '') - )) - } + if toolchain_path is not None: + path = env.get('PATH', '') + env = { + **env, + 'PATH': ':'.join((toolchain_path, path)) + } + + return {**os.environ, **env} @classmethod - def _make_toolchain_env_from_settings(cls, build_env_settings, env=None): - build_env_settings = build_env_settings or {} - try: - toolchain_path = get_nested_key(build_env_settings, ['host', 'toolchain-path']) - except KeyError: - env = os.environ + def _make_toolchain_env_from_conf(cls, build_conf, env=None): + if build_conf['build-env'] == 'host': + toolchain_path = build_conf['build-env-settings']['host'].get('toolchain-path') else: - env = cls._make_toolchain_env(toolchain_path, env=env) - - return env - + toolchain_path = None + return cls._make_toolchain_env(toolchain_path, env=env) @classmethod def _check_cc_version(cls, cc, toolchain_path): @@ -1085,10 +1126,11 @@ class KernelTree(Loggable, SerializeViaConstructor): return False @classmethod - def _resolve_toolchain(cls, abi, make_vars, build_env, build_env_settings): + def _resolve_toolchain(cls, abi, build_conf): logger = cls.get_logger() - build_env, build_env_settings = KernelTree._resolve_build_env(build_env, build_env_settings) - env = cls._make_toolchain_env_from_settings(build_env_settings) + env = cls._make_toolchain_env_from_conf(build_conf) + + make_vars = build_conf.get('make-variables', {}) if abi == LISA_HOST_ABI: toolchain = None @@ -1099,12 +1141,12 @@ class KernelTree(Loggable, SerializeViaConstructor): try: toolchain = os.environ['CROSS_COMPILE'] except KeyError: - if abi in ('arm64', 'aarch64'): + if abi == 'arm64': toolchain = 'aarch64-linux-gnu-' - elif 'arm' in abi: + elif abi == 'armeabi': toolchain = 'arm-linux-gnueabi-' else: - raise KeyError('CROSS_COMPILE env var needs to be set') + raise KeyError(f'ABI {abi} not recognized, CROSS_COMPILE env var needs to be set') logger.debug(f'CROSS_COMPILE env var not set, assuming "{toolchain}"') @@ -1140,7 +1182,7 @@ class KernelTree(Loggable, SerializeViaConstructor): # Default to clang on alpine, as it will be in a high-enough version # and since Alpine does not ship any cross-toolchain for GCC, this will # avoid having to use QEMU userspace emulation which is really slow. - elif build_env == 'alpine': + elif build_conf['build-env'] == 'alpine': cc = 'clang' if 'LLVM' in make_vars: @@ -1155,8 +1197,8 @@ class KernelTree(Loggable, SerializeViaConstructor): # Only run the check on host build env, as other build envs are # expected to be correctly configured. 
- if build_env == 'host' and commands: - toolchain_path = build_env_settings.get('host', {}).get('toolchain-path', None) + if build_conf['build-env'] == 'host' and commands: + toolchain_path = build_conf['build-env-settings']['host'].get('toolchain-path', None) for cc, cmd in commands.items(): pretty_cmd = ' '.join(cmd) @@ -1198,7 +1240,7 @@ class KernelTree(Loggable, SerializeViaConstructor): @classmethod @SerializeViaConstructor.constructor - def from_target(cls, target, tree_path=None, make_vars=None, cache=True, build_env=None, overlay_backend=None, build_env_settings=None): + def from_target(cls, target, tree_path=None, cache=True, build_conf=None): """ Build the tree from the given :class:`lisa.target.Target`. @@ -1233,35 +1275,25 @@ class KernelTree(Loggable, SerializeViaConstructor): downloading a tarball from kernel.org for the matching version.) :type tree_path: str or None - :param make_vars: Variables passed on ``make`` command line. - :type make_vars: dict(str, object) - :param cache: If ``True``, will attempt to cache intermediate steps. :type cache: bool - :param build_env: See :class:`lisa._kmod.KernelTree`. - :type build_env: str or None - - :param overlay_backend: See :class:`lisa._kmod.KernelTree`. - :type overlay_backend: str or None - - :param build_env_settings: See :class:`lisa._kmod.KernelTree`. - :type build_env_settings: collections.abc.Mapping or None + :param build_conf: See :class:`lisa._kmod._KernelBuildEnv`. + :type build_conf: str or None """ - make_vars, cc = cls._process_make_vars( - make_vars=make_vars, - abi=target.plat_info['abi'], - build_env=build_env, - build_env_settings=build_env_settings, - ) - kernel_info = target.plat_info['kernel'] + plat_info = target.plat_info + abi = plat_info['abi'] + kernel_info = plat_info['kernel'] + + build_conf, cc, _abi = cls._resolve_conf(build_conf, abi=abi) + assert _abi == abi @contextlib.contextmanager def from_installed_headers(): """ Get the kernel tree from /lib/modules """ - if build_env == 'alpine': + if build_conf['build-env'] == 'alpine': raise ValueError(f'Building from /lib/modules is not supported with the Alpine build environment as /lib/modules might not be self contained (i.e. symlinks pointing outside)') else: if isinstance(target.conn, LocalConnection): @@ -1328,12 +1360,9 @@ class KernelTree(Loggable, SerializeViaConstructor): with cls.from_overlays( version=version, overlays=overlays, - make_vars=make_vars, cache=cache, tree_path=tree_path, - build_env=build_env, - overlay_backend=overlay_backend, - build_env_settings=build_env_settings, + build_conf=build_conf, ) as tree: yield tree._to_spec() @@ -1384,10 +1413,7 @@ class KernelTree(Loggable, SerializeViaConstructor): tree_path=tree_path, version=kernel_info['version'], cache=cache, - make_vars=make_vars, - build_env=build_env, - overlay_backend=overlay_backend, - build_env_settings=build_env_settings, + build_conf=build_conf, ) as tree: yield tree._to_spec() @@ -1432,30 +1458,24 @@ class KernelTree(Loggable, SerializeViaConstructor): return cls( path_cm=functools.partial(try_loaders, loaders), - cc=cc, - make_vars=make_vars, - build_env=build_env, - build_env_settings=build_env_settings, - overlay_backend=overlay_backend, + build_conf=build_conf, ) @classmethod @SerializeViaConstructor.constructor - def from_path(cls, path, make_vars=None, cache=True, build_env=None, build_env_settings=None): + def from_path(cls, path, cache=True, build_conf=None): """ Build a tree from the given ``path`` to sources. 
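        A minimal usage sketch, assuming an existing kernel source tree (the
        path and configuration below are placeholders):

            build_env = _KernelBuildEnv.from_path(
                '/path/to/kernel/tree',
                build_conf={'build-env': 'alpine'},
            )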
""" return cls.from_overlays( tree_path=path, - make_vars=make_vars, cache=cache, - build_env=build_env, - build_env_settings=build_env_settings, + build_conf=build_conf, ) @classmethod @SerializeViaConstructor.constructor - def from_overlays(cls, version=None, tree_path=None, overlays=None, make_vars=None, cache=True, build_env=None, overlay_backend=None, build_env_settings=None): + def from_overlays(cls, version=None, tree_path=None, overlays=None, cache=True, build_conf=None): """ Build a tree from the given overlays, to be applied on a source tree. @@ -1467,13 +1487,8 @@ class KernelTree(Loggable, SerializeViaConstructor): """ logger = cls.get_logger() overlays = overlays or {} - make_vars, cc = cls._process_make_vars( - make_vars=make_vars, - build_env=build_env, - build_env_settings=build_env_settings, - ) - build_env, build_env_settings = KernelTree._resolve_build_env(build_env, build_env_settings) - overlay_backend = KernelTree._resolve_overlay_backend(overlay_backend) + + build_conf, cc, abi = cls._resolve_conf(build_conf) if tree_path: try: @@ -1509,17 +1524,16 @@ class KernelTree(Loggable, SerializeViaConstructor): cls._prepare_tree( path, cc=cc, - make_vars=make_vars, - build_env=build_env, - build_env_settings=build_env_settings, + abi=abi, + build_conf=build_conf, apply_overlays=functools.partial(apply_overlays, path), - overlay_backend=overlay_backend, ) @contextlib.contextmanager def overlay_cm(args): base_path, tree_key = args base_path = Path(base_path).resolve() + overlay_backend = build_conf['overlay-backend'] if cache and tree_key is not None: # Compute a unique token for the overlay. It includes: @@ -1532,33 +1546,15 @@ class KernelTree(Loggable, SerializeViaConstructor): # * All the variables passed to "make". This is very important # as things such as a toolchain change can make a kernel tree # unsuitable for compiling a module. - def mapping_key(mapping): - if isinstance(mapping, Mapping): - return [ - # We need to take checksum the make variables - # as well, as it can influence the kernel tree - # a great deal (e.g. changing toolchain) - (mapping_key(k), mapping_key(v)) - for k, v in sorted((mapping or {}).items()) - ] - else: - return str(mapping) - key = ( sorted( overlay._get_checksum() for overlay, dst in overlays.items() ) + [ - str(tree_key), - str(build_env), - str(overlay_backend), + tree_key, str(cc), - ] + - # We need to take checksum the make variables - # as well, as it can influence the kernel tree - # a great deal (e.g. changing toolchain) - mapping_key(make_vars) + - mapping_key(build_env_settings) + build_conf, + ] ) def populate(key, path): @@ -1623,7 +1619,7 @@ class KernelTree(Loggable, SerializeViaConstructor): url = cls._get_url(version) # Assume that the URL will always provide the same tarball yield ( - dir_cache.get_entry([url]), + dir_cache.get_entry(url), url, ) else: @@ -1636,10 +1632,7 @@ class KernelTree(Loggable, SerializeViaConstructor): cm = chain_cm(overlay_cm, tree_cm) return cls( path_cm=cm, - cc=cc, - make_vars=make_vars, - build_env=build_env, - build_env_settings=build_env_settings, + build_conf=build_conf, ) @classmethod @@ -1758,30 +1751,29 @@ class KmodSrc(Loggable): ) )).encode('utf-8') - def compile(self, kernel_tree, make_vars=None): + def compile(self, kernel_build_env, make_vars=None): """ Compile the module and returns the ``bytestring`` content of the ``.ko`` file. - :param kernel_tree: Kernel tree to build the module against. 
- :type kernel_tree: KernelTree + :param kernel_build_env: kernel build env to build the module against. + :type kernel_build_env: _KernelBuildEnv :param make_vars: Variables passed on ``make`` command line. This can be used for variables only impacting the module, otherwise it's - better to set them when creating the ``kernel_tree``. + better to set them when creating the ``kernel_build_env``. :type make_vars: dict(str, object) or None """ make_vars = { - **kernel_tree.make_vars, + **kernel_build_env.conf.get('make-variables', {}), **(make_vars or {}), } - cc = kernel_tree.cc - overlay_backend = kernel_tree.overlay_backend - tree_path = Path(kernel_tree.path) - # "inherit" the build env from the KernelTree as we must use the same + cc = kernel_build_env.cc + abi = kernel_build_env.abi + tree_path = Path(kernel_build_env.path) + # "inherit" the build env from the _KernelBuildEnv as we must use the same # environment as what was used for "make modules_prepare" - build_env = kernel_tree.build_env - build_env_settings = kernel_tree.build_env_settings + build_conf = kernel_build_env.conf bind_paths = {tree_path: tree_path} logger = self.logger @@ -1828,8 +1820,8 @@ class KmodSrc(Loggable): else: return filenames[0] - if build_env == 'alpine': - settings = (build_env_settings or {}).get('alpine', {}) + if build_conf['build-env'] == 'alpine': + settings = build_conf['build-env-settings']['alpine'] alpine_version = settings.get('version', None) alpine_packages = settings.get('packages', None) @@ -1838,8 +1830,8 @@ class KmodSrc(Loggable): with _make_build_chroot( cc=cc, bind_paths=bind_paths, - make_vars=make_vars, - overlay_backend=overlay_backend, + abi=abi, + overlay_backend=build_conf['overlay-backend'], version=alpine_version, packages=alpine_packages, ) as chroot: @@ -1863,7 +1855,7 @@ class KmodSrc(Loggable): make_vars=make_vars, ) - env = self._make_toolchain_env_from_settings(build_env_settings, env={'PATH': HOST_PATH}) + env = _KernelBuildEnv._make_toolchain_env_from_conf(build_conf, env={'PATH': HOST_PATH}) yield (mod_path, cmd, {'PATH': env['PATH']}) with cmd_cm() as (mod_path, cmd, env): @@ -1918,20 +1910,20 @@ class DynamicKmod(Loggable): :param src: Sources of the module. :type src: lisa._kmod.KmodSrc - :param kernel_tree: Kernel source tree to use to build the module against. - :type kernel_tree: lisa._kmod.KernelTree + :param kernel_build_env: Kernel source tree to use to build the module against. + :type kernel_build_env: lisa._kmod._KernelBuildEnv """ - def __init__(self, target, src, kernel_tree=None): + def __init__(self, target, src, kernel_build_env=None): self.src = src self.target = target - if not isinstance(kernel_tree, KernelTree): - kernel_tree = KernelTree.from_target( + if not isinstance(kernel_build_env, _KernelBuildEnv): + kernel_build_env = _KernelBuildEnv.from_target( target=self.target, - tree_path=kernel_tree, + tree_path=kernel_build_env, ) - self._kernel_tree = kernel_tree + self._kernel_build_env = kernel_build_env @property def mod_name(self): @@ -1950,23 +1942,23 @@ class DynamicKmod(Loggable): @property @memoized - def kernel_tree(self): - tree = self._kernel_tree - arch = _any_abi_to_kernel_arch( + def kernel_build_env(self): + tree = self._kernel_build_env + arch = _abi_to_kernel_arch( self.target.plat_info['abi'] ) - tree_arch = tree.make_vars['ARCH'] + tree_arch = tree.conf['make-variables']['ARCH'] if tree_arch != arch: - raise ValueError(f'The kernel tree ({tree_arch}) was not prepared for the same architecture as the target ({arch}). 
Please set ARCH={arch} make variable.') + raise ValueError(f'The kernel build env ({tree_arch}) was not prepared for the same architecture as the target ({arch}). Please set ARCH={arch} make variable.') else: return tree @property def _compile_needs_root(self): - tree = self.kernel_tree + tree = self.kernel_build_env return ( - tree.build_env != 'host' or - tree.overlay_backend == 'overlayfs' + tree.conf['build-env'] != 'host' or + tree.conf['overlay-backend'] == 'overlayfs' ) # Dummy memoized wrapper. The only reason we need one is that _do_compile() @@ -1986,51 +1978,48 @@ class DynamicKmod(Loggable): compile_ = ensure_root(compile_, inline=True) bin_, spec = compile_(self, make_vars=make_vars) - # Get back KernelTree._to_spec() and update the KernelTree we have in + # Get back _KernelBuildEnv._to_spec() and update the _KernelBuildEnv we have in # this process with it to remember the checksum, in case ensure_root() # spawned a new process. This is then used by Target.get_kmod() that # will reinject the known spec when creating new modules from the - # default KernelTree - self.kernel_tree._update_spec(spec) + # default _KernelBuildEnv + self.kernel_build_env._update_spec(spec) return bin_ def _do_compile(self, make_vars=None): - kernel_tree = self.kernel_tree + kernel_build_env = self.kernel_build_env extra_make_vars = make_vars or {} all_make_vars = { + **kernel_build_env.conf.get('make-variables', {}), **extra_make_vars, - **kernel_tree.make_vars, } src = self.src - def get_key(kernel_tree): - kernel_checksum = kernel_tree.checksum + def get_key(kernel_build_env): + kernel_checksum = kernel_build_env.checksum if kernel_checksum is None: - raise ValueError('Kernel tree has no checksum') + raise ValueError('kernel build env has no checksum') else: - var_tokens = [ - f'{k}={v}' - for k, v in sorted(all_make_vars.items()) - ] - # Cache the compilation based on: - # * the kernel tree - # * the make variables - # * the module name - return (kernel_checksum, kernel_tree.build_env, kernel_tree.build_env_settings, src.checksum, var_tokens) + return ( + kernel_checksum, + kernel_build_env.conf, + src.checksum, + all_make_vars, + ) - def get_bin(kernel_tree): + def get_bin(kernel_build_env): return src.compile( - kernel_tree=kernel_tree, + kernel_build_env=kernel_build_env, make_vars=extra_make_vars, ) - def lookup_cache(kernel_tree, key, enter_cm=False): - cm = kernel_tree if enter_cm else nullcontext(kernel_tree) + def lookup_cache(kernel_build_env, key, enter_cm=False): + cm = kernel_build_env if enter_cm else nullcontext(kernel_build_env) def populate(key, path): - with cm as kernel_tree: + with cm as kernel_build_env: with open(path / 'mod.ko', 'wb') as f: - f.write(get_bin(kernel_tree)) + f.write(get_bin(kernel_build_env)) dir_cache = DirCache( category='kernel_modules', @@ -2040,26 +2029,26 @@ class DynamicKmod(Loggable): with open(cache_path / 'mod.ko', 'rb') as f: return f.read() - # First try on the "bare" kernel tree, i.e. before calling __enter__(). + # First try on the "bare" kernel build env, i.e. before calling __enter__(). 
# If this happens to have enough information to hit the cache, we just # avoided a possibly costly setup of compilation environment try: - key = get_key(kernel_tree) + key = get_key(kernel_build_env) except ValueError: - with kernel_tree as kernel_tree: - if kernel_tree.checksum is None: - # Only cache the module if the kernel tree has a defined + with kernel_build_env as kernel_build_env: + if kernel_build_env.checksum is None: + # Only cache the module if the kernel build env has a defined # checksum, which is not always the case when it's not # coming from a controlled source that is guaranteed to be # immutable. - bin_ = get_bin(kernel_tree) + bin_ = get_bin(kernel_build_env) else: - key = get_key(kernel_tree) - bin_ = lookup_cache(kernel_tree, key) + key = get_key(kernel_build_env) + bin_ = lookup_cache(kernel_build_env, key) else: - bin_ = lookup_cache(kernel_tree, key, enter_cm=True) + bin_ = lookup_cache(kernel_build_env, key, enter_cm=True) - return (bin_, kernel_tree._to_spec()) + return (bin_, kernel_build_env._to_spec()) def install(self, kmod_params=None): """ diff --git a/lisa/conf.py b/lisa/conf.py index ac0e1f20e..bd9257fd8 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -576,7 +576,10 @@ class LevelKeyDesc(KeyDescBase, Mapping): def __init__(self, name, help, children, value_path=None): # pylint: disable=redefined-builtin super().__init__(name=name, help=help) - self.children = children + # Make it easier to share children with another configuration class by + # making them independent, so we will not accidentally override the + # parent link when it would not be appropriate. + self.children = list(map(copy.deepcopy, children)) # Fixup parent for easy nested declaration for key_desc in self.children: diff --git a/lisa/target.py b/lisa/target.py index 19c7cccb2..81668b268 100644 --- a/lisa/target.py +++ b/lisa/target.py @@ -34,6 +34,7 @@ import hashlib import shutil from types import ModuleType, FunctionType from operator import itemgetter +import warnings import devlib from devlib.exception import TargetStableError @@ -42,9 +43,9 @@ from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized, destroyablecontextmanager, ContextManagerExit from lisa._assets import ASSETS_PATH -from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable +from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable, DelegatedLevelKeyDesc from lisa._generic import TypedList, TypedDict -from lisa._kmod import KernelTree, DynamicKmod +from lisa._kmod import _KernelBuildEnv, DynamicKmod, _KernelBuildEnvConf from lisa.platforms.platinfo import PlatformInfo @@ -159,31 +160,10 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to speed up the connection', [bool]), LevelKeyDesc('kernel', 'kernel information', ( KeyDesc('src', 'Path to kernel source tree matching the kernel running on the target used to build modules', [str, None]), - LevelKeyDesc('modules', 'kernel modules', ( - LevelKeyDesc( - 'build-env', 'Settings specific to a given build-env ', - ( - KeyDesc('kind', 'Environment used to build modules. 
Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [str]), - # At this level we have all the build-env specific - # parameters. Generic parameters go straight into the - # "modules" level. Most people will not need to use the - # "settings" level. - LevelKeyDesc('settings', 'build-env settings', ( - LevelKeyDesc('host', 'Settings for host build-env', ( - KeyDesc('toolchain-path', 'Folder to prepend to PATH when executing toolchain command in the host build env', [str]), - )), - LevelKeyDesc('alpine', 'Settings for Alpine linux build-env', ( - KeyDesc('version', 'Alpine linux version, e.g. 3.18.0', [None, str]), - KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, TypedList[str]]), - )), - )), - ), - value_path=('kind',), - ), - # At this level we have generic parameters that apply regardless of the build environment - KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [TypedDict[str, object]]), - KeyDesc('overlay-backend', 'Backend to use for overlaying folders while building modules. Can be "overlayfs" (overlayfs filesystem, recommended) or "copy (plain folder copy)', [str]), - )), + DelegatedLevelKeyDesc( + 'modules', 'Kernel module build environment', + _KernelBuildEnvConf, + ), )), LevelKeyDesc('wait-boot', 'Wait for the target to finish booting', ( KeyDesc('enable', 'Enable the boot check', [bool]), @@ -280,10 +260,7 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): 'wait_boot_timeout': ['wait-boot', 'timeout'], 'kernel_src': ['kernel', 'src'], - 'kmod_build_env': ['kernel', 'modules', 'build-env', 'kind'], - 'kmod_build_env_settings': ['kernel', 'modules', 'build-env', 'settings'], - 'kmod_make_vars': ['kernel', 'modules', 'make-variables'], - 'kmod_overlay_backend': ['kernel', 'modules', 'overlay-backend'], + 'kmod_build_env': ['kernel', 'modules'], } def __init__(self, kind, name='', tools=[], res_dir=None, @@ -292,7 +269,6 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, kernel_src=None, kmod_build_env=None, kmod_make_vars=None, kmod_overlay_backend=None, devlib_max_async=None, - kmod_build_env_settings=None, ): # Set it temporarily to avoid breaking __getattr__ self._devlib_loadable_modules = set() @@ -300,18 +276,8 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): # pylint: disable=dangerous-default-value super().__init__() logger = self.logger - self.name = name - self._kmod_tree = None - self._kmod_tree_spec = dict( - tree_path=kernel_src, - build_env=kmod_build_env, - build_env_settings=kmod_build_env_settings, - make_vars=kmod_make_vars, - overlay_backend=kmod_overlay_backend, - ) - res_dir = res_dir if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', @@ -374,6 +340,38 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): cache_dir.mkdir(parents=True) self._cache_dir = cache_dir + self._kmod_build_env = None + + def _make_kernel_build_env_spec(kmod_build_env, abi): + kmod_build_env, *_ = _KernelBuildEnv._resolve_conf( + kmod_build_env, + abi=abi, 
+ ) + deprecated = { + 'overlay-backend': kmod_overlay_backend, + 'make-variables': kmod_make_vars, + } + + cls_name = self.__class__.__qualname__ + if any(v is not None for v in deprecated.values()): + warnings.warn(f'{cls_name} kmod_overlay_backend and kmod_make_vars parameters are deprecated, please pass the information inside build_env instead using keys: {", ".join(deprecated.keys())}', DeprecationWarning) + + kmod_build_env.add_src( + src='deprecated-{cls_name}-params', + conf=deprecated, + filter_none=True + ) + + return dict( + tree_path=kernel_src, + build_conf=kmod_build_env, + ) + + self._kmod_build_env_spec = _make_kernel_build_env_spec( + kmod_build_env, + abi=self.plat_info['abi'], + ) + def _init_plat_info(self, plat_info=None, name=None, **kwargs): if plat_info is None: @@ -401,18 +399,18 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): k: v for k, v in self.__dict__.items() if k not in { - # this KernelTree contains a reference to ourselves, and will + # this _KernelBuildEnv contains a reference to ourselves, and will # fail to unpickle because of the circular dependency and the # fact that it will call its constructor again, trying to make # use of the Target before it is ready. - '_kmod_tree', + '_kmod_build_env', } } def __setstate__(self, dct): self.__dict__.update(dct) self._init_plat_info(deferred=True) - self._kmod_tree = None + self._kmod_build_env = None def get_kmod(self, mod_cls=DynamicKmod, **kwargs): """ @@ -427,30 +425,30 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): method of ``mod_cls``. """ try: - tree = kwargs['kernel_tree'] + build_env = kwargs['kernel_build_env'] except KeyError: - memoize_tree = True - if self._kmod_tree: - tree = self._kmod_tree + memoize_build_env = True + if self._kmod_build_env: + build_env = self._kmod_build_env else: - tree = KernelTree.from_target( + build_env = _KernelBuildEnv.from_target( target=self, - **self._kmod_tree_spec + **self._kmod_build_env_spec ) else: - memoize_tree = False + memoize_build_env = False - kwargs['kernel_tree'] = tree + kwargs['kernel_build_env'] = build_env mod = mod_cls.from_target( target=self, **kwargs, ) - if memoize_tree: + if memoize_build_env: # Memoize the KernelTree, as it is a reusable object. Memoizing # allows remembering its checksum across calls, which will allow # hitting the .ko cache without having to setup a kernel tree. 
- self._kmod_tree = mod.kernel_tree + self._kmod_build_env = mod.kernel_build_env return mod def cached_pull(self, src, dst, **kwargs): -- GitLab From acea43a7abab0c4870ac5b55bc95894231a9a6d6 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Tue, 25 Jul 2023 19:10:55 +0100 Subject: [PATCH 12/24] lisa.target: Use lisa._generic.OneOf in TargetConf FEATURE Ensure that target kind can only be "linux" or "android" using lisa._generic.OneOf --- lisa/target.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lisa/target.py b/lisa/target.py index 81668b268..a70115669 100644 --- a/lisa/target.py +++ b/lisa/target.py @@ -44,7 +44,7 @@ from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized, destroyablecontextmanager, ContextManagerExit from lisa._assets import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable, DelegatedLevelKeyDesc -from lisa._generic import TypedList, TypedDict +from lisa._generic import TypedList, TypedDict, OneOf from lisa._kmod import _KernelBuildEnv, DynamicKmod, _KernelBuildEnvConf from lisa.platforms.platinfo import PlatformInfo @@ -146,7 +146,7 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board name, free-form value only used to embelish logs', [str]), - KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [str]), + KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [OneOf['linux', 'android', 'host']]), KeyDesc('host', 'Hostname or IP address of the host', [str, None]), KeyDesc('username', 'SSH username. On ADB connections, "root" username will root adb upon target connection', [str, None]), -- GitLab From 7aaf84ae0c8059965a11c544713f826b61d88905 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 11:29:38 +0100 Subject: [PATCH 13/24] lisa.utils: Add typing hint support to get_cls_name() FEATURE Allow get_cls_name() to process correctly typing hints like typing.Union[str, int] --- doc/conf.py | 5 +++++ lisa/utils.py | 28 ++++++++++++++++++---------- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index c0651371f..9cbe14e60 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -436,6 +436,11 @@ ignored_refs = { # reference since the intersphinx inventory of the stdlib does not provide # any link for NoneType. r'NoneType', + + # Sphinx currently fails at finding the target for references like + # :class:`typing.List[str]` since it does not seem to have specific support + # for the bracketed syntax in that role. + r'typing.*', } ignored_refs.update( re.escape(f'{x.__module__}.{x.__qualname__}') diff --git a/lisa/utils.py b/lisa/utils.py index 4f5130286..585b5eba6 100644 --- a/lisa/utils.py +++ b/lisa/utils.py @@ -490,7 +490,7 @@ def get_cls_name(cls, style=None, fully_qualified=True): """ Get a prettily-formated name for the class given as parameter - :param cls: class to get the name from + :param cls: Class or typing hint to get the name from. 
:type cls: type :param style: When "rst", a RestructuredText snippet is returned @@ -499,17 +499,25 @@ def get_cls_name(cls, style=None, fully_qualified=True): """ if cls is None: return 'None' - - if fully_qualified or style == 'rst': - mod_name = inspect.getmodule(cls).__name__ - mod_name = mod_name + '.' if mod_name not in ('builtins', '__main__') else '' else: - mod_name = '' + try: + qualname = cls.__qualname__ + # type annotations like typing.Union[str, int] do not have a __qualname__ + except AttributeError: + name = str(cls) + else: + if fully_qualified or style == 'rst': + mod_name = inspect.getmodule(cls).__name__ + mod_name = mod_name + '.' if mod_name not in ('builtins', '__main__') else '' + else: + mod_name = '' - name = mod_name + cls.__qualname__ - if style == 'rst': - name = f':class:`~{name}`' - return name + name = mod_name + cls.__qualname__ + + if style == 'rst': + name = f':class:`~{name}`' + + return name def get_common_ancestor(classes): -- GitLab From a79e9e78377ada2e657e6b22d277e8fb6a0874d7 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 10:49:53 +0100 Subject: [PATCH 14/24] lisa._generic: Remove custom hierarchy Remove custom type hierarchy in favor of the typing standard module. The only type hint that is left is SortedList, which is integrated to the typeguard package for runtime checking. --- lisa/_generic.py | 249 +++++++++++++++++---------------------------- tests/test_conf.py | 6 +- 2 files changed, 96 insertions(+), 159 deletions(-) diff --git a/lisa/_generic.py b/lisa/_generic.py index 1649f8a43..98df2925b 100644 --- a/lisa/_generic.py +++ b/lisa/_generic.py @@ -20,193 +20,130 @@ Generic types inspired by the :mod:`typing` module. """ import functools -from collections.abc import Mapping, Sequence -from operator import attrgetter +import inspect +from typing import Any, Union, Generic, TypeVar, List +import typeguard +from collections.abc import Iterable -from lisa.utils import sphinx_register_nitpick_ignore +from lisa.utils import get_cls_name -def _isinstance(x, type_): - if isinstance(type_, tuple): - return any(map(lambda type_: _isinstance(x, type_), type_)) - elif isinstance(type_, type): - return isinstance(x, type_) - # Typing hint - else: - try: - from typing import get_origin, get_args, Union - except ImportError: - # We cannot process the typing hint in that version of Python, so - # we assume the input is correctly typed. It's not ideal but we - # cannot do much more than that. 
- return True - else: - combinator = get_origin(type_) - args = get_args(type_) - if combinator == Union: - return any(map(lambda type_: _isinstance(x, type_), args)) - else: - raise TypeError(f'Cannot handle type hint: {type_}') - -class MetaBase(type): - def __instancecheck__(cls, instance): - try: - cls.instancecheck(instance) - except TypeError: - return False - else: - return True - - # Fully memoize the function so that this always holds: - # assert Container[Foo] is Container[Foo] - @functools.lru_cache(maxsize=None, typed=True) - def __getitem__(cls, type_): - NewClass = cls.getitem(type_) +class _TypeguardCustom: + _HINT = Any - NewClass.__module__ = cls.__module__ + @classmethod + def _instancecheck(cls, value): + return - # Since this type name is not resolvable, avoid cross reference - # warnings from Sphinx - sphinx_register_nitpick_ignore(NewClass) - return NewClass + @classmethod + def _typeguard_checker(cls, value, origin_type, args, memo): + typeguard.check_type_internal(value, cls._HINT, memo) + try: + cls._instancecheck(value) + except TypeError as e: + raise typeguard.TypeCheckError(str(e)) -class GenericContainerMetaBase(MetaBase): - """ - Base class for the metaclass of generic containers. - They are parameterized with the ``type_`` class attribute, and classes can - also be created by indexing on classes with :class:`GenericBase` - metaclass. The ``type_`` class attribute will be set with what is passed as - the key. - """ +def _typeguard_lookup(origin_type, args, extras): + try: + issub = issubclass(origin_type, _TypeguardCustom) + except Exception: + issub = False - # Fully memoize the function so that this always holds: - # assert Container[Foo] is Container[Foo] - def getitem(cls, type_): - class NewClass(cls): - _type = type_ - - types = type_ if isinstance(type_, Sequence) else [type_] - - def make_name(self_getter, sub_getter): - def _sub_getter(type_): - try: - return sub_getter(type_) - # type hints like typing.Union don't have a name we can introspect, - # but it can be pretty-printed - except AttributeError: - return str(type_) - return '{}[{}]'.format( - self_getter(cls), - ','.join(_sub_getter(type_) for type_ in types) - ) - - NewClass.__name__ = make_name( - attrgetter('__name__'), - attrgetter('__name__') - ) + if issub: + return origin_type._typeguard_checker + else: + return None - def type_param_name(t): - if t.__module__ == 'builtins': - return t.__qualname__ - else: - # Add the module name so that Sphinx can establish cross - # references - return f'{t.__module__}.{t.__qualname__}' - - NewClass.__qualname__ = make_name( - attrgetter('__qualname__'), - type_param_name, - ) - return NewClass +typeguard.checker_lookup_functions.append(_typeguard_lookup) -class GenericBase: +def check_type(x, classinfo): """ - Base class for generic containers. + Equivalent of ``isinstance()`` that will also work with typing hints. """ - def __new__(cls, obj): - cls.instancecheck(obj) - return obj + if isinstance(classinfo, Iterable): + typ = Union[tuple(classinfo)] + else: + typ = classinfo + + try: + typeguard.check_type( + value=x, + expected_type=typ, + forward_ref_policy=typeguard.ForwardRefPolicy.ERROR, + collection_check_strategy=typeguard.CollectionCheckStrategy.ALL_ITEMS, + ) + except typeguard.TypeCheckError as e: + raise TypeError(str(e)) -class GenericMappingMeta(GenericContainerMetaBase, type(Mapping)): +def is_instance(obj, classinfo): """ - Metaclass for generic mapping containers. 
- - It provides an ``__instancecheck__`` implementation that checks the type - of the keys and values. This make it suitable for input sanitizing based - on type checking. + Same as builtin ``isinstance()`` but works with type hints. """ - def instancecheck(cls, instance): - if not isinstance(instance, Mapping): - raise TypeError('not a Mapping') - - k_type, v_type = cls._type - for k, v in instance.items(): - if not _isinstance(k, k_type): - raise TypeError(f'Key "{k}" of type {type(k).__qualname__} should be of type {k_type.__qualname__}', k) - - if not _isinstance(v, v_type): - raise TypeError(f'Value of {type(v).__qualname__} key "{k}" should be of type {v_type.__qualname__}', k) + try: + check_type(obj, classinfo) + except TypeError: + return False + else: + return True -class TypedDict(GenericBase, dict, metaclass=GenericMappingMeta): - """ - Subclass of dict providing keys and values type check. +def is_hint(obj): """ + Heuristic to check if a given ``obj`` is a typing hint or anything else. + This function will return ``False`` for classes. - -class GenericSequenceMeta(GenericContainerMetaBase, type(Sequence)): - """Similar to :class:`GenericMappingMeta` for sequences""" - def instancecheck(cls, instance): - if not isinstance(instance, Sequence): - raise TypeError('not a Sequence') - - type_ = cls._type - for i, x in enumerate(instance): - if not _isinstance(x, type_): - raise TypeError(f'Item #{i} "{x}" of type {type(x).__qualname__} should be of type {type_.__qualname__}', i) - -class GenericSortedSequenceMeta(GenericSequenceMeta): - def instancecheck(cls, instance): - super().instancecheck(instance) - for i, (x, y) in enumerate(zip(instance, instance[1:])): - if x > y: - raise TypeError(f'Item #{i} "{x}" is higher than the next item "{y}", but the list must be sorted') - - -class TypedList(GenericBase, list, metaclass=GenericSequenceMeta): - """ - Subclass of list providing keys and values type check. + .. warning:: Since there is currently no way to identify hints for sure, + the check might return ``False`` even if it is a hint. """ + module = getattr(obj, '__module__', None) + + # This is a class, so cannot be a hint. + if isinstance(obj, type): + return issubclass(obj, _TypeguardCustom) + elif module in ('typing', 'typing_extensions'): + return True + else: + return False -class SortedTypedList(GenericBase, list, metaclass=GenericSortedSequenceMeta): +@functools.lru_cache(maxsize=None, typed=True) +def hint_to_class(hint): """ - Subclass of list providing keys and values type check, and also check the - list is sorted in ascending order. + Convert a typing hint to a class that will do a runtime check against the + hint when ``isinstance()`` is used. """ + class Meta(type): + def __instancecheck__(cls, instance): + return is_instance(instance, hint) + class Stub(metaclass=Meta): + pass -class OneOfMeta(MetaBase): - def getitem(cls, allowed): - class NewClass(cls): - _allowed = allowed + name = get_cls_name(hint).split('.', 1) + try: + name = name[1] + except IndexError: + name = name[0] - NewClass.__qualname__ = f'{cls.__qualname__}[{", ".join(map(repr, allowed))}]' - return NewClass + Stub.__qualname__ = name + Stub.__name__ = name.split('.')[-1] - def instancecheck(cls, instance): - allowed = cls._allowed - if instance not in allowed: - raise ValueError(f'Value {repr(instance)} is not allowed. 
It must be one of: {", ".join(map(repr, allowed))}') + return Stub -class OneOf(GenericBase, metaclass=OneOfMeta): +T = TypeVar('T') +class SortedList(Generic[T], _TypeguardCustom): """ - Check that the provided value is part of a specific set of allowed values. + Same as :class:`typing.List` but enforces sorted values when runtime + checked using :mod:`typeguard`. """ - def __new__(cls, obj): - cls.instancecheck(obj) - return obj + _HINT = List[T] + + @classmethod + def _instancecheck(cls, value): + for i, (x, y) in enumerate(zip(value, value[1:])): + if x > y: + raise TypeError(f'Item #{i} "{x}" is higher than the next item "{y}", but the list must be sorted') diff --git a/tests/test_conf.py b/tests/test_conf.py index 56d6110b9..49f820b60 100644 --- a/tests/test_conf.py +++ b/tests/test_conf.py @@ -19,11 +19,11 @@ import os import copy from unittest import TestCase +import typing import pytest from lisa.conf import MultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, DerivedKeyDesc, DeferredValue -from lisa._generic import TypedList from .utils import StorageTestCase, HOST_PLAT_INFO, HOST_TARGET_CONF """ A test suite for the MultiSrcConf subclasses.""" @@ -86,8 +86,8 @@ def compute_derived(base_conf): INTERNAL_STRUCTURE = ( KeyDesc('foo', 'foo help', [int]), - KeyDesc('bar', 'bar help', [TypedList[int]]), - KeyDesc('multitypes', 'multitypes help', [TypedList[int], str, None]), + KeyDesc('bar', 'bar help', [typing.Sequence[int]]), + KeyDesc('multitypes', 'multitypes help', [typing.Sequence[int], str, None]), LevelKeyDesc('sublevel', 'sublevel help', ( KeyDesc('subkey', 'subkey help', [int]), )), -- GitLab From 122c053bd40a21e04c875a61e80dcddda985c2ad Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 11:21:25 +0100 Subject: [PATCH 15/24] lisa.conf: Use typeguard to validate conf key types FEATURE Use the typeguard library to validate the values passed by the user. This enables using standard typing annotations instead of lisa._generic. --- doc/conf.py | 1 + lisa/conf.py | 48 +++++++++--------------------------------------- setup.py | 2 ++ 3 files changed, 12 insertions(+), 39 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 9cbe14e60..889aaed4b 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -363,6 +363,7 @@ intersphinx_mapping = { 'wa': ('https://workload-automation.readthedocs.io/en/latest/', None), 'ipywidgets': ('https://ipywidgets.readthedocs.io/en/latest/', None), 'IPython': ('https://ipython.readthedocs.io/en/stable/', None), + 'typeguard': ('https://typeguard.readthedocs.io/en/stable/', None), } manpages_url = "https://manpages.debian.org/{path}" diff --git a/lisa/conf.py b/lisa/conf.py index bd9257fd8..4d65808da 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -32,8 +32,10 @@ import io import functools import threading import weakref +import typing from ruamel.yaml.comments import CommentedMap +import typeguard import lisa from lisa.utils import ( @@ -41,6 +43,7 @@ from lisa.utils import ( is_running_sphinx, get_cls_name, HideExekallID, get_subclasses, groupby, import_all_submodules, ) +from lisa._generic import check_type class DeferredValue: @@ -244,41 +247,12 @@ class KeyDesc(KeyDescBase): classinfo = self.classinfo key = self.qualname - def get_excep(key, val, classinfo, cls, msg): - # pylint: disable=unused-argument - classinfo = ' or '.join(get_cls_name(cls) for cls in classinfo) - msg = ': ' + msg if msg else '' - return TypeError(f'Key "{key}" is an instance of {get_cls_name(type(val))}, but should be instance of {classinfo}{msg}. 
Help: {self.help}', key) - def checkinstance(key, val, classinfo): - excep_list = [] - for cls in classinfo: - if cls is None: - if val is not None: - excep_list.append( - get_excep(key, val, classinfo, cls, 'Key is not None') - ) - # Some classes are able to raise a more detailed - # exception than just the boolean return value of - # __instancecheck__ - elif hasattr(cls, 'instancecheck'): - try: - cls.instancecheck(val) - except TypeError as e: - excep_list.append( - get_excep(key, val, classinfo, cls, str(e)) - ) - else: - if not isinstance(val, cls): - excep_list.append( - get_excep(key, val, classinfo, cls, None) - ) - - # If no type was validated, we raise an exception. This will - # only show the exception for the first class to be tried, - # which is the primary one. - if len(excep_list) == len(classinfo): - raise excep_list[0] + try: + check_type(val, classinfo) + except TypeError as e: + classinfo = ' or '.join(get_cls_name(cls) for cls in classinfo) + raise TypeError(f'Key "{key}" is an instance of {get_cls_name(type(val))}, but should be instance of {classinfo}: {e}. Help: {self.help}', key) # DeferredValue will be checked when they are computed if not isinstance(val, DeferredValue): @@ -1011,11 +985,7 @@ class MultiSrcConfABC(Serializable, abc.ABC): # type given in KeyDesc.__init__(classinfo=...) class NewtypeMeta(type): def __instancecheck__(cls, x): - classinfo = tuple( - c if c is not None else type(None) - for c in key_desc.classinfo - ) - return isinstance(x, classinfo) + return check_type(x, key_desc.classinfo) return NewtypeMeta diff --git a/setup.py b/setup.py index 80f940a5c..8abf72a6c 100755 --- a/setup.py +++ b/setup.py @@ -156,6 +156,8 @@ if __name__ == "__main__": "pyelftools", # To get symbol names in kernel module "cffi", # unshare syscall + + "typeguard", ], extras_require=extras_require, -- GitLab From 4b0e93058c6d6fcf80e070e6e0b25b75bff86c53 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 11:40:31 +0100 Subject: [PATCH 16/24] lisa: Use typing module instead of lisa._generic --- lisa/_kmod.py | 8 +++---- lisa/_typeclass.py | 39 ++++++++++++++++++++++++---------- lisa/analysis/base.py | 8 +++---- lisa/analysis/idle.py | 4 ++-- lisa/analysis/load_tracking.py | 8 +++---- lisa/analysis/tasks.py | 10 ++++----- lisa/energy_meter.py | 8 +++---- lisa/platforms/platinfo.py | 19 +++++++++-------- lisa/target.py | 10 ++++----- lisa/tests/base.py | 4 ++-- lisa/trace.py | 17 +++++++-------- 11 files changed, 76 insertions(+), 59 deletions(-) diff --git a/lisa/_kmod.py b/lisa/_kmod.py index bef07c807..6d0b10712 100644 --- a/lisa/_kmod.py +++ b/lisa/_kmod.py @@ -131,6 +131,7 @@ from operator import itemgetter from shlex import quote from io import BytesIO from collections.abc import Mapping +from typing import List, Dict, Literal from elftools.elf.elffile import ELFFile @@ -143,7 +144,6 @@ from lisa._assets import ASSETS_PATH, HOST_PATH, ABI_BINARIES_FOLDER from lisa._unshare import ensure_root import lisa._git as git from lisa.conf import SimpleMultiSrcConf, TopLevelKeyDesc, LevelKeyDesc, KeyDesc -from lisa._generic import TypedList, TypedDict, OneOf _ALPINE_ROOTFS_URL = 'https://dl-cdn.alpinelinux.org/alpine/v{minor}/releases/{arch}/alpine-minirootfs-{version}-{arch}.tar.gz' @@ -713,19 +713,19 @@ class TarOverlay(_PathOverlayBase): class _KernelBuildEnvConf(SimpleMultiSrcConf): STRUCTURE = TopLevelKeyDesc('kernel-build-env-conf', 'Build environment settings', ( - KeyDesc('build-env', 'Environment used to build modules. 
Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [OneOf['host', 'alpine']]), + KeyDesc('build-env', 'Environment used to build modules. Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [Literal['host', 'alpine']]), LevelKeyDesc('build-env-settings', 'build-env settings', ( LevelKeyDesc('host', 'Settings for host build-env', ( KeyDesc('toolchain-path', 'Folder to prepend to PATH when executing toolchain command in the host build env', [str]), )), LevelKeyDesc('alpine', 'Settings for Alpine linux build-env', ( KeyDesc('version', 'Alpine linux version, e.g. 3.18.0', [None, str]), - KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, TypedList[str]]), + KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, List[str]]), )), )), KeyDesc('overlay-backend', 'Backend to use for overlaying folders while building modules. Can be "overlayfs" (overlayfs filesystem, recommended and fastest) or "copy (plain folder copy)', [str]), - KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [TypedDict[str, object]]), + KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [Dict[str, object]]), ), ) diff --git a/lisa/_typeclass.py b/lisa/_typeclass.py index 3468d7d23..49002c2ef 100644 --- a/lisa/_typeclass.py +++ b/lisa/_typeclass.py @@ -192,10 +192,11 @@ Note that it's possible to implement a typeclass for a type that has no values, but for which ``isinstance(value, thetype)`` will return true. This can be achieved using ``__instancecheck__`` or ``__subclasscheck__`` and is used in particular by the abstract base classes provided by :mod:`collections.abc`. -:class:`lisa._generic.TypedList` is another example. Casting values "registered" as -instances of these types is expensive though, as validity of the cast depends -on the value itself. That means it's not possible to memoize the result of the -cast associated it with the type of the value. +:class:`lisa._generic.SortedList` is another example. Typing hints from the +:mod:`typing` module can also be used. Casting values "registered" as instances +of these types is expensive though, as validity of the cast depends on the +value itself. That means it's not possible to memoize the result of the cast +associated it with the type of the value. One might wonder what casting a value to a typeclass gives. 
When possible, a @@ -215,13 +216,22 @@ import itertools import contextlib import textwrap from collections.abc import Iterable +from typing import List from devlib.utils.misc import ranges_to_list from lisa.utils import deduplicate # TODO: revisit pylint annotation once this is solved: # https://github.com/PyCQA/pylint/issues/1630 -from lisa._generic import TypedList # pylint: disable=unused-import +from lisa._generic import hint_to_class, is_hint + + +def _process_hint(obj): + if is_hint(obj): + return hint_to_class(obj) + else: + return obj + class TypeClassMeta(type): """ @@ -351,6 +361,10 @@ class TypeClassMeta(type): dct = {**typeclass.DEFAULTS, **dct} types = types if isinstance(types, Iterable) else [types] + + # Process type hints to turn them into normal class, implementing __instancecheck__ + types = list(map(_process_hint, types)) + for type_ in types: # Create an instance for each type, with the type as base class. bases = (type_,) @@ -432,6 +446,8 @@ class TypeClass(metaclass=TypeClassMeta): Base class to inherit from to define a new typeclass. """ def __new__(cls, obj): + obj = _process_hint(obj) + safe_to_memoize, instance, dct = cls._find_instance_dct(obj) # pylint: disable=unused-variable # Shallow copy to allow "casting" to the right type. Using a made-up # class allows piggy backing on regular attribute lookup, which is much @@ -467,11 +483,11 @@ class TypeClass(metaclass=TypeClassMeta): Make a proxy object for given type. The proxy is itself a type inheriting from the original type, along - with all the methods in ``dct``. ``__call__`` is overrident in the + with all the methods in ``dct``. ``__call__`` is overriden in the metaclass to make sure that invoking the type will yield instances of the original type. """ - class TypeProxyMeta(type): + class TypeProxyMeta(type(obj)): def __instancecheck__(cls, x): return isinstance(x, obj) @@ -698,14 +714,15 @@ class FromString(TypeClass): :type short: bool """ -class BuiltinFromStringInstance(FromString, types=(int, float, TypedList[float])): + +class BuiltinFromStringInstance(FromString, types=(int, float, List[float])): """ Parse the following types from a string: * ``int`` * ``float`` * ``str`` - Plus all the :class:`lisa._generic.TypedList` subtypes of the above types. + Plus all the :class:`lisa._generic.List` subtypes of the above types. """ @classmethod def from_str(cls, string): @@ -744,7 +761,7 @@ class BoolFromStringInstance(FromString, types=bool): return 'bool' -class IntListFromStringInstance(FromString, types=TypedList[int]): +class IntListFromStringInstance(FromString, types=List[int]): """ Instance of :class:`lisa._typeclass.FromString` for :class:`int` type. """ @@ -783,7 +800,7 @@ class StrFromStringInstance(FromString, types=str): def get_format_description(cls, short): return 'str' -class StrListFromStringInstance(FromString, types=TypedList[str]): +class StrListFromStringInstance(FromString, types=List[str]): """ Instance of :class:`lisa._typeclass.FromString` for :class:`str` type. 
""" diff --git a/lisa/analysis/base.py b/lisa/analysis/base.py index 7d805b305..416d2eee8 100644 --- a/lisa/analysis/base.py +++ b/lisa/analysis/base.py @@ -28,6 +28,7 @@ import warnings import itertools import copy from operator import itemgetter, attrgetter +from typing import List import numpy import matplotlib @@ -46,7 +47,6 @@ import panel.widgets from lisa.utils import Loggable, deprecate, get_doc_url, get_short_doc, get_subclasses, guess_format, is_running_ipython, measure_time, memoized, update_wrapper_doc, _import_all_submodules, optional_kwargs from lisa.trace import _CacheDataDesc from lisa.notebook import _hv_fig_to_pane, _hv_link_dataframes, axis_cursor_delta, axis_link_dataframes, make_figure -from lisa._generic import TypedList # Ensure hv.extension() is called import lisa.notebook @@ -592,9 +592,9 @@ class AnalysisHelpers(Loggable, abc.ABC): rc_params=None, axis=None, interactive=None, - colors: TypedList[str]=None, - linestyles: TypedList[str]=None, - markers: TypedList[str]=None, + colors: List[str]=None, + linestyles: List[str]=None, + markers: List[str]=None, **kwargs ): diff --git a/lisa/analysis/idle.py b/lisa/analysis/idle.py index af419dafa..5505ce997 100644 --- a/lisa/analysis/idle.py +++ b/lisa/analysis/idle.py @@ -18,6 +18,7 @@ from functools import reduce import operator import warnings +from typing import List import pandas as pd import holoviews as hv @@ -25,7 +26,6 @@ import holoviews as hv from lisa.datautils import df_add_delta, df_refit_index, df_split_signals from lisa.analysis.base import TraceAnalysisBase from lisa.trace import requires_events, CPU -from lisa._generic import TypedList from lisa.analysis.base import TraceAnalysisBase @@ -263,7 +263,7 @@ class IdleAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_cluster_idle_state_residency.used_events - def plot_cluster_idle_state_residency(self, cluster: TypedList[CPU], pct: bool=False): + def plot_cluster_idle_state_residency(self, cluster: List[CPU], pct: bool=False): """ Plot the idle state residency of a cluster diff --git a/lisa/analysis/load_tracking.py b/lisa/analysis/load_tracking.py index e627dd8b4..d46e7e1a8 100644 --- a/lisa/analysis/load_tracking.py +++ b/lisa/analysis/load_tracking.py @@ -19,6 +19,7 @@ import operator import itertools +from typing import List import holoviews as hv import pandas as pd @@ -28,7 +29,6 @@ from lisa.analysis.status import StatusAnalysis from lisa.trace import requires_one_event_of, may_use_events, will_use_events_from, TaskID, CPU, MissingTraceEventError, OrTraceEventChecker from lisa.utils import deprecate from lisa.datautils import df_refit_index, series_refit_index, df_filter_task_ids, df_split_signals -from lisa._generic import TypedList from lisa.notebook import plot_signal, _hv_neutral @@ -133,7 +133,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): 'sched_util_est_cfs', 'sched_cpu_capacity', ) - def df_cpus_signal(self, signal, cpus: TypedList[CPU]=None): + def df_cpus_signal(self, signal, cpus: List[CPU]=None): """ Get the load-tracking signals for the CPUs @@ -322,7 +322,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @may_use_events(StatusAnalysis.plot_overutilized.used_events) @df_cpus_signal.used_events - def plot_cpus_signals(self, cpus: TypedList[CPU]=None, signals: TypedList[str]=['util', 'load']): + def plot_cpus_signals(self, cpus: List[CPU]=None, signals: List[str]=['util', 'load']): """ Plot the CPU-related load-tracking signals @@ -374,7 +374,7 @@ class 
LoadTrackingAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_task_signal.used_events - def plot_task_signals(self, task: TaskID, signals: TypedList[str]=['util', 'load']): + def plot_task_signals(self, task: TaskID, signals: List[str]=['util', 'load']): """ Plot the task-related load-tracking signals diff --git a/lisa/analysis/tasks.py b/lisa/analysis/tasks.py index bb314a14b..3a70124dc 100644 --- a/lisa/analysis/tasks.py +++ b/lisa/analysis/tasks.py @@ -18,6 +18,7 @@ from enum import Enum import itertools import warnings +from typing import List import numpy as np import pandas as pd @@ -28,7 +29,6 @@ from lisa.analysis.base import TraceAnalysisBase from lisa.utils import memoized, kwargs_forwarded_to, deprecate from lisa.datautils import df_filter_task_ids, series_rolling_apply, series_refit_index, df_refit_index, df_deduplicate, df_split_signals, df_add_delta, df_window, df_update_duplicates, df_combine_duplicates from lisa.trace import requires_events, will_use_events_from, may_use_events, TaskID, CPU, MissingTraceEventError -from lisa._generic import TypedList from lisa.notebook import _hv_neutral, plot_signal, _hv_twinx @@ -831,7 +831,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_tasks_total_residency.used_events - def plot_tasks_total_residency(self, tasks: TypedList[TaskID]=None, ascending: bool=False, + def plot_tasks_total_residency(self, tasks: List[TaskID]=None, ascending: bool=False, count: bool=None): """ Plot the stacked total time spent by each task on each CPU @@ -926,7 +926,7 @@ class TasksAnalysis(TraceAnalysisBase): return plot_signal(series, name=label) @TraceAnalysisBase.plot_method - def plot_tasks_wakeups(self, target_cpus: TypedList[CPU]=None, window: float=1e-2, per_sec: bool=False): + def plot_tasks_wakeups(self, target_cpus: List[CPU]=None, window: float=1e-2, per_sec: bool=False): """ Plot task wakeups over time @@ -975,7 +975,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @requires_events("sched_wakeup_new") - def plot_tasks_forks(self, target_cpus: TypedList[CPU]=None, window: float=1e-2, per_sec: bool=False): + def plot_tasks_forks(self, target_cpus: List[CPU]=None, window: float=1e-2, per_sec: bool=False): """ Plot task forks over time @@ -1319,7 +1319,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @_plot_tasks_activation.used_events @kwargs_forwarded_to(_plot_tasks_activation, ignore=['tasks', 'best_effort']) - def plot_tasks_activation(self, tasks: TypedList[TaskID]=None, hide_tasks: TypedList[TaskID]=None, which_cpu: bool=True, overlay: bool=False, **kwargs): + def plot_tasks_activation(self, tasks: List[TaskID]=None, hide_tasks: List[TaskID]=None, which_cpu: bool=True, overlay: bool=False, **kwargs): """ Plot all tasks activations, in a style similar to kernelshark. 
diff --git a/lisa/energy_meter.py b/lisa/energy_meter.py index e72898d50..d3d8052f1 100644 --- a/lisa/energy_meter.py +++ b/lisa/energy_meter.py @@ -27,6 +27,7 @@ from collections.abc import Mapping from subprocess import Popen, PIPE, STDOUT import subprocess from time import sleep +from typing import List import numpy as np import pandas as pd @@ -39,7 +40,6 @@ from lisa.datautils import series_integrate from lisa.conf import ( SimpleMultiSrcConf, KeyDesc, TopLevelKeyDesc, Configurable, ) -from lisa._generic import TypedList # Default energy measurements for each board EnergyReport = namedtuple('EnergyReport', @@ -320,9 +320,9 @@ class AEPConf(SimpleMultiSrcConf, HideExekallID): """ STRUCTURE = TopLevelKeyDesc('aep-conf', 'AEP Energy Meter configuration', ( KeyDesc('channel-map', 'Channels to use', [Mapping]), - KeyDesc('resistor-values', 'Resistor values', [TypedList[float]]), - KeyDesc('labels', 'List of labels', [TypedList[str]]), - KeyDesc('device-entry', 'TTY device', [TypedList[str]]), + KeyDesc('resistor-values', 'Resistor values', [List[float]]), + KeyDesc('labels', 'List of labels', [List[str]]), + KeyDesc('device-entry', 'TTY device', [List[str]]), )) diff --git a/lisa/platforms/platinfo.py b/lisa/platforms/platinfo.py index c9c137f94..2b9831a27 100644 --- a/lisa/platforms/platinfo.py +++ b/lisa/platforms/platinfo.py @@ -19,13 +19,14 @@ import re import functools import contextlib from collections.abc import Mapping +from typing import Dict, List from lisa.utils import HideExekallID, group_by_value, memoized from lisa.conf import ( DeferredValue, DeferredExcep, MultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, DerivedKeyDesc, ConfigKeyError, ) -from lisa._generic import TypedDict, TypedList, SortedTypedList +from lisa._generic import SortedList from lisa.energy_model import EnergyModel from lisa.wlgen.rta import RTA @@ -70,9 +71,9 @@ class KernelSymbolsAddress(KeyDesc): return '' -CPUIdList = SortedTypedList[int] -FreqList = SortedTypedList[int] -CPUCapacities = TypedDict[int,int] +CPUIdList = SortedList[int] +FreqList = SortedList[int] +CPUCapacities = Dict[int,int] class PlatformInfo(MultiSrcConf, HideExekallID): @@ -89,13 +90,13 @@ class PlatformInfo(MultiSrcConf, HideExekallID): # we need. 
STRUCTURE = TopLevelKeyDesc('platform-info', 'Platform-specific information', ( LevelKeyDesc('rtapp', 'RTapp configuration', ( - KeyDesc('calib', 'RTapp calibration dictionary', [TypedDict[int,int]]), + KeyDesc('calib', 'RTapp calibration dictionary', [Dict[int,int]]), )), LevelKeyDesc('kernel', 'Kernel-related information', ( KeyDesc('version', '', [KernelVersion]), KernelConfigKeyDesc('config', '', [TypedKernelConfig]), - KernelSymbolsAddress('symbols-address', 'Dictionary of addresses to symbol names extracted from /proc/kallsyms', [TypedDict[int,str]], deepcopy_val=False), + KernelSymbolsAddress('symbols-address', 'Dictionary of addresses to symbol names extracted from /proc/kallsyms', [Dict[int,str]], deepcopy_val=False), )), KeyDesc('nrg-model', 'Energy model object', [EnergyModel]), LevelKeyDesc('cpu-capacities', 'Dictionaries of CPU ID to capacity value', ( @@ -117,12 +118,12 @@ class PlatformInfo(MultiSrcConf, HideExekallID): KeyDesc('freq-domains', 'Frequency domains modeled by a list of CPU IDs for each domain', - [TypedList[CPUIdList]]), - KeyDesc('freqs', 'Dictionnary of CPU ID to list of frequencies', [TypedDict[int, FreqList]]), + [List[CPUIdList]]), + KeyDesc('freqs', 'Dictionnary of CPU ID to list of frequencies', [Dict[int, FreqList]]), DerivedKeyDesc('capacity-classes', 'Capacity classes modeled by a list of CPU IDs for each capacity, sorted by capacity', - [TypedList[CPUIdList]], + [List[CPUIdList]], [['cpu-capacities', 'orig']], compute_capa_classes), )) """Some keys have a reserved meaning with an associated type.""" diff --git a/lisa/target.py b/lisa/target.py index a70115669..7ecf4a2df 100644 --- a/lisa/target.py +++ b/lisa/target.py @@ -35,6 +35,7 @@ import shutil from types import ModuleType, FunctionType from operator import itemgetter import warnings +from typing import List, Literal import devlib from devlib.exception import TargetStableError @@ -44,7 +45,6 @@ from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized, destroyablecontextmanager, ContextManagerExit from lisa._assets import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable, DelegatedLevelKeyDesc -from lisa._generic import TypedList, TypedDict, OneOf from lisa._kmod import _KernelBuildEnv, DynamicKmod, _KernelBuildEnvConf from lisa.platforms.platinfo import PlatformInfo @@ -146,7 +146,7 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board name, free-form value only used to embelish logs', [str]), - KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [OneOf['linux', 'android', 'host']]), + KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [Literal['linux', 'android', 'host']]), KeyDesc('host', 'Hostname or IP address of the host', [str, None]), KeyDesc('username', 'SSH username. 
On ADB connections, "root" username will root adb upon target connection', [str, None]), @@ -156,7 +156,7 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): KeyDesc('keyfile', 'SSH private key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target workdir', [str]), - KeyDesc('tools', 'List of tools to install on the target', [TypedList[str]]), + KeyDesc('tools', 'List of tools to install on the target', [List[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to speed up the connection', [bool]), LevelKeyDesc('kernel', 'kernel information', ( KeyDesc('src', 'Path to kernel source tree matching the kernel running on the target used to build modules', [str, None]), @@ -177,8 +177,8 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): KeyDesc('class', 'Name of the class to use', [str]), KeyDesc('args', 'Keyword arguments to build the Platform object', [Mapping]), )), - KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [TypedList[str]]), - KeyDesc('file-xfer', 'File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)', [TypedList[str]]), + KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [List[str]]), + KeyDesc('file-xfer', 'File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)', [List[str]]), KeyDesc('max-async', 'Maximum number of asynchronous commands in flight at any time', [int, None]), )) diff --git a/lisa/tests/base.py b/lisa/tests/base.py index 5fe208eab..de5f2c618 100644 --- a/lisa/tests/base.py +++ b/lisa/tests/base.py @@ -31,6 +31,7 @@ import itertools import types import warnings from operator import attrgetter +from typing import List from datetime import datetime from collections import OrderedDict, ChainMap @@ -61,7 +62,6 @@ from lisa.trace import FtraceCollector, FtraceConf, DmesgCollector, ComposedColl from lisa.conf import ( SimpleMultiSrcConf, KeyDesc, TopLevelKeyDesc, ) -from lisa._generic import TypedList from lisa.pelt import pelt_settling_time @@ -1426,7 +1426,7 @@ class DmesgTestConf(TestConfBase): {yaml_example} """ STRUCTURE = TopLevelKeyDesc('dmesg', 'Dmesg test configuration', ( - KeyDesc('ignored-patterns', 'List of Python regex matching dmesg entries *content* to be ignored (see :class:`devlib.collector.dmesg.KernelLogEntry` for how the message is split)', [TypedList[str]]), + KeyDesc('ignored-patterns', 'List of Python regex matching dmesg entries *content* to be ignored (see :class:`devlib.collector.dmesg.KernelLogEntry` for how the message is split)', [List[str]]), )) diff --git a/lisa/trace.py b/lisa/trace.py index 17a4324fc..a2e163331 100644 --- a/lisa/trace.py +++ b/lisa/trace.py @@ -44,7 +44,7 @@ import subprocess import itertools import functools import fnmatch -from typing import Union +from typing import Union, List from difflib import get_close_matches import numpy as np @@ -57,7 +57,6 @@ import devlib from lisa.utils import Loggable, HideExekallID, memoized, lru_memoized, deduplicate, take, deprecate, nullcontext, measure_time, checksum, newtype, groupby, PartialInit, kwargs_forwarded_to, kwargs_dispatcher, ComposedContextManager, get_nested_key, bothmethod from lisa.conf import SimpleMultiSrcConf, LevelKeyDesc, KeyDesc, TopLevelKeyDesc, Configurable -from lisa._generic import TypedList from lisa.datautils import SignalDesc, df_add_delta, df_deduplicate, df_window, df_window_signals, series_convert from lisa.version import 
VERSION_TOKEN from lisa._typeclass import FromString, IntListFromStringInstance @@ -134,7 +133,7 @@ class TaskIDFromStringInstance(FromString, types=TaskID): """).strip() -class TaskIDListFromStringInstance(FromString, types=TypedList[TaskID]): +class TaskIDListFromStringInstance(FromString, types=List[TaskID]): """ Instance of :class:`lisa._typeclass.FromString` for lists :class:`TaskID` type. """ @@ -157,13 +156,13 @@ class TaskIDListFromStringInstance(FromString, types=TypedList[TaskID]): CPU = newtype(int, 'CPU', doc='Alias to ``int`` used for CPU IDs') -class CPUListFromStringInstance(FromString, types=TypedList[CPU]): - # Use the same implementation as for TypedList[int] +class CPUListFromStringInstance(FromString, types=List[CPU]): + # Use the same implementation as for List[int] from_str = IntListFromStringInstance.from_str @classmethod def get_format_description(cls, short): - return FromString(TypedList[int]).get_format_description(short=short) + return FromString(List[int]).get_format_description(short=short) class MissingMetadataError(KeyError): @@ -5627,9 +5626,9 @@ class FtraceConf(SimpleMultiSrcConf, HideExekallID): {yaml_example} """ STRUCTURE = TopLevelKeyDesc('ftrace-conf', 'FTrace configuration', ( - KeyDesc('events', 'FTrace events to trace', [TypedList[str], TraceEventCheckerBase]), - KeyDesc('events-namespaces', 'FTrace events namespaces to use. See Trace namespace constructor parameter.', [TypedList[Union[str, None]], None]), - KeyDesc('functions', 'FTrace functions to trace', [TypedList[str]]), + KeyDesc('events', 'FTrace events to trace', [List[str], TraceEventCheckerBase]), + KeyDesc('events-namespaces', 'FTrace events namespaces to use. See Trace namespace constructor parameter.', [List[Union[str, None]], None]), + KeyDesc('functions', 'FTrace functions to trace', [List[str]]), KeyDesc('buffer-size', 'FTrace buffer size', [int]), KeyDesc('trace-clock', 'Clock used while tracing (see "trace_clock" in ftrace.txt kernel doc)', [str, None]), KeyDesc('saved-cmdlines-nr', 'Number of saved cmdlines with associated PID while tracing', [int]), -- GitLab From 0922fb5b3530cb23764d32a77b728eeafe6aab4a Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 15:20:46 +0100 Subject: [PATCH 17/24] setup.py: Bump required Python to >= 3.8 FEATURE Require Python >= 3.8 so that the typing standard module contains the necessary helpers. Python 3.7 has reached end of life and 2 Ubuntu LTS versions have been released with >= 3.8 so no breakage is expected. A number of packages like scipy have moved away from 3.7 already. 
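As a rough editorial illustration (not part of the patch, and assuming the
relevant helper is typing.Literal, which this series uses for configuration
keys such as build-env and kind): Literal only exists in the standard typing
module from Python 3.8 onwards, on 3.7 it is only available through the
typing_extensions backport.

    # Requires Python >= 3.8; on 3.7 "from typing import Literal" fails.
    from typing import Literal

    BuildEnv = Literal['host', 'alpine']
    Kind = Literal['linux', 'android', 'host']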
---
 .github/workflows/test.yml | 1 -
 setup.py                   | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 9612016a2..9550a2d3a 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -41,7 +41,6 @@ jobs:
         python-version:
           # Github seems to always block 3 jobs these days, so keep at most 3
           # versions there:
-          - '3.7'
           - '3.8'
           - '3.11'
 
diff --git a/setup.py b/setup.py
index 8abf72a6c..f747205ab 100755
--- a/setup.py
+++ b/setup.py
@@ -96,7 +96,7 @@ extras_require['all'] = sorted(set(
     itertools.chain.from_iterable(extras_require.values())
 ))
 
-python_requires = '>= 3.7'
+python_requires = '>= 3.8'
 
 
 if __name__ == "__main__":
-- 
GitLab

From cdefcd78494e0f1c5a25c77a74b2644a53464681 Mon Sep 17 00:00:00 2001
From: Douglas Raillard
Date: Thu, 27 Jul 2023 16:17:21 +0100
Subject: [PATCH 18/24] lisa: Cleanup Python <= 3.7 cruft

Clean up things that were only relevant in Python <= 3.7, since we now
require Python >= 3.8.
---
 lisa/trace.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/lisa/trace.py b/lisa/trace.py
index a2e163331..67b61a388 100644
--- a/lisa/trace.py
+++ b/lisa/trace.py
@@ -764,9 +764,7 @@ class TxtTraceParserBase(TraceParserBase):
         will be used as is.
     """
 
-    # Since re.Match is only importable directly since Python >= 3.7, use a
-    # dummy match to get the type
-    _RE_MATCH_CLS = re.match('x', 'x').__class__
+    _RE_MATCH_CLS = re.Match
 
     def __init__(self,
                  lines,
-- 
GitLab

From 2a3e17924fcf7d2541e97071dce708e2079f0ce9 Mon Sep 17 00:00:00 2001
From: Douglas Raillard
Date: Thu, 27 Jul 2023 14:22:33 +0100
Subject: [PATCH 19/24] lisa._typeclass: Fix BuiltinFromStringInstance

FIX

* Align docstring with the actual implementation.
* Remove the implementation for List[float]. The format is in direct
  conflict with the implementation for List[int], and nothing in lisa is
  currently using List[float].
---
 lisa/_typeclass.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/lisa/_typeclass.py b/lisa/_typeclass.py
index 49002c2ef..e8d2a910a 100644
--- a/lisa/_typeclass.py
+++ b/lisa/_typeclass.py
@@ -715,14 +715,11 @@ class FromString(TypeClass):
     """
 
 
-class BuiltinFromStringInstance(FromString, types=(int, float, List[float])):
+class BuiltinFromStringInstance(FromString, types=(int, float)):
     """
     Parse the following types from a string:
     * ``int``
     * ``float``
-    * ``str``
-
-    Plus all the :class:`lisa._generic.List` subtypes of the above types.
     """
     @classmethod
     def from_str(cls, string):
-- 
GitLab

From 6897f664dc4f616d630a2846e5ee9233ac75d3e4 Mon Sep 17 00:00:00 2001
From: Douglas Raillard
Date: Thu, 27 Jul 2023 14:29:19 +0100
Subject: [PATCH 20/24] lisa._typeclass: Make all instances private

FIX

Make typeclass instance declarations private, since the details of how it is
done are still a private API.
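A minimal usage sketch (editorial, not part of the patch, and assuming the
resolution behaviour relied upon later in this series): callers keep going
through the FromString typeclass itself, so renaming the instance declaration
classes to private names is invisible to them.

    from typing import List

    from lisa._typeclass import FromString

    # '1,2,10,55-99' is one of the documented int-list input formats.
    cpus = FromString(List[int]).from_str('1,2,10,55-99')
    fmt = FromString(List[int]).get_format_description(short=True)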
--- lisa/_typeclass.py | 10 +++++----- lisa/trace.py | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/lisa/_typeclass.py b/lisa/_typeclass.py index e8d2a910a..b5f760049 100644 --- a/lisa/_typeclass.py +++ b/lisa/_typeclass.py @@ -715,7 +715,7 @@ class FromString(TypeClass): """ -class BuiltinFromStringInstance(FromString, types=(int, float)): +class _BuiltinFromStringInstance(FromString, types=(int, float)): """ Parse the following types from a string: * ``int`` @@ -733,7 +733,7 @@ class BuiltinFromStringInstance(FromString, types=(int, float)): return cls.__name__ -class BoolFromStringInstance(FromString, types=bool): +class _BoolFromStringInstance(FromString, types=bool): """ Parse boolean from a string. """ @@ -758,7 +758,7 @@ class BoolFromStringInstance(FromString, types=bool): return 'bool' -class IntListFromStringInstance(FromString, types=List[int]): +class _IntListFromStringInstance(FromString, types=List[int]): """ Instance of :class:`lisa._typeclass.FromString` for :class:`int` type. """ @@ -785,7 +785,7 @@ class IntListFromStringInstance(FromString, types=List[int]): * ``1,2,10,55-99``: a comma separated list of the previous formats """).strip() -class StrFromStringInstance(FromString, types=str): +class _StrFromStringInstance(FromString, types=str): """ Instance of :class:`lisa._typeclass.FromString` for :class:`str` type. """ @@ -797,7 +797,7 @@ class StrFromStringInstance(FromString, types=str): def get_format_description(cls, short): return 'str' -class StrListFromStringInstance(FromString, types=List[str]): +class _StrListFromStringInstance(FromString, types=List[str]): """ Instance of :class:`lisa._typeclass.FromString` for :class:`str` type. """ diff --git a/lisa/trace.py b/lisa/trace.py index 67b61a388..eba48edbb 100644 --- a/lisa/trace.py +++ b/lisa/trace.py @@ -59,7 +59,7 @@ from lisa.utils import Loggable, HideExekallID, memoized, lru_memoized, deduplic from lisa.conf import SimpleMultiSrcConf, LevelKeyDesc, KeyDesc, TopLevelKeyDesc, Configurable from lisa.datautils import SignalDesc, df_add_delta, df_deduplicate, df_window, df_window_signals, series_convert from lisa.version import VERSION_TOKEN -from lisa._typeclass import FromString, IntListFromStringInstance +from lisa._typeclass import FromString from lisa._kmod import LISAFtraceDynamicKmod from lisa._assets import get_bin @@ -99,7 +99,7 @@ class TaskID(namedtuple('TaskID', ('pid', 'comm'))): _STR_PARSE_REGEX = re.compile(r'\[?([0-9]+):([a-zA-Z0-9_-]+)\]?') -class TaskIDFromStringInstance(FromString, types=TaskID): +class _TaskIDFromStringInstance(FromString, types=TaskID): """ Instance of :class:`lisa._typeclass.FromString` for :class:`TaskID` type. """ @@ -133,7 +133,7 @@ class TaskIDFromStringInstance(FromString, types=TaskID): """).strip() -class TaskIDListFromStringInstance(FromString, types=List[TaskID]): +class _TaskIDListFromStringInstance(FromString, types=List[TaskID]): """ Instance of :class:`lisa._typeclass.FromString` for lists :class:`TaskID` type. 
""" @@ -156,7 +156,7 @@ class TaskIDListFromStringInstance(FromString, types=List[TaskID]): CPU = newtype(int, 'CPU', doc='Alias to ``int`` used for CPU IDs') -class CPUListFromStringInstance(FromString, types=List[CPU]): +class _CPUListFromStringInstance(FromString, types=List[CPU]): # Use the same implementation as for List[int] from_str = IntListFromStringInstance.from_str -- GitLab From 7c22e14d2c1387978f0f9ba9ced3ee829215190a Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 14:44:58 +0100 Subject: [PATCH 21/24] lisa._typeclass: Implement instances for typing.Sequence Use the same implementation for typing.Sequence and typing.List --- lisa/_typeclass.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lisa/_typeclass.py b/lisa/_typeclass.py index b5f760049..d0c526a84 100644 --- a/lisa/_typeclass.py +++ b/lisa/_typeclass.py @@ -216,7 +216,7 @@ import itertools import contextlib import textwrap from collections.abc import Iterable -from typing import List +import typing from devlib.utils.misc import ranges_to_list @@ -758,7 +758,7 @@ class _BoolFromStringInstance(FromString, types=bool): return 'bool' -class _IntListFromStringInstance(FromString, types=List[int]): +class _IntSeqFromStringInstance(FromString, types=(typing.List[int], typing.Sequence[int])): """ Instance of :class:`lisa._typeclass.FromString` for :class:`int` type. """ @@ -797,7 +797,7 @@ class _StrFromStringInstance(FromString, types=str): def get_format_description(cls, short): return 'str' -class _StrListFromStringInstance(FromString, types=List[str]): +class _StrSeqFromStringInstance(FromString, types=(typing.List[str], typing.Sequence[str])): """ Instance of :class:`lisa._typeclass.FromString` for :class:`str` type. """ -- GitLab From 68aace7a6d62bfc43168c3acb6575c570b077c57 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 14:18:12 +0100 Subject: [PATCH 22/24] lisa.trace: Cleanup CPUListFromStringInstance Use instance resolution with FromString(List[int]) rather than hardcoding IntListFromStringInstance. --- lisa/trace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lisa/trace.py b/lisa/trace.py index eba48edbb..2f988df28 100644 --- a/lisa/trace.py +++ b/lisa/trace.py @@ -158,7 +158,7 @@ CPU = newtype(int, 'CPU', doc='Alias to ``int`` used for CPU IDs') class _CPUListFromStringInstance(FromString, types=List[CPU]): # Use the same implementation as for List[int] - from_str = IntListFromStringInstance.from_str + from_str = FromString(List[int]).from_str @classmethod def get_format_description(cls, short): -- GitLab From e8b6c3a1ef3349d70e6c203b84c59dfa0d459870 Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 14:40:15 +0100 Subject: [PATCH 23/24] lisa: Use typing.Sequence instead of typing.List FIX typing.List is over-restrictive as it prevents types like tuples, where in practice most of the code does not care. 
--- lisa/_generic.py | 7 ++++--- lisa/_kmod.py | 8 ++++---- lisa/_typeclass.py | 2 +- lisa/analysis/base.py | 8 ++++---- lisa/analysis/idle.py | 4 ++-- lisa/analysis/load_tracking.py | 8 ++++---- lisa/analysis/tasks.py | 10 +++++----- lisa/energy_meter.py | 8 ++++---- lisa/platforms/platinfo.py | 24 ++++++++++++++---------- lisa/target.py | 10 +++++----- lisa/tests/base.py | 4 ++-- lisa/trace.py | 17 ++++++++--------- 12 files changed, 57 insertions(+), 53 deletions(-) diff --git a/lisa/_generic.py b/lisa/_generic.py index 98df2925b..93d6291a2 100644 --- a/lisa/_generic.py +++ b/lisa/_generic.py @@ -21,7 +21,8 @@ Generic types inspired by the :mod:`typing` module. import functools import inspect -from typing import Any, Union, Generic, TypeVar, List +import typing +from typing import Any, Union, Generic, TypeVar import typeguard from collections.abc import Iterable @@ -135,12 +136,12 @@ def hint_to_class(hint): T = TypeVar('T') -class SortedList(Generic[T], _TypeguardCustom): +class SortedSequence(Generic[T], _TypeguardCustom): """ Same as :class:`typing.List` but enforces sorted values when runtime checked using :mod:`typeguard`. """ - _HINT = List[T] + _HINT = typing.Sequence[T] @classmethod def _instancecheck(cls, value): diff --git a/lisa/_kmod.py b/lisa/_kmod.py index 6d0b10712..36c45fe5d 100644 --- a/lisa/_kmod.py +++ b/lisa/_kmod.py @@ -131,7 +131,7 @@ from operator import itemgetter from shlex import quote from io import BytesIO from collections.abc import Mapping -from typing import List, Dict, Literal +import typing from elftools.elf.elffile import ELFFile @@ -713,19 +713,19 @@ class TarOverlay(_PathOverlayBase): class _KernelBuildEnvConf(SimpleMultiSrcConf): STRUCTURE = TopLevelKeyDesc('kernel-build-env-conf', 'Build environment settings', ( - KeyDesc('build-env', 'Environment used to build modules. Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [Literal['host', 'alpine']]), + KeyDesc('build-env', 'Environment used to build modules. Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [typing.Literal['host', 'alpine']]), LevelKeyDesc('build-env-settings', 'build-env settings', ( LevelKeyDesc('host', 'Settings for host build-env', ( KeyDesc('toolchain-path', 'Folder to prepend to PATH when executing toolchain command in the host build env', [str]), )), LevelKeyDesc('alpine', 'Settings for Alpine linux build-env', ( KeyDesc('version', 'Alpine linux version, e.g. 3.18.0', [None, str]), - KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, List[str]]), + KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, typing.Sequence[str]]), )), )), KeyDesc('overlay-backend', 'Backend to use for overlaying folders while building modules. 
Can be "overlayfs" (overlayfs filesystem, recommended and fastest) or "copy (plain folder copy)', [str]), - KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [Dict[str, object]]), + KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [typing.Dict[str, object]]), ), ) diff --git a/lisa/_typeclass.py b/lisa/_typeclass.py index d0c526a84..ccf07ab49 100644 --- a/lisa/_typeclass.py +++ b/lisa/_typeclass.py @@ -192,7 +192,7 @@ Note that it's possible to implement a typeclass for a type that has no values, but for which ``isinstance(value, thetype)`` will return true. This can be achieved using ``__instancecheck__`` or ``__subclasscheck__`` and is used in particular by the abstract base classes provided by :mod:`collections.abc`. -:class:`lisa._generic.SortedList` is another example. Typing hints from the +:class:`lisa._generic.SortedSequence` is another example. Typing hints from the :mod:`typing` module can also be used. Casting values "registered" as instances of these types is expensive though, as validity of the cast depends on the value itself. That means it's not possible to memoize the result of the cast diff --git a/lisa/analysis/base.py b/lisa/analysis/base.py index 416d2eee8..93ac39a84 100644 --- a/lisa/analysis/base.py +++ b/lisa/analysis/base.py @@ -28,7 +28,7 @@ import warnings import itertools import copy from operator import itemgetter, attrgetter -from typing import List +import typing import numpy import matplotlib @@ -592,9 +592,9 @@ class AnalysisHelpers(Loggable, abc.ABC): rc_params=None, axis=None, interactive=None, - colors: List[str]=None, - linestyles: List[str]=None, - markers: List[str]=None, + colors: typing.Sequence[str]=None, + linestyles: typing.Sequence[str]=None, + markers: typing.Sequence[str]=None, **kwargs ): diff --git a/lisa/analysis/idle.py b/lisa/analysis/idle.py index 5505ce997..cb97054d7 100644 --- a/lisa/analysis/idle.py +++ b/lisa/analysis/idle.py @@ -18,7 +18,7 @@ from functools import reduce import operator import warnings -from typing import List +import typing import pandas as pd import holoviews as hv @@ -263,7 +263,7 @@ class IdleAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_cluster_idle_state_residency.used_events - def plot_cluster_idle_state_residency(self, cluster: List[CPU], pct: bool=False): + def plot_cluster_idle_state_residency(self, cluster: typing.Sequence[CPU], pct: bool=False): """ Plot the idle state residency of a cluster diff --git a/lisa/analysis/load_tracking.py b/lisa/analysis/load_tracking.py index d46e7e1a8..da30674ca 100644 --- a/lisa/analysis/load_tracking.py +++ b/lisa/analysis/load_tracking.py @@ -19,7 +19,7 @@ import operator import itertools -from typing import List +import typing import holoviews as hv import pandas as pd @@ -133,7 +133,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): 'sched_util_est_cfs', 'sched_cpu_capacity', ) - def df_cpus_signal(self, signal, cpus: List[CPU]=None): + def df_cpus_signal(self, signal, cpus: typing.Sequence[CPU]=None): """ Get the load-tracking signals for the CPUs @@ -322,7 +322,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @may_use_events(StatusAnalysis.plot_overutilized.used_events) @df_cpus_signal.used_events - def plot_cpus_signals(self, cpus: List[CPU]=None, signals: List[str]=['util', 'load']): + def plot_cpus_signals(self, cpus: typing.Sequence[CPU]=None, signals: typing.Sequence[str]=['util', 'load']): """ Plot the CPU-related load-tracking signals @@ 
-374,7 +374,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_task_signal.used_events - def plot_task_signals(self, task: TaskID, signals: List[str]=['util', 'load']): + def plot_task_signals(self, task: TaskID, signals: typing.Sequence[str]=['util', 'load']): """ Plot the task-related load-tracking signals diff --git a/lisa/analysis/tasks.py b/lisa/analysis/tasks.py index 3a70124dc..d45266ff2 100644 --- a/lisa/analysis/tasks.py +++ b/lisa/analysis/tasks.py @@ -18,7 +18,7 @@ from enum import Enum import itertools import warnings -from typing import List +import typing import numpy as np import pandas as pd @@ -831,7 +831,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_tasks_total_residency.used_events - def plot_tasks_total_residency(self, tasks: List[TaskID]=None, ascending: bool=False, + def plot_tasks_total_residency(self, tasks: typing.Sequence[TaskID]=None, ascending: bool=False, count: bool=None): """ Plot the stacked total time spent by each task on each CPU @@ -926,7 +926,7 @@ class TasksAnalysis(TraceAnalysisBase): return plot_signal(series, name=label) @TraceAnalysisBase.plot_method - def plot_tasks_wakeups(self, target_cpus: List[CPU]=None, window: float=1e-2, per_sec: bool=False): + def plot_tasks_wakeups(self, target_cpus: typing.Sequence[CPU]=None, window: float=1e-2, per_sec: bool=False): """ Plot task wakeups over time @@ -975,7 +975,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @requires_events("sched_wakeup_new") - def plot_tasks_forks(self, target_cpus: List[CPU]=None, window: float=1e-2, per_sec: bool=False): + def plot_tasks_forks(self, target_cpus: typing.Sequence[CPU]=None, window: float=1e-2, per_sec: bool=False): """ Plot task forks over time @@ -1319,7 +1319,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @_plot_tasks_activation.used_events @kwargs_forwarded_to(_plot_tasks_activation, ignore=['tasks', 'best_effort']) - def plot_tasks_activation(self, tasks: List[TaskID]=None, hide_tasks: List[TaskID]=None, which_cpu: bool=True, overlay: bool=False, **kwargs): + def plot_tasks_activation(self, tasks: typing.Sequence[TaskID]=None, hide_tasks: typing.Sequence[TaskID]=None, which_cpu: bool=True, overlay: bool=False, **kwargs): """ Plot all tasks activations, in a style similar to kernelshark. 
diff --git a/lisa/energy_meter.py b/lisa/energy_meter.py index d3d8052f1..3807a6a2a 100644 --- a/lisa/energy_meter.py +++ b/lisa/energy_meter.py @@ -27,7 +27,7 @@ from collections.abc import Mapping from subprocess import Popen, PIPE, STDOUT import subprocess from time import sleep -from typing import List +import typing import numpy as np import pandas as pd @@ -320,9 +320,9 @@ class AEPConf(SimpleMultiSrcConf, HideExekallID): """ STRUCTURE = TopLevelKeyDesc('aep-conf', 'AEP Energy Meter configuration', ( KeyDesc('channel-map', 'Channels to use', [Mapping]), - KeyDesc('resistor-values', 'Resistor values', [List[float]]), - KeyDesc('labels', 'List of labels', [List[str]]), - KeyDesc('device-entry', 'TTY device', [List[str]]), + KeyDesc('resistor-values', 'Resistor values', [typing.Sequence[float]]), + KeyDesc('labels', 'List of labels', [typing.Sequence[str]]), + KeyDesc('device-entry', 'TTY device', [typing.Sequence[str]]), )) diff --git a/lisa/platforms/platinfo.py b/lisa/platforms/platinfo.py index 2b9831a27..e6018dfe4 100644 --- a/lisa/platforms/platinfo.py +++ b/lisa/platforms/platinfo.py @@ -19,14 +19,14 @@ import re import functools import contextlib from collections.abc import Mapping -from typing import Dict, List +import typing from lisa.utils import HideExekallID, group_by_value, memoized from lisa.conf import ( DeferredValue, DeferredExcep, MultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, DerivedKeyDesc, ConfigKeyError, ) -from lisa._generic import SortedList +from lisa._generic import SortedSequence from lisa.energy_model import EnergyModel from lisa.wlgen.rta import RTA @@ -71,9 +71,13 @@ class KernelSymbolsAddress(KeyDesc): return '' -CPUIdList = SortedList[int] -FreqList = SortedList[int] -CPUCapacities = Dict[int,int] +CPUIdSequence = SortedSequence[int] +FreqSequence = SortedSequence[int] +CPUCapacities = typing.Dict[int,int] + +# For backward compat only +CPUIdList = CPUIdSequence +FreqList = FreqSequence class PlatformInfo(MultiSrcConf, HideExekallID): @@ -90,13 +94,13 @@ class PlatformInfo(MultiSrcConf, HideExekallID): # we need. 
STRUCTURE = TopLevelKeyDesc('platform-info', 'Platform-specific information', ( LevelKeyDesc('rtapp', 'RTapp configuration', ( - KeyDesc('calib', 'RTapp calibration dictionary', [Dict[int,int]]), + KeyDesc('calib', 'RTapp calibration dictionary', [typing.Dict[int,int]]), )), LevelKeyDesc('kernel', 'Kernel-related information', ( KeyDesc('version', '', [KernelVersion]), KernelConfigKeyDesc('config', '', [TypedKernelConfig]), - KernelSymbolsAddress('symbols-address', 'Dictionary of addresses to symbol names extracted from /proc/kallsyms', [Dict[int,str]], deepcopy_val=False), + KernelSymbolsAddress('symbols-address', 'Dictionary of addresses to symbol names extracted from /proc/kallsyms', [typing.Dict[int,str]], deepcopy_val=False), )), KeyDesc('nrg-model', 'Energy model object', [EnergyModel]), LevelKeyDesc('cpu-capacities', 'Dictionaries of CPU ID to capacity value', ( @@ -118,12 +122,12 @@ class PlatformInfo(MultiSrcConf, HideExekallID): KeyDesc('freq-domains', 'Frequency domains modeled by a list of CPU IDs for each domain', - [List[CPUIdList]]), - KeyDesc('freqs', 'Dictionnary of CPU ID to list of frequencies', [Dict[int, FreqList]]), + [typing.Sequence[CPUIdSequence]]), + KeyDesc('freqs', 'Dictionnary of CPU ID to list of frequencies', [typing.Dict[int, FreqSequence]]), DerivedKeyDesc('capacity-classes', 'Capacity classes modeled by a list of CPU IDs for each capacity, sorted by capacity', - [List[CPUIdList]], + [typing.Sequence[CPUIdSequence]], [['cpu-capacities', 'orig']], compute_capa_classes), )) """Some keys have a reserved meaning with an associated type.""" diff --git a/lisa/target.py b/lisa/target.py index 7ecf4a2df..d4f3c30c3 100644 --- a/lisa/target.py +++ b/lisa/target.py @@ -35,7 +35,7 @@ import shutil from types import ModuleType, FunctionType from operator import itemgetter import warnings -from typing import List, Literal +import typing import devlib from devlib.exception import TargetStableError @@ -146,7 +146,7 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board name, free-form value only used to embelish logs', [str]), - KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [Literal['linux', 'android', 'host']]), + KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [typing.Literal['linux', 'android', 'host']]), KeyDesc('host', 'Hostname or IP address of the host', [str, None]), KeyDesc('username', 'SSH username. 
On ADB connections, "root" username will root adb upon target connection', [str, None]), @@ -156,7 +156,7 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): KeyDesc('keyfile', 'SSH private key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target workdir', [str]), - KeyDesc('tools', 'List of tools to install on the target', [List[str]]), + KeyDesc('tools', 'List of tools to install on the target', [typing.Sequence[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to speed up the connection', [bool]), LevelKeyDesc('kernel', 'kernel information', ( KeyDesc('src', 'Path to kernel source tree matching the kernel running on the target used to build modules', [str, None]), @@ -177,8 +177,8 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): KeyDesc('class', 'Name of the class to use', [str]), KeyDesc('args', 'Keyword arguments to build the Platform object', [Mapping]), )), - KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [List[str]]), - KeyDesc('file-xfer', 'File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)', [List[str]]), + KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [typing.Sequence[str]]), + KeyDesc('file-xfer', 'File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)', [typing.Sequence[str]]), KeyDesc('max-async', 'Maximum number of asynchronous commands in flight at any time', [int, None]), )) diff --git a/lisa/tests/base.py b/lisa/tests/base.py index de5f2c618..9a1870954 100644 --- a/lisa/tests/base.py +++ b/lisa/tests/base.py @@ -31,7 +31,7 @@ import itertools import types import warnings from operator import attrgetter -from typing import List +import typing from datetime import datetime from collections import OrderedDict, ChainMap @@ -1426,7 +1426,7 @@ class DmesgTestConf(TestConfBase): {yaml_example} """ STRUCTURE = TopLevelKeyDesc('dmesg', 'Dmesg test configuration', ( - KeyDesc('ignored-patterns', 'List of Python regex matching dmesg entries *content* to be ignored (see :class:`devlib.collector.dmesg.KernelLogEntry` for how the message is split)', [List[str]]), + KeyDesc('ignored-patterns', 'List of Python regex matching dmesg entries *content* to be ignored (see :class:`devlib.collector.dmesg.KernelLogEntry` for how the message is split)', [typing.Sequence[str]]), )) diff --git a/lisa/trace.py b/lisa/trace.py index 2f988df28..d5bc53c64 100644 --- a/lisa/trace.py +++ b/lisa/trace.py @@ -44,7 +44,7 @@ import subprocess import itertools import functools import fnmatch -from typing import Union, List +import typing from difflib import get_close_matches import numpy as np @@ -133,7 +133,7 @@ class _TaskIDFromStringInstance(FromString, types=TaskID): """).strip() -class _TaskIDListFromStringInstance(FromString, types=List[TaskID]): +class _TaskIDSeqFromStringInstance(FromString, types=(typing.List[TaskID], typing.Sequence[TaskID])): """ Instance of :class:`lisa._typeclass.FromString` for lists :class:`TaskID` type. 
""" @@ -156,13 +156,12 @@ class _TaskIDListFromStringInstance(FromString, types=List[TaskID]): CPU = newtype(int, 'CPU', doc='Alias to ``int`` used for CPU IDs') -class _CPUListFromStringInstance(FromString, types=List[CPU]): - # Use the same implementation as for List[int] - from_str = FromString(List[int]).from_str +class _CPUSeqFromStringInstance(FromString, types=(typing.List[CPU], typing.Sequence[CPU])): + from_str = FromString(typing.Sequence[int]).from_str @classmethod def get_format_description(cls, short): - return FromString(List[int]).get_format_description(short=short) + return FromString(typing.Sequence[int]).get_format_description(short=short) class MissingMetadataError(KeyError): @@ -5624,9 +5623,9 @@ class FtraceConf(SimpleMultiSrcConf, HideExekallID): {yaml_example} """ STRUCTURE = TopLevelKeyDesc('ftrace-conf', 'FTrace configuration', ( - KeyDesc('events', 'FTrace events to trace', [List[str], TraceEventCheckerBase]), - KeyDesc('events-namespaces', 'FTrace events namespaces to use. See Trace namespace constructor parameter.', [List[Union[str, None]], None]), - KeyDesc('functions', 'FTrace functions to trace', [List[str]]), + KeyDesc('events', 'FTrace events to trace', [typing.Sequence[str], TraceEventCheckerBase]), + KeyDesc('events-namespaces', 'FTrace events namespaces to use. See Trace namespace constructor parameter.', [typing.Sequence[typing.Union[str, None]], None]), + KeyDesc('functions', 'FTrace functions to trace', [typing.Sequence[str]]), KeyDesc('buffer-size', 'FTrace buffer size', [int]), KeyDesc('trace-clock', 'Clock used while tracing (see "trace_clock" in ftrace.txt kernel doc)', [str, None]), KeyDesc('saved-cmdlines-nr', 'Number of saved cmdlines with associated PID while tracing', [int]), -- GitLab From 17c53c5ebffc5dddfd6d9398ba3256a20b50c83b Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Thu, 27 Jul 2023 16:15:10 +0100 Subject: [PATCH 24/24] lisa.analysis.tasks: Fix plot_task_residency() FIX Fix plot_task_residency() when there is no frequency domain information available. --- lisa/analysis/tasks.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lisa/analysis/tasks.py b/lisa/analysis/tasks.py index d45266ff2..f2c1b8ca0 100644 --- a/lisa/analysis/tasks.py +++ b/lisa/analysis/tasks.py @@ -800,8 +800,9 @@ class TasksAnalysis(TraceAnalysisBase): label=f"Task running in domain {domain}" ) else: - self._plot_markers( - series_refit_index(sw_df['__cpu'], window=self.trace.window) + return self._plot_markers( + series_refit_index(sw_df['__cpu'], window=self.trace.window), + label=str(task), ) return ( -- GitLab