diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9612016a27b2c5e90f42cfdd3b29d68c0b38be4c..9550a2d3aa0c46aa5507e6622b9ed97e395016b5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -41,7 +41,6 @@ jobs: python-version: # Github seems to always block 3 jobs these days, so keep at most 3 # versions there: - - '3.7' - '3.8' - '3.11' diff --git a/doc/conf.py b/doc/conf.py index c0651371febf0ec530810f51bdccd3f1a3798485..889aaed4b2ad51d8ee6dc3bf8b4304e94c23c351 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -363,6 +363,7 @@ intersphinx_mapping = { 'wa': ('https://workload-automation.readthedocs.io/en/latest/', None), 'ipywidgets': ('https://ipywidgets.readthedocs.io/en/latest/', None), 'IPython': ('https://ipython.readthedocs.io/en/stable/', None), + 'typeguard': ('https://typeguard.readthedocs.io/en/stable/', None), } manpages_url = "https://manpages.debian.org/{path}" @@ -436,6 +437,11 @@ ignored_refs = { # reference since the intersphinx inventory of the stdlib does not provide # any link for NoneType. r'NoneType', + + # Sphinx currently fails at finding the target for references like + # :class:`typing.List[str]` since it does not seem to have specific support + # for the bracketed syntax in that role. + r'typing.*', } ignored_refs.update( re.escape(f'{x.__module__}.{x.__qualname__}') diff --git a/lisa/_generic.py b/lisa/_generic.py index 3b1fae86bba84c9fd0c2133773578317caa04e30..93d6291a2e02b456249dea1c7d3c34eb88918b31 100644 --- a/lisa/_generic.py +++ b/lisa/_generic.py @@ -20,163 +20,131 @@ Generic types inspired by the :mod:`typing` module. 
""" import functools -from collections.abc import Mapping, Sequence -from operator import attrgetter +import inspect +import typing +from typing import Any, Union, Generic, TypeVar +import typeguard +from collections.abc import Iterable -from lisa.utils import sphinx_register_nitpick_ignore +from lisa.utils import get_cls_name -def _isinstance(x, type_): - if isinstance(type_, tuple): - return any(map(lambda type_: _isinstance(x, type_), type_)) - elif isinstance(type_, type): - return isinstance(x, type_) - # Typing hint - else: - try: - from typing import get_origin, get_args, Union - except ImportError: - # We cannot process the typing hint in that version of Python, so - # we assume the input is correctly typed. It's not ideal but we - # cannot do much more than that. - return True - else: - combinator = get_origin(type_) - args = get_args(type_) - if combinator == Union: - return any(map(lambda type_: _isinstance(x, type_), args)) - else: - raise TypeError(f'Cannot handle type hint: {type_}') - - -class GenericContainerMetaBase(type): - """ - Base class for the metaclass of generic containers. +class _TypeguardCustom: + _HINT = Any + + @classmethod + def _instancecheck(cls, value): + return + + @classmethod + def _typeguard_checker(cls, value, origin_type, args, memo): + typeguard.check_type_internal(value, cls._HINT, memo) - They are parameterized with the ``type_`` class attribute, and classes can - also be created by indexing on classes with :class:`GenericContainerBase` - metaclass. The ``type_`` class attribute will be set with what is passed as - the key. 
- """ - def __instancecheck__(cls, instance): try: - cls.instancecheck(instance) - except TypeError: - return False - else: - return True - - # Fully memoize the function so that this always holds: - # assert Container[Foo] is Container[Foo] - @functools.lru_cache(maxsize=None, typed=True) - def __getitem__(cls, type_): - class NewClass(cls): - _type = type_ - - types = type_ if isinstance(type_, Sequence) else [type_] - - def make_name(self_getter, sub_getter): - def _sub_getter(type_): - try: - return sub_getter(type_) - # type hints like typing.Union don't have a name we can introspect, - # but it can be pretty-printed - except AttributeError: - return str(type_) - return '{}[{}]'.format( - self_getter(cls), - ','.join(_sub_getter(type_) for type_ in types) - ) - - NewClass.__name__ = make_name( - attrgetter('__name__'), - attrgetter('__name__') - ) + cls._instancecheck(value) + except TypeError as e: + raise typeguard.TypeCheckError(str(e)) - def type_param_name(t): - if t.__module__ == 'builtins': - return t.__qualname__ - else: - # Add the module name so that Sphinx can establish cross - # references - return f'{t.__module__}.{t.__qualname__}' - - NewClass.__qualname__ = make_name( - attrgetter('__qualname__'), - type_param_name, - ) - NewClass.__module__ = cls.__module__ - # Since this type name is not resolvable, avoid cross reference - # warnings from Sphinx - sphinx_register_nitpick_ignore(NewClass) +def _typeguard_lookup(origin_type, args, extras): + try: + issub = issubclass(origin_type, _TypeguardCustom) + except Exception: + issub = False + + if issub: + return origin_type._typeguard_checker + else: + return None - return NewClass +typeguard.checker_lookup_functions.append(_typeguard_lookup) -class GenericContainerBase: +def check_type(x, classinfo): """ - Base class for generic containers. + Equivalent of ``isinstance()`` that will also work with typing hints. 
""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - type(self).instancecheck(self) + if isinstance(classinfo, Iterable): + typ = Union[tuple(classinfo)] + else: + typ = classinfo + + try: + typeguard.check_type( + value=x, + expected_type=typ, + forward_ref_policy=typeguard.ForwardRefPolicy.ERROR, + collection_check_strategy=typeguard.CollectionCheckStrategy.ALL_ITEMS, + ) + except typeguard.TypeCheckError as e: + raise TypeError(str(e)) -class GenericMappingMeta(GenericContainerMetaBase, type(Mapping)): +def is_instance(obj, classinfo): """ - Metaclass for generic mapping containers. + Same as builtin ``isinstance()`` but works with type hints. + """ + try: + check_type(obj, classinfo) + except TypeError: + return False + else: + return True + - It provides an ``__instancecheck__`` implementation that checks the type - of the keys and values. This make it suitable for input sanitizing based - on type checking. +def is_hint(obj): """ - def instancecheck(cls, instance): - if not isinstance(instance, Mapping): - raise TypeError('not a Mapping') + Heuristic to check if a given ``obj`` is a typing hint or anything else. + This function will return ``False`` for classes. - k_type, v_type = cls._type - for k, v in instance.items(): - if not _isinstance(k, k_type): - raise TypeError(f'Key "{k}" of type {type(k).__qualname__} should be of type {k_type.__qualname__}', k) + .. warning:: Since there is currently no way to identify hints for sure, + the check might return ``False`` even if it is a hint. + """ + module = getattr(obj, '__module__', None) - if not _isinstance(v, v_type): - raise TypeError(f'Value of {type(v).__qualname__} key "{k}" should be of type {v_type.__qualname__}', k) + # This is a class, so cannot be a hint. 
+ if isinstance(obj, type): + return issubclass(obj, _TypeguardCustom) + elif module in ('typing', 'typing_extensions'): + return True + else: + return False -class TypedDict(GenericContainerBase, dict, metaclass=GenericMappingMeta): +@functools.lru_cache(maxsize=None, typed=True) +def hint_to_class(hint): """ - Subclass of dict providing keys and values type check. + Convert a typing hint to a class that will do a runtime check against the + hint when ``isinstance()`` is used. """ + class Meta(type): + def __instancecheck__(cls, instance): + return is_instance(instance, hint) + class Stub(metaclass=Meta): + pass -class GenericSequenceMeta(GenericContainerMetaBase, type(Sequence)): - """Similar to :class:`GenericMappingMeta` for sequences""" - def instancecheck(cls, instance): - if not isinstance(instance, Sequence): - raise TypeError('not a Sequence') + name = get_cls_name(hint).split('.', 1) + try: + name = name[1] + except IndexError: + name = name[0] - type_ = cls._type - for i, x in enumerate(instance): - if not _isinstance(x, type_): - raise TypeError(f'Item #{i} "{x}" of type {type(x).__qualname__} should be of type {type_.__qualname__}', i) + Stub.__qualname__ = name + Stub.__name__ = name.split('.')[-1] -class GenericSortedSequenceMeta(GenericSequenceMeta): - def instancecheck(cls, instance): - super().instancecheck(instance) - for i, (x, y) in enumerate(zip(instance, instance[1:])): - if x > y: - raise TypeError(f'Item #{i} "{x}" is higher than the next item "{y}", but the list must be sorted') + return Stub -class TypedList(GenericContainerBase, list, metaclass=GenericSequenceMeta): +T = TypeVar('T') +class SortedSequence(Generic[T], _TypeguardCustom): """ - Subclass of list providing keys and values type check. + Same as :class:`typing.List` but enforces sorted values when runtime + checked using :mod:`typeguard`. 
""" + _HINT = typing.Sequence[T] - -class SortedTypedList(GenericContainerBase, list, metaclass=GenericSortedSequenceMeta): - """ - Subclass of list providing keys and values type check, and also check the - list is sorted in ascending order. - """ + @classmethod + def _instancecheck(cls, value): + for i, (x, y) in enumerate(zip(value, value[1:])): + if x > y: + raise TypeError(f'Item #{i} "{x}" is higher than the next item "{y}", but the list must be sorted') diff --git a/lisa/_kmod.py b/lisa/_kmod.py index 7a5bbb26251ebcb187c008e9a85d0bf83f0c7916..36c45fe5db2125fa4a407da04624704c38e53e08 100644 --- a/lisa/_kmod.py +++ b/lisa/_kmod.py @@ -38,7 +38,6 @@ Here is an example of such module:: lazy_platinfo=True, kernel_src='/path/to/kernel/tree/', kmod_build_env='alpine', - # kmod_make_vars={'CC': 'clang'}, ) # Example module from: https://tldp.org/LDP/lkmpg/2.6/html/x279.html @@ -131,6 +130,8 @@ import hashlib from operator import itemgetter from shlex import quote from io import BytesIO +from collections.abc import Mapping +import typing from elftools.elf.elffile import ELFFile @@ -138,21 +139,35 @@ from devlib.target import KernelVersion, TypedKernelConfig, KernelConfigTristate from devlib.host import LocalConnection from devlib.exception import TargetStableError -from lisa.utils import nullcontext, Loggable, LISA_CACHE_HOME, checksum, DirCache, chain_cm, memoized, LISA_HOST_ABI, subprocess_log, SerializeViaConstructor, destroyablecontextmanager, ContextManagerExit, ignore_exceps +from lisa.utils import nullcontext, Loggable, LISA_CACHE_HOME, checksum, DirCache, chain_cm, memoized, LISA_HOST_ABI, subprocess_log, SerializeViaConstructor, destroyablecontextmanager, ContextManagerExit, ignore_exceps, get_nested_key from lisa._assets import ASSETS_PATH, HOST_PATH, ABI_BINARIES_FOLDER from lisa._unshare import ensure_root import lisa._git as git +from lisa.conf import SimpleMultiSrcConf, TopLevelKeyDesc, LevelKeyDesc, KeyDesc _ALPINE_ROOTFS_URL = 
'https://dl-cdn.alpinelinux.org/alpine/v{minor}/releases/{arch}/alpine-minirootfs-{version}-{arch}.tar.gz' -def _any_abi_to_kernel_arch(abi): +def _abi_to_kernel_arch(abi): + """ + Convert a devlib ABI into a valid ARCH= for the kernel + """ return { 'armeabi': 'arm', - 'armv7': 'arm', - 'aarch64': 'arm64', }.get(abi, abi) +def _kernel_arch_to_abi(arch): + """ + Convert a kernel arch to a devlib ABI + """ + if arch == 'arm64': + return 'arm64' + elif 'arm' in arch: + return 'armeabi' + else: + return arch + + def _url_path(url): return PurePosixPath( urllib.parse.unquote( @@ -194,18 +209,98 @@ def _kbuild_make_cmd(path, targets, cc, make_vars): var_cc = make_vars.get('CC', cc) if var_cc != cc: pretty_cmd = ' '.join(map(quote, map(str, cmd))) - raise ValueError(f'The kernel tree was prepared using CC={cc} so the make command cannot be ran with CC={var_cc}: {pretty_cmd}') + raise ValueError(f'The kernel build env was prepared using CC={cc} so the make command cannot be ran with CC={var_cc}: {pretty_cmd}') return cmd @destroyablecontextmanager -def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overlay_backend=None): +def _make_build_chroot(cc, abi, bind_paths=None, version=None, overlay_backend=None, packages=None): """ Create a chroot folder ready to be used to build a kernel. 
""" logger = logging.getLogger(f'{__name__}.alpine_chroot') + def is_clang(cc): + return cc.startswith('clang') + + def default_packages(cc): + # Default packages needed to compile a linux kernel module + packages = [ + 'bash', + 'binutils', + 'coreutils', + 'diffutils', + 'make', + 'file', + 'gawk', + 'sed', + 'musl-dev', + 'elfutils-dev', + 'gmp-dev', + 'libffi-dev', + 'openssl-dev', + 'linux-headers', + 'musl', + 'bison', + 'flex', + 'python3', + ] + + if is_clang(cc): + try: + _, version = cc.split('-', 1) + except ValueError: + # apk understands "clang" even if there is no clang package + version = '' + + packages.extend([ + 'lld', + f'llvm{version}', + f'clang{version}', + ]) + else: + packages.append(cc) + + return packages + + if (version, packages) != (None, None) and None in (version, packages): + raise ValueError('Both version and packages need to be set or none of them') + else: + version = version or '3.18.0' + packages = default_packages(cc) if packages is None else packages + + use_qemu = ( + # Since clang binaries support cross compilation without issues, + # there is no need to use QEMU that will slow everything down. 
+ (not is_clang(cc)) and + abi != LISA_HOST_ABI + ) + + chroot_abi = abi if use_qemu else LISA_HOST_ABI + + bind_paths = { + **dict(bind_paths or {}), + ABI_BINARIES_FOLDER[chroot_abi]: '/usr/local/bin' + } + + with _make_alpine_chroot( + version=version, + abi=chroot_abi, + packages=packages, + bind_paths=bind_paths, + overlay_backend=overlay_backend, + ) as chroot: + try: + yield chroot + except ContextManagerExit: + pass + + +@destroyablecontextmanager +def _make_alpine_chroot(version, packages=None, abi=None, bind_paths=None, overlay_backend='overlayfs'): + logger = logging.getLogger(f'{__name__}.alpine_chroot') + def mount_binds(chroot, bind_paths, mount=True): for src, dst in bind_paths.items(): dst = Path(dst).resolve() @@ -219,19 +314,17 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla _subprocess_log(cmd, logger=logger, level=logging.DEBUG) def populate(key, path, init_cache=True): - version, arch, packages, use_qemu = key + version, alpine_arch, packages = key path = path.resolve() # Packages have already been installed, so we can speed things up a # bit if init_cache: - packages = packages.split(' ') - _version = version.split('.') - minor = '.'.join(_version[:-1]) + minor = '.'.join(_version[:2]) url = _ALPINE_ROOTFS_URL.format( minor=minor, - arch=arch, + arch=alpine_arch, version=version, ) @@ -249,61 +342,23 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla shutil.copy('/etc/resolv.conf', path / 'etc' / 'resolv.conf') - if packages: - cmd = _make_chroot_cmd(path, ['apk', 'add', *packages]) - _subprocess_log(cmd, logger=logger, level=logging.DEBUG) - - packages = [ - 'bash', - 'binutils', - 'coreutils', - 'diffutils', - 'make', - 'file', - 'gawk', - 'sed', - 'musl-dev', - 'elfutils-dev', - 'gmp-dev', - 'libffi-dev', - 'openssl-dev', - 'linux-headers', - 'musl', - 'bison', - 'flex', - 'python3', - ] - make_vars = make_vars or {} - - is_clang = cc.startswith('clang') - if is_clang: - 
try: - _, version = cc.split('-', 1) - except ValueError: - # apk understands "clang" even if there is no clang package - version = '' - - packages.extend([ - 'lld', - f'llvm{version}', - f'clang{version}', - ]) - else: - packages.append(cc) + def install_packages(packages): + if packages: + cmd = _make_build_chroot_cmd(path, ['apk', 'add', *sorted(set(packages))]) + _subprocess_log(cmd, logger=logger, level=logging.DEBUG) - target_arch = make_vars.get('ARCH', LISA_HOST_ABI) + install_packages(packages) - use_qemu = ( - # Since clang binaries support cross compilation without issues, - # there is no need to use QEMU that will slow everything down. - (not is_clang) and - target_arch != LISA_HOST_ABI - ) + # Ensure we have a full version number with 3 components + version = version.split('.') + version = version + ['0' for _ in range(3 - len(version))] + version = '.'.join(version) - chroot_arch = target_arch if use_qemu else LISA_HOST_ABI + abi = abi or LISA_HOST_ABI + use_qemu = abi != LISA_HOST_ABI - qemu_msg = ' using QEMU userspace emulation' if use_qemu else '' - logger.debug(f'Using Alpine v{alpine_version} chroot with architecture {chroot_arch}{qemu_msg}.') + qemu_msg = f' using QEMU userspace emulation to emulate {abi} on {LISA_HOST_ABI}' if use_qemu else '' + logger.debug(f'Using Alpine v{version} chroot with ABI {abi}{qemu_msg}.') # Check that QEMU userspace emulation is setup if we need it if use_qemu: @@ -311,22 +366,15 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla 'arm64': 'aarch64', 'armeabi': 'arm', 'armv7': 'arm', - }.get(chroot_arch, chroot_arch) + }.get(abi, abi) binfmt_path = Path('/proc/sys/fs/binfmt_misc/', f'qemu-{qemu_arch}') if not binfmt_path.exists(): raise ValueError(f'Alpine chroot is setup for {qemu_arch} architecture but QEMU userspace emulation is not installed on the host (missing {binfmt_path})') - - # Add LISA static binaries inside the chroot - bind_paths = { - **dict(bind_paths or {}), - 
ABI_BINARIES_FOLDER[chroot_arch]: '/usr/local/bin' - } - alpine_arch = { 'arm64': 'aarch64', 'armeabi': 'armv7', - }.get(chroot_arch, chroot_arch) + }.get(abi, abi) dir_cache = DirCache( category='alpine_chroot', @@ -334,10 +382,9 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla ) key = ( - alpine_version, + version, alpine_arch, - ' '.join(sorted(packages)), - use_qemu, + sorted(set(packages or [])), ) cache_path = dir_cache.get_entry(key) with _overlay_folders([cache_path], backend=overlay_backend) as path: @@ -351,7 +398,7 @@ def _make_chroot(cc, make_vars, bind_paths=None, alpine_version='3.18.0', overla mount_binds(path, bind_paths, mount=False) -def _make_chroot_cmd(chroot, cmd): +def _make_build_chroot_cmd(chroot, cmd): chroot = Path(chroot).resolve() cmd = ' '.join(map(quote, map(str, cmd))) # Source /etc/profile to get sane defaults for e.g. PATH. Otherwise, we @@ -361,7 +408,7 @@ def _make_chroot_cmd(chroot, cmd): @destroyablecontextmanager -def _overlay_folders(lowers, upper=None, backend=None, copy_filter=None): +def _overlay_folders(lowers, backend, upper=None, copy_filter=None): """ Overlay folders on top of each other. @@ -387,7 +434,6 @@ def _overlay_folders(lowers, upper=None, backend=None, copy_filter=None): :type backend: str or None """ logger = logging.getLogger(f'{__name__}.overlay') - backend = KernelTree._resolve_overlay_backend(backend) def make_dir(root, name): path = Path(root) / name @@ -664,40 +710,53 @@ class TarOverlay(_PathOverlayBase): tar.extractall(dst) -class KernelTree(Loggable, SerializeViaConstructor): +class _KernelBuildEnvConf(SimpleMultiSrcConf): + STRUCTURE = TopLevelKeyDesc('kernel-build-env-conf', 'Build environment settings', + ( + KeyDesc('build-env', 'Environment used to build modules. 
Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (command ran directly on host system)', [typing.Literal['host', 'alpine']]), + LevelKeyDesc('build-env-settings', 'build-env settings', ( + LevelKeyDesc('host', 'Settings for host build-env', ( + KeyDesc('toolchain-path', 'Folder to prepend to PATH when executing toolchain command in the host build env', [str]), + )), + LevelKeyDesc('alpine', 'Settings for Alpine linux build-env', ( + KeyDesc('version', 'Alpine linux version, e.g. 3.18.0', [None, str]), + KeyDesc('packages', 'List of Alpine linux packages to install. If that is provided, then errors while installing the package list provided by LISA will not raise an exception, so that the user can provide their own replacement for them. This allows future-proofing hardcoded package names in LISA, as Alpine package names might evolve between versions.', [None, typing.Sequence[str]]), + )), + )), + + KeyDesc('overlay-backend', 'Backend to use for overlaying folders while building modules. Can be "overlayfs" (overlayfs filesystem, recommended and fastest) or "copy (plain folder copy)', [str]), + KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [typing.Dict[str, object]]), + ), + ) + + DEFAULT_SRC = { + 'build-env': 'host', + 'overlay-backend': 'overlayfs', + } + + +class _KernelBuildEnv(Loggable, SerializeViaConstructor): """ :param path_cm: Context manager factory expected to return a path to a - prepared kernel tree. + prepared kernel build env. :type path_cm: collections.abc.Callable - :param cc: Compiler used to prepare the kernel tree. Can be e.g. ``"gcc"``, - ``"clang"``, ``"clang-14"`` etc. - :type cc: str or None - - :param make_vars: Variables passed on ``make`` command line when preparing - the kernel tree. - :type make_vars: dict(str, str) - - :param build_env: Build environment to use. Can be one of: + :param build_conf: Build environment configuration. 
If specified as a + string, it can be one of: * ``alpine``: Alpine linux chroot, providing a controlled environment * ``host``: No specific env is setup, whatever the host is using will be picked. * ``None``: defaults to ``host``. - :type build_env: str or None - :param overlay_backend: Backend used to create folder overlays. One of: - - * ``overlayfs``: Use overlayfs Linux filesystem. This is the fastest - and the recommanded option. - * ``copy``: Use plain folder copies. This can be used as an alternative - if overlayfs cannot be used for some reason. - * ``None``: default to ``overlayfs``. + Otherwise, pass an instance of :class:`_KernelBuildEnvConf` of a mapping with + the same structure. + :type build_conf: collections.abc.Mapping or str or None """ # Preserve checksum attribute when serializing, as it will allow hitting - # the module cache without actually setting up the kernel tree in many + # the module cache without actually setting up the kernel build env in many # cases. _SERIALIZE_PRESERVED_ATTRS = {'checksum'} @@ -708,29 +767,43 @@ class KernelTree(Loggable, SerializeViaConstructor): # On top of that, the kernel does not handle clang < 10.0.1 _MIN_CLANG_VERSION = 11 - def __init__(self, path_cm, cc, make_vars, build_env=None, overlay_backend=None): + def __init__(self, path_cm, build_conf=None): self._make_path_cm = path_cm - self.build_env = self._resolve_build_env(build_env) - self.make_vars = make_vars or {} - self.overlay_backend = self._resolve_overlay_backend(overlay_backend) + self.conf, self.cc, self.abi = self._resolve_conf(build_conf) + self._path_cm = None self.path = None self.checksum = None - self.cc = cc - @staticmethod - def _resolve_build_env(build_env): - return build_env or 'host' + @classmethod + def _resolve_conf(cls, conf, abi=None): + def make_conf(conf): + if isinstance(conf, _KernelBuildEnvConf): + return conf + elif conf is None: + return _KernelBuildEnvConf() + elif isinstance(conf, str): + return 
_KernelBuildEnvConf.from_map({ + 'build-env': conf + }) + elif isinstance(conf, Mapping): + return _KernelBuildEnvConf.from_map(conf) + else: + raise TypeError(f'Unsupported value type for build_conf: {conf}') + + conf = make_conf(conf) + make_vars, cc, abi = cls._process_make_vars(conf, abi=abi) + conf.add_src(src='processed make-variables', conf={'make-variables': make_vars}) + + return (conf, cc, abi) - @staticmethod - def _resolve_overlay_backend(overlay_backend): - return overlay_backend or 'overlayfs' + _SPEC_KEYS = ('path', 'checksum') def _to_spec(self): - return dict( - path=self.path, - checksum=self.checksum, - ) + return { + attr: getattr(self, attr) + for attr in self._SPEC_KEYS + } def _update_spec(self, spec): def update(x): @@ -738,7 +811,7 @@ class KernelTree(Loggable, SerializeViaConstructor): if val is not None: setattr(self, x, val) if spec: - for attr in ('path', 'checksum'): + for attr in self._SPEC_KEYS: update(attr) # It is expected that the same object can be used more than once, so @@ -849,7 +922,7 @@ class KernelTree(Loggable, SerializeViaConstructor): @classmethod - def _prepare_tree(cls, path, cc, make_vars, build_env, apply_overlays, overlay_backend): + def _prepare_tree(cls, path, cc, abi, build_conf, apply_overlays): logger = cls.get_logger() path = Path(path) @@ -858,7 +931,7 @@ class KernelTree(Loggable, SerializeViaConstructor): path=path, targets=targets, cc=cc, - make_vars=make_vars, + make_vars=build_conf.get('make-variables', {}), ) cmds = [ @@ -888,7 +961,7 @@ class KernelTree(Loggable, SerializeViaConstructor): bind_paths = {path: path} - def fixup_kernel_tree(): + def fixup_kernel_build_env(): # TODO: re-assess # The headers in /sys/kheaders.tar.xz generated by @@ -909,12 +982,25 @@ class KernelTree(Loggable, SerializeViaConstructor): _path.write_bytes(content) - if build_env == 'alpine': + if build_conf['build-env'] == 'alpine': + settings = build_conf['build-env-settings']['alpine'] + version = settings.get('version', 
None) + alpine_packages = settings.get('packages', None) + make_vars = build_conf.get('make-variables', {}) + overlay_backend = build_conf['overlay-backend'] + @contextlib.contextmanager def cmd_cm(cmds): - with _make_chroot(cc=cc, bind_paths=bind_paths, make_vars=make_vars, overlay_backend=overlay_backend) as chroot: + with _make_build_chroot( + cc=cc, + abi=abi, + bind_paths=bind_paths, + overlay_backend=overlay_backend, + version=version, + packages=alpine_packages, + ) as chroot: yield [ - _make_chroot_cmd(chroot, cmd) if cmd else None + _make_build_chroot_cmd(chroot, cmd) if cmd else None for cmd in cmds ] else: @@ -930,18 +1016,18 @@ class KernelTree(Loggable, SerializeViaConstructor): # Apply the overlays before running make, so that it sees the # correct headers and conf etc apply_overlays() - fixup_kernel_tree() + fixup_kernel_build_env() _subprocess_log(post, logger=logger, level=logging.DEBUG) # Re-apply the overlays, since we could have overwritten important # things, such as include/linux/vermagic.h apply_overlays() - fixup_kernel_tree() + fixup_kernel_build_env() @classmethod - def _process_make_vars(cls, build_env, make_vars, abi=None): + def _process_make_vars(cls, build_conf, abi): env = { k: str(v) for k, v in ( @@ -956,7 +1042,7 @@ class KernelTree(Loggable, SerializeViaConstructor): make_vars = { **env, - **dict(make_vars or {}) + **dict(build_conf.get('make-variables', {})) } make_vars = { @@ -964,14 +1050,27 @@ class KernelTree(Loggable, SerializeViaConstructor): for k, v in make_vars.items() } - if abi is None: - abi = make_vars.get('ARCH', LISA_HOST_ABI) + try: + arch = make_vars['ARCH'] + except KeyError: + if abi: + arch = _abi_to_kernel_arch(abi) + else: + raise ValueError('The ABI must be specified or the ARCH make variable') + + abi = abi or _kernel_arch_to_abi(arch) - arch = _any_abi_to_kernel_arch(abi) make_vars['ARCH'] = arch - make_vars, cc = cls._resolve_toolchain(abi, make_vars, build_env) + build_conf = build_conf.add_src( + 
src='make-variables', + conf={ + 'make-variables': make_vars + }, + inplace=False, + ) + make_vars, cc = cls._resolve_toolchain(abi, build_conf) - if build_env == 'alpine': + if build_conf['build-env'] == 'alpine': if cc.startswith('clang'): make_vars['LLVM'] = '1' else: @@ -988,12 +1087,34 @@ class KernelTree(Loggable, SerializeViaConstructor): # then be re-filtered right before invoking make to remove CC=gcc as it # can confuse KBuild. make_vars['CC'] = cc - return (make_vars, cc) + assert 'ARCH' in make_vars + return (make_vars, cc, arch) @classmethod - def _check_cc_version(cls, cc): + def _make_toolchain_env(cls, toolchain_path=None, env=None): + env = env or os.environ + if toolchain_path is not None: + path = env.get('PATH', '') + env = { + **env, + 'PATH': ':'.join((toolchain_path, path)) + } + + return {**os.environ, **env} + + @classmethod + def _make_toolchain_env_from_conf(cls, build_conf, env=None): + if build_conf['build-env'] == 'host': + toolchain_path = build_conf['build-env-settings']['host'].get('toolchain-path') + else: + toolchain_path = None + return cls._make_toolchain_env(toolchain_path, env=env) + + @classmethod + def _check_cc_version(cls, cc, toolchain_path): if cc == 'clang': - version = subprocess.check_output([cc, '--version']) + env = cls._make_toolchain_env(toolchain_path) + version = subprocess.check_output([cc, '--version'], env=env) m = re.match(rb'.*clang version ([0-9]+)\.', version) if m: major = int(m.group(1)) @@ -1005,9 +1126,11 @@ class KernelTree(Loggable, SerializeViaConstructor): return False @classmethod - def _resolve_toolchain(cls, abi, make_vars, build_env): + def _resolve_toolchain(cls, abi, build_conf): logger = cls.get_logger() - build_env = KernelTree._resolve_build_env(build_env) + env = cls._make_toolchain_env_from_conf(build_conf) + + make_vars = build_conf.get('make-variables', {}) if abi == LISA_HOST_ABI: toolchain = None @@ -1018,12 +1141,12 @@ class KernelTree(Loggable, SerializeViaConstructor): try: 
toolchain = os.environ['CROSS_COMPILE'] except KeyError: - if abi in ('arm64', 'aarch64'): + if abi == 'arm64': toolchain = 'aarch64-linux-gnu-' - elif 'arm' in abi: + elif abi == 'armeabi': toolchain = 'arm-linux-gnueabi-' else: - raise KeyError('CROSS_COMPILE env var needs to be set') + raise KeyError(f'ABI {abi} not recognized, CROSS_COMPILE env var needs to be set') logger.debug(f'CROSS_COMPILE env var not set, assuming "{toolchain}"') @@ -1059,7 +1182,7 @@ class KernelTree(Loggable, SerializeViaConstructor): # Default to clang on alpine, as it will be in a high-enough version # and since Alpine does not ship any cross-toolchain for GCC, this will # avoid having to use QEMU userspace emulation which is really slow. - elif build_env == 'alpine': + elif build_conf['build-env'] == 'alpine': cc = 'clang' if 'LLVM' in make_vars: @@ -1074,7 +1197,9 @@ class KernelTree(Loggable, SerializeViaConstructor): # Only run the check on host build env, as other build envs are # expected to be correctly configured. - if build_env == 'host' and commands: + if build_conf['build-env'] == 'host' and commands: + toolchain_path = build_conf['build-env-settings']['host'].get('toolchain-path', None) + for cc, cmd in commands.items(): pretty_cmd = ' '.join(cmd) try: @@ -1082,7 +1207,8 @@ class KernelTree(Loggable, SerializeViaConstructor): cmd, # Most basic compiler input that will not do anything. 
input=b';', - stderr=subprocess.STDOUT + stderr=subprocess.STDOUT, + env=env, ) except subprocess.CalledProcessError as e: logger.debug(f'Checking {cc} compiler: {pretty_cmd} failed with:\n{e.output.decode()}') @@ -1091,7 +1217,7 @@ class KernelTree(Loggable, SerializeViaConstructor): logger.debug(f'Checking {cc} compiler: {e}') continue else: - if cls._check_cc_version(cc): + if cls._check_cc_version(cc, toolchain_path): break else: raise ValueError(f'Could not find a working toolchain for CROSS_COMPILE={toolchain}') @@ -1114,7 +1240,7 @@ class KernelTree(Loggable, SerializeViaConstructor): @classmethod @SerializeViaConstructor.constructor - def from_target(cls, target, tree_path=None, make_vars=None, cache=True, build_env=None, overlay_backend=None): + def from_target(cls, target, tree_path=None, cache=True, build_conf=None): """ Build the tree from the given :class:`lisa.target.Target`. @@ -1149,31 +1275,25 @@ class KernelTree(Loggable, SerializeViaConstructor): downloading a tarball from kernel.org for the matching version.) :type tree_path: str or None - :param make_vars: Variables passed on ``make`` command line. - :type make_vars: dict(str, object) - :param cache: If ``True``, will attempt to cache intermediate steps. :type cache: bool - :param build_env: See :class:`lisa._kmod.KernelTree`. - :type build_env: str or None - - :param overlay_backend: See :class:`lisa._kmod.KernelTree`. - :type overlay_backend: str or None + :param build_conf: See :class:`lisa._kmod._KernelBuildEnv`. 
+ :type build_conf: str or None """ - make_vars, cc = cls._process_make_vars( - make_vars=make_vars, - abi=target.plat_info['abi'], - build_env=build_env, - ) - kernel_info = target.plat_info['kernel'] + plat_info = target.plat_info + abi = plat_info['abi'] + kernel_info = plat_info['kernel'] + + build_conf, cc, _abi = cls._resolve_conf(build_conf, abi=abi) + assert _abi == abi @contextlib.contextmanager def from_installed_headers(): """ Get the kernel tree from /lib/modules """ - if build_env == 'alpine': + if build_conf['build-env'] == 'alpine': raise ValueError(f'Building from /lib/modules is not supported with the Alpine build environment as /lib/modules might not be self contained (i.e. symlinks pointing outside)') else: if isinstance(target.conn, LocalConnection): @@ -1240,11 +1360,9 @@ class KernelTree(Loggable, SerializeViaConstructor): with cls.from_overlays( version=version, overlays=overlays, - make_vars=make_vars, cache=cache, tree_path=tree_path, - build_env=build_env, - overlay_backend=overlay_backend, + build_conf=build_conf, ) as tree: yield tree._to_spec() @@ -1295,9 +1413,7 @@ class KernelTree(Loggable, SerializeViaConstructor): tree_path=tree_path, version=kernel_info['version'], cache=cache, - make_vars=make_vars, - build_env=build_env, - overlay_backend=overlay_backend, + build_conf=build_conf, ) as tree: yield tree._to_spec() @@ -1342,28 +1458,24 @@ class KernelTree(Loggable, SerializeViaConstructor): return cls( path_cm=functools.partial(try_loaders, loaders), - cc=cc, - make_vars=make_vars, - build_env=build_env, - overlay_backend=overlay_backend, + build_conf=build_conf, ) @classmethod @SerializeViaConstructor.constructor - def from_path(cls, path, make_vars=None, cache=True, build_env=None): + def from_path(cls, path, cache=True, build_conf=None): """ Build a tree from the given ``path`` to sources. 
""" return cls.from_overlays( tree_path=path, - make_vars=make_vars, cache=cache, - build_env=build_env, + build_conf=build_conf, ) @classmethod @SerializeViaConstructor.constructor - def from_overlays(cls, version=None, tree_path=None, overlays=None, make_vars=None, cache=True, build_env=None, overlay_backend=None): + def from_overlays(cls, version=None, tree_path=None, overlays=None, cache=True, build_conf=None): """ Build a tree from the given overlays, to be applied on a source tree. @@ -1375,12 +1487,8 @@ class KernelTree(Loggable, SerializeViaConstructor): """ logger = cls.get_logger() overlays = overlays or {} - make_vars, cc = cls._process_make_vars( - make_vars=make_vars, - build_env=build_env, - ) - build_env = KernelTree._resolve_build_env(build_env) - overlay_backend = KernelTree._resolve_overlay_backend(overlay_backend) + + build_conf, cc, abi = cls._resolve_conf(build_conf) if tree_path: try: @@ -1416,16 +1524,16 @@ class KernelTree(Loggable, SerializeViaConstructor): cls._prepare_tree( path, cc=cc, - make_vars=make_vars, - build_env=build_env, + abi=abi, + build_conf=build_conf, apply_overlays=functools.partial(apply_overlays, path), - overlay_backend=overlay_backend, ) @contextlib.contextmanager def overlay_cm(args): base_path, tree_key = args base_path = Path(base_path).resolve() + overlay_backend = build_conf['overlay-backend'] if cache and tree_key is not None: # Compute a unique token for the overlay. It includes: @@ -1443,16 +1551,9 @@ class KernelTree(Loggable, SerializeViaConstructor): overlay._get_checksum() for overlay, dst in overlays.items() ) + [ - str(tree_key), - str(build_env), - str(overlay_backend), + tree_key, str(cc), - ] + [ - # We need to take checksum the make variables - # as well, as it can influence the kernel tree - # a great deal (e.g. 
changing toolchain) - f'{k}={v}' - for k, v in sorted((make_vars or {}).items()) + build_conf, ] ) @@ -1518,7 +1619,7 @@ class KernelTree(Loggable, SerializeViaConstructor): url = cls._get_url(version) # Assume that the URL will always provide the same tarball yield ( - dir_cache.get_entry([url]), + dir_cache.get_entry(url), url, ) else: @@ -1531,9 +1632,7 @@ class KernelTree(Loggable, SerializeViaConstructor): cm = chain_cm(overlay_cm, tree_cm) return cls( path_cm=cm, - cc=cc, - make_vars=make_vars, - build_env=build_env, + build_conf=build_conf, ) @classmethod @@ -1652,29 +1751,29 @@ class KmodSrc(Loggable): ) )).encode('utf-8') - def compile(self, kernel_tree, make_vars=None): + def compile(self, kernel_build_env, make_vars=None): """ Compile the module and returns the ``bytestring`` content of the ``.ko`` file. - :param kernel_tree: Kernel tree to build the module against. - :type kernel_tree: KernelTree + :param kernel_build_env: kernel build env to build the module against. + :type kernel_build_env: _KernelBuildEnv :param make_vars: Variables passed on ``make`` command line. This can be used for variables only impacting the module, otherwise it's - better to set them when creating the ``kernel_tree``. + better to set them when creating the ``kernel_build_env``. 
:type make_vars: dict(str, object) or None """ make_vars = { - **kernel_tree.make_vars, + **kernel_build_env.conf.get('make-variables', {}), **(make_vars or {}), } - cc = kernel_tree.cc - overlay_backend = kernel_tree.overlay_backend - tree_path = Path(kernel_tree.path) - # "inherit" the build env from the KernelTree as we must use the same + cc = kernel_build_env.cc + abi = kernel_build_env.abi + tree_path = Path(kernel_build_env.path) + # "inherit" the build env from the _KernelBuildEnv as we must use the same # environment as what was used for "make modules_prepare" - build_env = kernel_tree.build_env + build_conf = kernel_build_env.conf bind_paths = {tree_path: tree_path} logger = self.logger @@ -1721,10 +1820,21 @@ class KmodSrc(Loggable): else: return filenames[0] - if build_env == 'alpine': + if build_conf['build-env'] == 'alpine': + settings = build_conf['build-env-settings']['alpine'] + alpine_version = settings.get('version', None) + alpine_packages = settings.get('packages', None) + @contextlib.contextmanager def cmd_cm(): - with _make_chroot(cc=cc, bind_paths=bind_paths, make_vars=make_vars, overlay_backend=overlay_backend) as chroot: + with _make_build_chroot( + cc=cc, + bind_paths=bind_paths, + abi=abi, + overlay_backend=build_conf['overlay-backend'], + version=alpine_version, + packages=alpine_packages, + ) as chroot: # Do not use a CM here to avoid choking on permission # issues. Since the chroot itself will be entirely # removed it's not a problem. 
@@ -1734,7 +1844,7 @@ class KmodSrc(Loggable): mod_path=f'/{mod_path.relative_to(chroot)}', make_vars=make_vars, ) - yield (mod_path, _make_chroot_cmd(chroot, cmd), {}) + yield (mod_path, _make_build_chroot_cmd(chroot, cmd), {}) else: @contextlib.contextmanager def cmd_cm(): @@ -1744,7 +1854,9 @@ class KmodSrc(Loggable): mod_path=mod_path, make_vars=make_vars, ) - yield (mod_path, cmd, {'PATH': HOST_PATH}) + + env = _KernelBuildEnv._make_toolchain_env_from_conf(build_conf, env={'PATH': HOST_PATH}) + yield (mod_path, cmd, {'PATH': env['PATH']}) with cmd_cm() as (mod_path, cmd, env): mod_path = Path(mod_path) @@ -1798,20 +1910,20 @@ class DynamicKmod(Loggable): :param src: Sources of the module. :type src: lisa._kmod.KmodSrc - :param kernel_tree: Kernel source tree to use to build the module against. - :type kernel_tree: lisa._kmod.KernelTree + :param kernel_build_env: Kernel source tree to use to build the module against. + :type kernel_build_env: lisa._kmod._KernelBuildEnv """ - def __init__(self, target, src, kernel_tree=None): + def __init__(self, target, src, kernel_build_env=None): self.src = src self.target = target - if not isinstance(kernel_tree, KernelTree): - kernel_tree = KernelTree.from_target( + if not isinstance(kernel_build_env, _KernelBuildEnv): + kernel_build_env = _KernelBuildEnv.from_target( target=self.target, - tree_path=kernel_tree, + tree_path=kernel_build_env, ) - self._kernel_tree = kernel_tree + self._kernel_build_env = kernel_build_env @property def mod_name(self): @@ -1830,23 +1942,23 @@ class DynamicKmod(Loggable): @property @memoized - def kernel_tree(self): - tree = self._kernel_tree - arch = _any_abi_to_kernel_arch( + def kernel_build_env(self): + tree = self._kernel_build_env + arch = _abi_to_kernel_arch( self.target.plat_info['abi'] ) - tree_arch = tree.make_vars['ARCH'] + tree_arch = tree.conf['make-variables']['ARCH'] if tree_arch != arch: - raise ValueError(f'The kernel tree ({tree_arch}) was not prepared for the same 
architecture as the target ({arch}). Please set ARCH={arch} make variable.') + raise ValueError(f'The kernel build env ({tree_arch}) was not prepared for the same architecture as the target ({arch}). Please set ARCH={arch} make variable.') else: return tree @property def _compile_needs_root(self): - tree = self.kernel_tree + tree = self.kernel_build_env return ( - tree.build_env != 'host' or - tree.overlay_backend == 'overlayfs' + tree.conf['build-env'] != 'host' or + tree.conf['overlay-backend'] == 'overlayfs' ) # Dummy memoized wrapper. The only reason we need one is that _do_compile() @@ -1866,51 +1978,48 @@ class DynamicKmod(Loggable): compile_ = ensure_root(compile_, inline=True) bin_, spec = compile_(self, make_vars=make_vars) - # Get back KernelTree._to_spec() and update the KernelTree we have in + # Get back _KernelBuildEnv._to_spec() and update the _KernelBuildEnv we have in # this process with it to remember the checksum, in case ensure_root() # spawned a new process. This is then used by Target.get_kmod() that # will reinject the known spec when creating new modules from the - # default KernelTree - self.kernel_tree._update_spec(spec) + # default _KernelBuildEnv + self.kernel_build_env._update_spec(spec) return bin_ def _do_compile(self, make_vars=None): - kernel_tree = self.kernel_tree + kernel_build_env = self.kernel_build_env extra_make_vars = make_vars or {} all_make_vars = { + **kernel_build_env.conf.get('make-variables', {}), **extra_make_vars, - **kernel_tree.make_vars, } src = self.src - def get_key(kernel_tree): - kernel_checksum = kernel_tree.checksum + def get_key(kernel_build_env): + kernel_checksum = kernel_build_env.checksum if kernel_checksum is None: - raise ValueError('Kernel tree has no checksum') + raise ValueError('kernel build env has no checksum') else: - var_tokens = [ - f'{k}={v}' - for k, v in sorted(all_make_vars.items()) - ] - # Cache the compilation based on: - # * the kernel tree - # * the make variables - # * the module name 
- return (kernel_checksum, kernel_tree.build_env, src.checksum, *var_tokens) + return ( + kernel_checksum, + kernel_build_env.conf, + src.checksum, + all_make_vars, + ) - def get_bin(kernel_tree): + def get_bin(kernel_build_env): return src.compile( - kernel_tree=kernel_tree, + kernel_build_env=kernel_build_env, make_vars=extra_make_vars, ) - def lookup_cache(kernel_tree, key, enter_cm=False): - cm = kernel_tree if enter_cm else nullcontext(kernel_tree) + def lookup_cache(kernel_build_env, key, enter_cm=False): + cm = kernel_build_env if enter_cm else nullcontext(kernel_build_env) def populate(key, path): - with cm as kernel_tree: + with cm as kernel_build_env: with open(path / 'mod.ko', 'wb') as f: - f.write(get_bin(kernel_tree)) + f.write(get_bin(kernel_build_env)) dir_cache = DirCache( category='kernel_modules', @@ -1920,26 +2029,26 @@ class DynamicKmod(Loggable): with open(cache_path / 'mod.ko', 'rb') as f: return f.read() - # First try on the "bare" kernel tree, i.e. before calling __enter__(). + # First try on the "bare" kernel build env, i.e. before calling __enter__(). # If this happens to have enough information to hit the cache, we just # avoided a possibly costly setup of compilation environment try: - key = get_key(kernel_tree) + key = get_key(kernel_build_env) except ValueError: - with kernel_tree as kernel_tree: - if kernel_tree.checksum is None: - # Only cache the module if the kernel tree has a defined + with kernel_build_env as kernel_build_env: + if kernel_build_env.checksum is None: + # Only cache the module if the kernel build env has a defined # checksum, which is not always the case when it's not # coming from a controlled source that is guaranteed to be # immutable. 
- bin_ = get_bin(kernel_tree) + bin_ = get_bin(kernel_build_env) else: - key = get_key(kernel_tree) - bin_ = lookup_cache(kernel_tree, key) + key = get_key(kernel_build_env) + bin_ = lookup_cache(kernel_build_env, key) else: - bin_ = lookup_cache(kernel_tree, key, enter_cm=True) + bin_ = lookup_cache(kernel_build_env, key, enter_cm=True) - return (bin_, kernel_tree._to_spec()) + return (bin_, kernel_build_env._to_spec()) def install(self, kmod_params=None): """ diff --git a/lisa/_typeclass.py b/lisa/_typeclass.py index 3468d7d235e3e681c589827cea0cd4d031b33056..ccf07ab4915bfd32c5567e0503275e19fd362e51 100644 --- a/lisa/_typeclass.py +++ b/lisa/_typeclass.py @@ -192,10 +192,11 @@ Note that it's possible to implement a typeclass for a type that has no values, but for which ``isinstance(value, thetype)`` will return true. This can be achieved using ``__instancecheck__`` or ``__subclasscheck__`` and is used in particular by the abstract base classes provided by :mod:`collections.abc`. -:class:`lisa._generic.TypedList` is another example. Casting values "registered" as -instances of these types is expensive though, as validity of the cast depends -on the value itself. That means it's not possible to memoize the result of the -cast associated it with the type of the value. +:class:`lisa._generic.SortedSequence` is another example. Typing hints from the +:mod:`typing` module can also be used. Casting values "registered" as instances +of these types is expensive though, as validity of the cast depends on the +value itself. That means it's not possible to memoize the result of the cast +associated it with the type of the value. One might wonder what casting a value to a typeclass gives. 
When possible, a @@ -215,13 +216,22 @@ import itertools import contextlib import textwrap from collections.abc import Iterable +import typing from devlib.utils.misc import ranges_to_list from lisa.utils import deduplicate # TODO: revisit pylint annotation once this is solved: # https://github.com/PyCQA/pylint/issues/1630 -from lisa._generic import TypedList # pylint: disable=unused-import +from lisa._generic import hint_to_class, is_hint + + +def _process_hint(obj): + if is_hint(obj): + return hint_to_class(obj) + else: + return obj + class TypeClassMeta(type): """ @@ -351,6 +361,10 @@ class TypeClassMeta(type): dct = {**typeclass.DEFAULTS, **dct} types = types if isinstance(types, Iterable) else [types] + + # Process type hints to turn them into normal classes, implementing __instancecheck__ + types = list(map(_process_hint, types)) + for type_ in types: # Create an instance for each type, with the type as base class. bases = (type_,) @@ -432,6 +446,8 @@ class TypeClass(metaclass=TypeClassMeta): Base class to inherit from to define a new typeclass. """ def __new__(cls, obj): + obj = _process_hint(obj) + safe_to_memoize, instance, dct = cls._find_instance_dct(obj) # pylint: disable=unused-variable # Shallow copy to allow "casting" to the right type. Using a made-up # class allows piggy backing on regular attribute lookup, which is much @@ -467,11 +483,11 @@ class TypeClass(metaclass=TypeClassMeta): Make a proxy object for given type. The proxy is itself a type inheriting from the original type, along - with all the methods in ``dct``. ``__call__`` is overrident in the + with all the methods in ``dct``. ``__call__`` is overridden in the metaclass to make sure that invoking the type will yield instances of the original type.
""" - class TypeProxyMeta(type): + class TypeProxyMeta(type(obj)): def __instancecheck__(cls, x): return isinstance(x, obj) @@ -698,14 +714,12 @@ class FromString(TypeClass): :type short: bool """ -class BuiltinFromStringInstance(FromString, types=(int, float, TypedList[float])): + +class _BuiltinFromStringInstance(FromString, types=(int, float)): """ Parse the following types from a string: * ``int`` * ``float`` - * ``str`` - - Plus all the :class:`lisa._generic.TypedList` subtypes of the above types. """ @classmethod def from_str(cls, string): @@ -719,7 +733,7 @@ class BuiltinFromStringInstance(FromString, types=(int, float, TypedList[float]) return cls.__name__ -class BoolFromStringInstance(FromString, types=bool): +class _BoolFromStringInstance(FromString, types=bool): """ Parse boolean from a string. """ @@ -744,7 +758,7 @@ class BoolFromStringInstance(FromString, types=bool): return 'bool' -class IntListFromStringInstance(FromString, types=TypedList[int]): +class _IntSeqFromStringInstance(FromString, types=(typing.List[int], typing.Sequence[int])): """ Instance of :class:`lisa._typeclass.FromString` for :class:`int` type. """ @@ -771,7 +785,7 @@ class IntListFromStringInstance(FromString, types=TypedList[int]): * ``1,2,10,55-99``: a comma separated list of the previous formats """).strip() -class StrFromStringInstance(FromString, types=str): +class _StrFromStringInstance(FromString, types=str): """ Instance of :class:`lisa._typeclass.FromString` for :class:`str` type. """ @@ -783,7 +797,7 @@ class StrFromStringInstance(FromString, types=str): def get_format_description(cls, short): return 'str' -class StrListFromStringInstance(FromString, types=TypedList[str]): +class _StrSeqFromStringInstance(FromString, types=(typing.List[str], typing.Sequence[str])): """ Instance of :class:`lisa._typeclass.FromString` for :class:`str` type. 
""" diff --git a/lisa/analysis/base.py b/lisa/analysis/base.py index 7d805b3051b957cdc1fcee37eadfbf26d3b723eb..93ac39a84f5ca007ede442bc3a1cbaac568a5bc0 100644 --- a/lisa/analysis/base.py +++ b/lisa/analysis/base.py @@ -28,6 +28,7 @@ import warnings import itertools import copy from operator import itemgetter, attrgetter +import typing import numpy import matplotlib @@ -46,7 +47,6 @@ import panel.widgets from lisa.utils import Loggable, deprecate, get_doc_url, get_short_doc, get_subclasses, guess_format, is_running_ipython, measure_time, memoized, update_wrapper_doc, _import_all_submodules, optional_kwargs from lisa.trace import _CacheDataDesc from lisa.notebook import _hv_fig_to_pane, _hv_link_dataframes, axis_cursor_delta, axis_link_dataframes, make_figure -from lisa._generic import TypedList # Ensure hv.extension() is called import lisa.notebook @@ -592,9 +592,9 @@ class AnalysisHelpers(Loggable, abc.ABC): rc_params=None, axis=None, interactive=None, - colors: TypedList[str]=None, - linestyles: TypedList[str]=None, - markers: TypedList[str]=None, + colors: typing.Sequence[str]=None, + linestyles: typing.Sequence[str]=None, + markers: typing.Sequence[str]=None, **kwargs ): diff --git a/lisa/analysis/idle.py b/lisa/analysis/idle.py index af419dafaab5fe0b46d43572d201f7a800dbc5c2..cb97054d7c02bd9b2c63b9340bc87937df2e8b2c 100644 --- a/lisa/analysis/idle.py +++ b/lisa/analysis/idle.py @@ -18,6 +18,7 @@ from functools import reduce import operator import warnings +import typing import pandas as pd import holoviews as hv @@ -25,7 +26,6 @@ import holoviews as hv from lisa.datautils import df_add_delta, df_refit_index, df_split_signals from lisa.analysis.base import TraceAnalysisBase from lisa.trace import requires_events, CPU -from lisa._generic import TypedList from lisa.analysis.base import TraceAnalysisBase @@ -263,7 +263,7 @@ class IdleAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_cluster_idle_state_residency.used_events - def 
plot_cluster_idle_state_residency(self, cluster: TypedList[CPU], pct: bool=False): + def plot_cluster_idle_state_residency(self, cluster: typing.Sequence[CPU], pct: bool=False): """ Plot the idle state residency of a cluster diff --git a/lisa/analysis/load_tracking.py b/lisa/analysis/load_tracking.py index e627dd8b4a3c179aa3d3c7c80d56cfea7de606ac..da30674caa745b8c8932d9935932f3f25db1f258 100644 --- a/lisa/analysis/load_tracking.py +++ b/lisa/analysis/load_tracking.py @@ -19,6 +19,7 @@ import operator import itertools +import typing import holoviews as hv import pandas as pd @@ -28,7 +29,6 @@ from lisa.analysis.status import StatusAnalysis from lisa.trace import requires_one_event_of, may_use_events, will_use_events_from, TaskID, CPU, MissingTraceEventError, OrTraceEventChecker from lisa.utils import deprecate from lisa.datautils import df_refit_index, series_refit_index, df_filter_task_ids, df_split_signals -from lisa._generic import TypedList from lisa.notebook import plot_signal, _hv_neutral @@ -133,7 +133,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): 'sched_util_est_cfs', 'sched_cpu_capacity', ) - def df_cpus_signal(self, signal, cpus: TypedList[CPU]=None): + def df_cpus_signal(self, signal, cpus: typing.Sequence[CPU]=None): """ Get the load-tracking signals for the CPUs @@ -322,7 +322,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @may_use_events(StatusAnalysis.plot_overutilized.used_events) @df_cpus_signal.used_events - def plot_cpus_signals(self, cpus: TypedList[CPU]=None, signals: TypedList[str]=['util', 'load']): + def plot_cpus_signals(self, cpus: typing.Sequence[CPU]=None, signals: typing.Sequence[str]=['util', 'load']): """ Plot the CPU-related load-tracking signals @@ -374,7 +374,7 @@ class LoadTrackingAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_task_signal.used_events - def plot_task_signals(self, task: TaskID, signals: TypedList[str]=['util', 'load']): + def plot_task_signals(self, task: 
TaskID, signals: typing.Sequence[str]=['util', 'load']): """ Plot the task-related load-tracking signals diff --git a/lisa/analysis/tasks.py b/lisa/analysis/tasks.py index bb314a14b45f3ac50718ad02f089baf1db03103f..f2c1b8ca092616fdf9a18e28ba891065ea0f3199 100644 --- a/lisa/analysis/tasks.py +++ b/lisa/analysis/tasks.py @@ -18,6 +18,7 @@ from enum import Enum import itertools import warnings +import typing import numpy as np import pandas as pd @@ -28,7 +29,6 @@ from lisa.analysis.base import TraceAnalysisBase from lisa.utils import memoized, kwargs_forwarded_to, deprecate from lisa.datautils import df_filter_task_ids, series_rolling_apply, series_refit_index, df_refit_index, df_deduplicate, df_split_signals, df_add_delta, df_window, df_update_duplicates, df_combine_duplicates from lisa.trace import requires_events, will_use_events_from, may_use_events, TaskID, CPU, MissingTraceEventError -from lisa._generic import TypedList from lisa.notebook import _hv_neutral, plot_signal, _hv_twinx @@ -800,8 +800,9 @@ class TasksAnalysis(TraceAnalysisBase): label=f"Task running in domain {domain}" ) else: - self._plot_markers( - series_refit_index(sw_df['__cpu'], window=self.trace.window) + return self._plot_markers( + series_refit_index(sw_df['__cpu'], window=self.trace.window), + label=str(task), ) return ( @@ -831,7 +832,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @df_tasks_total_residency.used_events - def plot_tasks_total_residency(self, tasks: TypedList[TaskID]=None, ascending: bool=False, + def plot_tasks_total_residency(self, tasks: typing.Sequence[TaskID]=None, ascending: bool=False, count: bool=None): """ Plot the stacked total time spent by each task on each CPU @@ -926,7 +927,7 @@ class TasksAnalysis(TraceAnalysisBase): return plot_signal(series, name=label) @TraceAnalysisBase.plot_method - def plot_tasks_wakeups(self, target_cpus: TypedList[CPU]=None, window: float=1e-2, per_sec: bool=False): + def plot_tasks_wakeups(self, target_cpus: 
typing.Sequence[CPU]=None, window: float=1e-2, per_sec: bool=False): """ Plot task wakeups over time @@ -975,7 +976,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @requires_events("sched_wakeup_new") - def plot_tasks_forks(self, target_cpus: TypedList[CPU]=None, window: float=1e-2, per_sec: bool=False): + def plot_tasks_forks(self, target_cpus: typing.Sequence[CPU]=None, window: float=1e-2, per_sec: bool=False): """ Plot task forks over time @@ -1319,7 +1320,7 @@ class TasksAnalysis(TraceAnalysisBase): @TraceAnalysisBase.plot_method @_plot_tasks_activation.used_events @kwargs_forwarded_to(_plot_tasks_activation, ignore=['tasks', 'best_effort']) - def plot_tasks_activation(self, tasks: TypedList[TaskID]=None, hide_tasks: TypedList[TaskID]=None, which_cpu: bool=True, overlay: bool=False, **kwargs): + def plot_tasks_activation(self, tasks: typing.Sequence[TaskID]=None, hide_tasks: typing.Sequence[TaskID]=None, which_cpu: bool=True, overlay: bool=False, **kwargs): """ Plot all tasks activations, in a style similar to kernelshark. 
diff --git a/lisa/conf.py b/lisa/conf.py index 685b7f933470df7b99abe2719183f63536525162..4d65808da1a8d795795161fe7fde89b76ca0a958 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -32,8 +32,10 @@ import io import functools import threading import weakref +import typing from ruamel.yaml.comments import CommentedMap +import typeguard import lisa from lisa.utils import ( @@ -41,6 +43,7 @@ from lisa.utils import ( is_running_sphinx, get_cls_name, HideExekallID, get_subclasses, groupby, import_all_submodules, ) +from lisa._generic import check_type class DeferredValue: @@ -69,8 +72,8 @@ class DeferredValue: key = key_desc.qualname if key_desc else '' raise KeyComputationRecursionError(f'Recursion error while computing deferred value for key: {key}', key) - self._is_computing = True try: + self._is_computing = True return self.callback(*self.args, **self.kwargs) finally: self._is_computing = False @@ -158,12 +161,15 @@ class KeyDescBase(abc.ABC): return self.parent.path + curr @abc.abstractmethod - def get_help(self, style=None): + def get_help(self, style=None, last=False): """ Get a help message describing the key. :param style: When "rst", ResStructuredText formatting may be applied :param style: str + + :param last: ``True`` if this is the last item in a list. + :type last: bool """ @abc.abstractmethod @@ -241,47 +247,18 @@ class KeyDesc(KeyDescBase): classinfo = self.classinfo key = self.qualname - def get_excep(key, val, classinfo, cls, msg): - # pylint: disable=unused-argument - classinfo = ' or '.join(get_cls_name(cls) for cls in classinfo) - msg = ': ' + msg if msg else '' - return TypeError(f'Key "{key}" is an instance of {get_cls_name(type(val))}, but should be instance of {classinfo}{msg}. 
Help: {self.help}', key) - def checkinstance(key, val, classinfo): - excep_list = [] - for cls in classinfo: - if cls is None: - if val is not None: - excep_list.append( - get_excep(key, val, classinfo, cls, 'Key is not None') - ) - # Some classes are able to raise a more detailed - # exception than just the boolean return value of - # __instancecheck__ - elif hasattr(cls, 'instancecheck'): - try: - cls.instancecheck(val) - except TypeError as e: - excep_list.append( - get_excep(key, val, classinfo, cls, str(e)) - ) - else: - if not isinstance(val, cls): - excep_list.append( - get_excep(key, val, classinfo, cls, None) - ) - - # If no type was validated, we raise an exception. This will - # only show the exception for the first class to be tried, - # which is the primary one. - if len(excep_list) == len(classinfo): - raise excep_list[0] + try: + check_type(val, classinfo) + except TypeError as e: + classinfo = ' or '.join(get_cls_name(cls) for cls in classinfo) + raise TypeError(f'Key "{key}" is an instance of {get_cls_name(type(val))}, but should be instance of {classinfo}: {e}. Help: {self.help}', key) # DeferredValue will be checked when they are computed if not isinstance(val, DeferredValue): checkinstance(key, val, classinfo) - def get_help(self, style=None): + def get_help(self, style=None, last=False): base_fmt = '{prefix}{key} ({classinfo}){prefixed_help}.' 
if style == 'rst': prefix = '* ' @@ -292,7 +269,7 @@ class KeyDesc(KeyDescBase): key = '' fmt = '{key}{help}\ntype: {classinfo}' else: - prefix = '|- ' + prefix = ('└' if last else '├') + ' ' key = self.name fmt = base_fmt @@ -402,17 +379,7 @@ class DerivedKeyDesc(KeyDesc): super().__init__(name=name, help=help, classinfo=classinfo, newtype=newtype) self._base_key_paths = base_key_paths self._compute = compute - self._compute_stack_tls = threading.local() - - def _get_compute_stack(self, conf): - try: - stack = self._compute_stack_tls.stack - except AttributeError: - stack = weakref.WeakKeyDictionary() - self._compute_stack_tls.stack = stack - - key = conf._as_hashable - return stack.setdefault(key, []) + self._is_computing_in = set() @property def help(self): @@ -520,27 +487,25 @@ class DerivedKeyDesc(KeyDesc): return True def compute_val(self, conf, eval_deferred=True): - stack = self._get_compute_stack(conf) - - if stack: + conf_id = id(conf) + if conf_id in self._is_computing_in: key = self.qualname raise KeyComputationRecursionError(f'Recursion error while computing derived key: {key}', key) else: - stack.append(self) - - try: - # If there is non evaluated base, transitively return a closure rather - # than computing now. - if not eval_deferred and self.get_non_evaluated_base_keys(conf): - val = DeferredValue(self.compute_val, conf=conf, eval_deferred=True) - else: - base_conf = self._get_base_conf(conf) - val = self._compute(base_conf) - self.validate_val(val) - finally: - stack.pop() + try: + self._is_computing_in.add(conf_id) + # If there is non evaluated base, transitively return a closure rather + # than computing now. 
+ if not eval_deferred and self.get_non_evaluated_base_keys(conf): + val = DeferredValue(self.compute_val, conf=conf, eval_deferred=True) + else: + base_conf = self._get_base_conf(conf) + val = self._compute(base_conf) + self.validate_val(val) + finally: + self._is_computing_in.remove(conf_id) - return val + return val def get_src(self, conf): return ','.join( @@ -564,20 +529,59 @@ class LevelKeyDesc(KeyDescBase, Mapping): under that level :type children: collections.abc.Sequence + :param value_path: Relative path to a sub-key that will receive assignment + to that level for non-mapping types. This allows turning a leaf key into a + level while preserving backward compatibility, as long as: + + * The key did not accept mapping values, otherwise it would be + ambiguous and is therefore rejected. + + * The old leaf key has a matching new leaf key, that is a sub-key + of the new level key. + + In practice, that allows turning a single knob into a tree of settings. + :type value_path: list(str) or None + Children keys will get this key assigned as a parent when passed to the constructor. """ - def __init__(self, name, help, children): + def __init__(self, name, help, children, value_path=None): # pylint: disable=redefined-builtin super().__init__(name=name, help=help) - self.children = children + # Make it easier to share children with another configuration class by + # making them independent, so we will not accidentally override the + # parent link when it would not be appropriate. 
+ self.children = list(map(copy.deepcopy, children)) # Fixup parent for easy nested declaration for key_desc in self.children: key_desc.parent = self + self.value_path = value_path + + @property + def key_desc(self): + path = self.value_path + if path is None: + raise AttributeError(f'{self} does not define a value path for direct assignment') + else: + return get_nested_key(self, path) + + def __getattr__(self, attr): + # If the property raised an exception, __getattr__ is tried so we need + # to fail explicitly in order to avoid infinite recursion + if attr == 'key_desc': + raise AttributeError('recursive key_desc lookup') + else: + try: + key_desc = self.key_desc + except Exception as e: + raise AttributeError(str(e)) + else: + return getattr(key_desc, attr) + @property def _key_map(self): return { @@ -628,9 +632,9 @@ class LevelKeyDesc(KeyDescBase, Mapping): for key, val in conf.items(): self[key].validate_val(val) - def get_help(self, style=None): + def get_help(self, style=None, last=False): idt = self.INDENTATION - prefix = '*' if style == 'rst' else '+-' + prefix = '*' if style == 'rst' else ('└' if last else '├') # Nasty hack: adding an empty ResStructuredText comment between levels # of nested list avoids getting extra blank line between list items. # That prevents ResStructuredText from thinking each item must be a @@ -644,9 +648,13 @@ class LevelKeyDesc(KeyDescBase, Mapping): help=' ' + self.help if self.help else '', ) nl = '\n' + idt + last = len(self.children) - 1 help_ += nl.join( - key_desc.get_help(style=style).replace('\n', nl) - for key_desc in self.children + key_desc.get_help( + style=style, + last=i == last, + ).replace('\n', nl) + for i, key_desc in enumerate(self.children) ) if style == 'rst': help_ += '\n\n..\n' @@ -654,6 +662,38 @@ class LevelKeyDesc(KeyDescBase, Mapping): return help_ +class DelegatedLevelKeyDesc(LevelKeyDesc): + """ + Level key descriptor that imports the keys from another + :class:`~lisa.conf.MultiSrcConfABC` subclass. 
+ + :param conf: Configuration class to extract keys from. + :type conf: MultiSrcConfABC + + :Variable keyword arguments: Forwarded to :class:`lisa.conf.LevelKeyDesc`. + + This allows embedding a configuration inside another one, mostly to be able + to split a configuration class while preserving backward compatibility. + + .. note:: Only the children keys are taken from the passed level, other + information such as ``value_path`` are ignored and must be set + explicitly. + """ + + def __init__(self, name, help, conf, **kwargs): + # Make a deepcopy to ensure we will not change the parent attribute of + # an existing structure. + level = copy.deepcopy(conf.STRUCTURE) + + children = level.values() + super().__init__( + name=name, + help=help, + children=children, + **kwargs + ) + + class TopLevelKeyDescBase(LevelKeyDesc): """ Top-level key descriptor, which defines the top-level key to use in the @@ -681,11 +721,11 @@ class TopLevelKeyDescBase(LevelKeyDesc): def _check_name(cls, name): pass - def get_help(self, style=None): + def get_help(self, style=None, **kwargs): if style == 'yaml': return self.help else: - return super().get_help(style=style) + return super().get_help(style=style, **kwargs) class TopLevelKeyDesc(TopLevelKeyDescBase): @@ -945,11 +985,7 @@ class MultiSrcConfABC(Serializable, abc.ABC): # type given in KeyDesc.__init__(classinfo=...) class NewtypeMeta(type): def __instancecheck__(cls, x): - classinfo = tuple( - c if c is not None else type(None) - for c in key_desc.classinfo - ) - return isinstance(x, classinfo) + return check_type(x, key_desc.classinfo) return NewtypeMeta @@ -1050,6 +1086,14 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): .. note:: Since the dosctring is interpreted as a template, "{" and "}" characters must be doubled to appear in the final output. + + .. 
attention:: The layout of the configuration is typically guaranteed to + be backward-compatible in terms of accepted shape of input, but layout + of the configuration might change. This means that the path to a given + key could change as long as old input is still accepted. Types of + values can also be widened, so third party code re-using config classes + from :mod:`lisa` might have to evolve along the changes of + configuration. """ @abc.abstractmethod @@ -1194,7 +1238,7 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): conf.force_src_nested(src_override) return conf - def add_src(self, src, conf, filter_none=False, fallback=False): + def add_src(self, src, conf, filter_none=False, fallback=False, inplace=True): """ Add a source of configuration. @@ -1216,6 +1260,10 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): priority override is setup. :type fallback: bool + :param inplace: If ``True``, the object is modified. If ``False``, a + mutated copy is returned and the original object is left unmodified. + :type inplace: bool + This method provides a way to update the configuration, by importing a mapping as a new source. 
""" @@ -1242,24 +1290,26 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): return self def format_conf(conf): - conf = conf or {} - # Make sure that mappings won't be too long - max_mapping_len = 10 - key_val = sorted(conf.items()) - if len(key_val) > max_mapping_len: - key_val = key_val[:max_mapping_len] - key_val.append((PlaceHolder(), PlaceHolder())) - - def format_val(val): - if isinstance(val, Mapping): - return format_conf(val) - else: - return NonEscapedValue(val) + if isinstance(conf, Mapping): + # Make sure that mappings won't be too long + max_mapping_len = 10 + key_val = sorted(conf.items()) + if len(key_val) > max_mapping_len: + key_val = key_val[:max_mapping_len] + key_val.append((PlaceHolder(), PlaceHolder())) + + def format_val(val): + if isinstance(val, Mapping): + return format_conf(val) + else: + return NonEscapedValue(val) - return { - key: format_val(val) - for key, val in key_val - } + return { + key: format_val(val) + for key, val in key_val + } + else: + return conf logger = self.logger if logger.isEnabledFor(logging.DEBUG): @@ -1275,43 +1325,58 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): filename=filename if filename else '', lineno=lineno if lineno else '', )) + + self = self if inplace else copy.copy(self) return self._add_src( src, conf, filter_none=filter_none, fallback=fallback ) def _add_src(self, src, conf, filter_none=False, fallback=False): - conf = conf or {} - # Filter-out None values, so they won't override actual data from - # another source - if filter_none: - conf = { + conf = {} if conf is None else conf + + if isinstance(conf, Mapping): + # Filter-out None values, so they won't override actual data from + # another source + if filter_none: + conf = { + k: v for k, v in conf.items() + if v is not None + } + + # only validate at that level, since sublevel will take care of + # filtering then validating their own level + validated_conf = { k: v for k, v in conf.items() - if v is not None + if not 
isinstance(self._structure[k], LevelKeyDesc) } - - # only validate at that level, since sublevel will take care of - # filtering then validating their own level - validated_conf = { - k: v for k, v in conf.items() - if not isinstance(self._structure[k], LevelKeyDesc) - } - self._structure.validate_val(validated_conf) - - for key, val in conf.items(): - key_desc = self._structure[key] - # Dispatch the nested mapping to the right sublevel - if isinstance(key_desc, LevelKeyDesc): - # sublevels have already been initialized when the root object - # was created. - self._sublevel_map[key]._add_src(src, val, filter_none=filter_none, fallback=fallback) - # Derived keys cannot be set, since they are purely derived from - # other keys - elif isinstance(key_desc, DerivedKeyDesc): - raise ValueError(f'Cannot set a value for a derived key "{key_desc.qualname}"', key_desc.qualname) - # Otherwise that is a leaf value that we store at that level + self._structure.validate_val(validated_conf) + + for key, val in conf.items(): + key_desc = self._structure[key] + # Dispatch the nested mapping to the right sublevel + if isinstance(key_desc, LevelKeyDesc): + # sublevels have already been initialized when the root object + # was created. + self._sublevel_map[key]._add_src(src, val, filter_none=filter_none, fallback=fallback) + # Derived keys cannot be set, since they are purely derived from + # other keys + elif isinstance(key_desc, DerivedKeyDesc): + raise ValueError(f'Cannot set a value for a derived key "{key_desc.qualname}"', key_desc.qualname) + # Otherwise that is a leaf value that we store at that level + else: + self._key_map.setdefault(key, {})[src] = val + else: + # Non-mapping value are allowed if the level defines a subkey + # to assign to. We then craft a conf that sets that specific + # value. 
+ key_desc = self._structure + value_path = key_desc.value_path + if value_path is None: + raise ValueError(f'Cannot set a value for the key level "{key_desc.qualname}"', key_desc.qualname) else: - self._key_map.setdefault(key, {})[src] = val + conf = set_nested_key({}, list(value_path), conf) + self._add_src(src, conf, filter_none=filter_none, fallback=fallback) if src not in self._src_prio: if fallback: @@ -1319,6 +1384,8 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): else: self._src_prio.insert(0, src) + return self + def set_default_src(self, src_prio): """ Set the default source priority list. @@ -1691,10 +1758,14 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): yield key, val - for k, v in itertools.chain( + items = list(itertools.chain( self.items(eval_deferred=eval_deferred), derived_items() - ): + )) + _last = len(items) - 1 + + for i, (k, v) in enumerate(items): + last = i == _last v_cls = type(v) key_desc = self._structure[k] @@ -1713,12 +1784,8 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): else: v = ' ' + v - if is_sublevel: - k_str = '+- ' + k - v_prefix = ' ' - else: - k_str = '|- ' + k - v_prefix = '| ' + k_str = ('└' if last else '├') + ' ' + k + v_prefix = ' ' if is_sublevel else '| ' v = v.replace('\n', '\n' + v_prefix) @@ -2002,7 +2069,11 @@ class Configurable(abc.ABC): ':param {param}: {help}\n:type {param}: {type}\n'.format( param=param, help=key_desc.help, - type=' or '.join(get_cls_name(t) for t in key_desc.classinfo), + type=( + 'collections.abc.Mapping' + if isinstance(key_desc, LevelKeyDesc) else + ' or '.join(get_cls_name(t) for t in key_desc.classinfo) + ), ) for param, key_desc in cls._get_param_key_desc_map().items() diff --git a/lisa/energy_meter.py b/lisa/energy_meter.py index e72898d509bcb8cb47a28ef2a654c0d95c8666ec..3807a6a2a0c9479ef38dd0bcd4a582025f58b6e9 100644 --- a/lisa/energy_meter.py +++ b/lisa/energy_meter.py @@ -27,6 +27,7 @@ from collections.abc import Mapping from subprocess import 
Popen, PIPE, STDOUT import subprocess from time import sleep +import typing import numpy as np import pandas as pd @@ -39,7 +40,6 @@ from lisa.datautils import series_integrate from lisa.conf import ( SimpleMultiSrcConf, KeyDesc, TopLevelKeyDesc, Configurable, ) -from lisa._generic import TypedList # Default energy measurements for each board EnergyReport = namedtuple('EnergyReport', @@ -320,9 +320,9 @@ class AEPConf(SimpleMultiSrcConf, HideExekallID): """ STRUCTURE = TopLevelKeyDesc('aep-conf', 'AEP Energy Meter configuration', ( KeyDesc('channel-map', 'Channels to use', [Mapping]), - KeyDesc('resistor-values', 'Resistor values', [TypedList[float]]), - KeyDesc('labels', 'List of labels', [TypedList[str]]), - KeyDesc('device-entry', 'TTY device', [TypedList[str]]), + KeyDesc('resistor-values', 'Resistor values', [typing.Sequence[float]]), + KeyDesc('labels', 'List of labels', [typing.Sequence[str]]), + KeyDesc('device-entry', 'TTY device', [typing.Sequence[str]]), )) diff --git a/lisa/platforms/platinfo.py b/lisa/platforms/platinfo.py index c9c137f94b57d853dfba98cc56ab76a4f2957394..e6018dfe44f8c7e4d2cb44fc412d3861982b1cbc 100644 --- a/lisa/platforms/platinfo.py +++ b/lisa/platforms/platinfo.py @@ -19,13 +19,14 @@ import re import functools import contextlib from collections.abc import Mapping +import typing from lisa.utils import HideExekallID, group_by_value, memoized from lisa.conf import ( DeferredValue, DeferredExcep, MultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, DerivedKeyDesc, ConfigKeyError, ) -from lisa._generic import TypedDict, TypedList, SortedTypedList +from lisa._generic import SortedSequence from lisa.energy_model import EnergyModel from lisa.wlgen.rta import RTA @@ -70,9 +71,13 @@ class KernelSymbolsAddress(KeyDesc): return '' -CPUIdList = SortedTypedList[int] -FreqList = SortedTypedList[int] -CPUCapacities = TypedDict[int,int] +CPUIdSequence = SortedSequence[int] +FreqSequence = SortedSequence[int] +CPUCapacities = typing.Dict[int,int] + +# 
For backward compat only +CPUIdList = CPUIdSequence +FreqList = FreqSequence class PlatformInfo(MultiSrcConf, HideExekallID): @@ -89,13 +94,13 @@ class PlatformInfo(MultiSrcConf, HideExekallID): # we need. STRUCTURE = TopLevelKeyDesc('platform-info', 'Platform-specific information', ( LevelKeyDesc('rtapp', 'RTapp configuration', ( - KeyDesc('calib', 'RTapp calibration dictionary', [TypedDict[int,int]]), + KeyDesc('calib', 'RTapp calibration dictionary', [typing.Dict[int,int]]), )), LevelKeyDesc('kernel', 'Kernel-related information', ( KeyDesc('version', '', [KernelVersion]), KernelConfigKeyDesc('config', '', [TypedKernelConfig]), - KernelSymbolsAddress('symbols-address', 'Dictionary of addresses to symbol names extracted from /proc/kallsyms', [TypedDict[int,str]], deepcopy_val=False), + KernelSymbolsAddress('symbols-address', 'Dictionary of addresses to symbol names extracted from /proc/kallsyms', [typing.Dict[int,str]], deepcopy_val=False), )), KeyDesc('nrg-model', 'Energy model object', [EnergyModel]), LevelKeyDesc('cpu-capacities', 'Dictionaries of CPU ID to capacity value', ( @@ -117,12 +122,12 @@ class PlatformInfo(MultiSrcConf, HideExekallID): KeyDesc('freq-domains', 'Frequency domains modeled by a list of CPU IDs for each domain', - [TypedList[CPUIdList]]), - KeyDesc('freqs', 'Dictionnary of CPU ID to list of frequencies', [TypedDict[int, FreqList]]), + [typing.Sequence[CPUIdSequence]]), + KeyDesc('freqs', 'Dictionary of CPU ID to list of frequencies', [typing.Dict[int, FreqSequence]]), DerivedKeyDesc('capacity-classes', 'Capacity classes modeled by a list of CPU IDs for each capacity, sorted by capacity', - [TypedList[CPUIdList]], + [typing.Sequence[CPUIdSequence]], [['cpu-capacities', 'orig']], compute_capa_classes), )) """Some keys have a reserved meaning with an associated type.""" diff --git a/lisa/target.py b/lisa/target.py index 4be9daacd3298d1be4e0fd1cd7c0f3d0a4446461..d4f3c30c3fc7a14355d5047fd6ecd8e7061ed4f5 100644 --- a/lisa/target.py +++ 
b/lisa/target.py @@ -34,6 +34,8 @@ import hashlib import shutil from types import ModuleType, FunctionType from operator import itemgetter +import warnings +import typing import devlib from devlib.exception import TargetStableError @@ -42,9 +44,8 @@ from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized, destroyablecontextmanager, ContextManagerExit from lisa._assets import ASSETS_PATH -from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable -from lisa._generic import TypedList, TypedDict -from lisa._kmod import KernelTree, DynamicKmod +from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable, DelegatedLevelKeyDesc +from lisa._kmod import _KernelBuildEnv, DynamicKmod, _KernelBuildEnvConf from lisa.platforms.platinfo import PlatformInfo @@ -145,7 +146,7 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board name, free-form value only used to embelish logs', [str]), - KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [str]), + KeyDesc('kind', 'Target kind. Can be "linux" (ssh) or "android" (adb)', [typing.Literal['linux', 'android', 'host']]), KeyDesc('host', 'Hostname or IP address of the host', [str, None]), KeyDesc('username', 'SSH username. 
On ADB connections, "root" username will root adb upon target connection', [str, None]), @@ -155,15 +156,14 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): KeyDesc('keyfile', 'SSH private key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target workdir', [str]), - KeyDesc('tools', 'List of tools to install on the target', [TypedList[str]]), + KeyDesc('tools', 'List of tools to install on the target', [typing.Sequence[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to speed up the connection', [bool]), LevelKeyDesc('kernel', 'kernel information', ( KeyDesc('src', 'Path to kernel source tree matching the kernel running on the target used to build modules', [str, None]), - LevelKeyDesc('modules', 'kernel modules', ( - KeyDesc('build-env', 'Environment used to build modules. Can be any of "alpine" (Alpine Linux chroot, recommended) or "host" (host system)', [str]), - KeyDesc('make-variables', 'Extra variables to pass to "make" command, such as "CC"', [TypedDict[str, object]]), - KeyDesc('overlay-backend', 'Backend to use for overlaying folders while building modules. Can be "overlayfs" (overlayfs filesystem, recommended) or "copy (plain folder copy)', [str]), - )), + DelegatedLevelKeyDesc( + 'modules', 'Kernel module build environment', + _KernelBuildEnvConf, + ), )), LevelKeyDesc('wait-boot', 'Wait for the target to finish booting', ( KeyDesc('enable', 'Enable the boot check', [bool]), @@ -177,8 +177,8 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): KeyDesc('class', 'Name of the class to use', [str]), KeyDesc('args', 'Keyword arguments to build the Platform object', [Mapping]), )), - KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [TypedList[str]]), - KeyDesc('file-xfer', 'File transfer method. Can be "sftp" (default) or "scp". 
(Only valid for linux targets)', [TypedList[str]]), + KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [typing.Sequence[str]]), + KeyDesc('file-xfer', 'File transfer method. Can be "sftp" (default) or "scp". (Only valid for linux targets)', [typing.Sequence[str]]), KeyDesc('max-async', 'Maximum number of asynchronous commands in flight at any time', [int, None]), )) @@ -260,9 +260,7 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): 'wait_boot_timeout': ['wait-boot', 'timeout'], 'kernel_src': ['kernel', 'src'], - 'kmod_build_env': ['kernel', 'modules', 'build-env'], - 'kmod_make_vars': ['kernel', 'modules', 'make-variables'], - 'kmod_overlay_backend': ['kernel', 'modules', 'overlay-backend'], + 'kmod_build_env': ['kernel', 'modules'], } def __init__(self, kind, name='', tools=[], res_dir=None, @@ -278,17 +276,8 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): # pylint: disable=dangerous-default-value super().__init__() logger = self.logger - self.name = name - self._kmod_tree = None - self._kmod_tree_spec = dict( - tree_path=kernel_src, - build_env=kmod_build_env, - make_vars=kmod_make_vars, - overlay_backend=kmod_overlay_backend, - ) - res_dir = res_dir if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', @@ -351,6 +340,38 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): cache_dir.mkdir(parents=True) self._cache_dir = cache_dir + self._kmod_build_env = None + + def _make_kernel_build_env_spec(kmod_build_env, abi): + kmod_build_env, *_ = _KernelBuildEnv._resolve_conf( + kmod_build_env, + abi=abi, + ) + deprecated = { + 'overlay-backend': kmod_overlay_backend, + 'make-variables': kmod_make_vars, + } + + cls_name = self.__class__.__qualname__ + if any(v is not None for v in deprecated.values()): + warnings.warn(f'{cls_name} kmod_overlay_backend and kmod_make_vars parameters are deprecated, please pass the information inside build_env instead 
using keys: {", ".join(deprecated.keys())}', DeprecationWarning) + + kmod_build_env.add_src( + src=f'deprecated-{cls_name}-params', + conf=deprecated, + filter_none=True + ) + + return dict( + tree_path=kernel_src, + build_conf=kmod_build_env, + ) + + self._kmod_build_env_spec = _make_kernel_build_env_spec( + kmod_build_env, + abi=self.plat_info['abi'], + ) + def _init_plat_info(self, plat_info=None, name=None, **kwargs): if plat_info is None: @@ -378,18 +399,18 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): k: v for k, v in self.__dict__.items() if k not in { - # this KernelTree contains a reference to ourselves, and will + # this _KernelBuildEnv contains a reference to ourselves, and will # fail to unpickle because of the circular dependency and the # fact that it will call its constructor again, trying to make # use of the Target before it is ready. - '_kmod_tree', + '_kmod_build_env', } } def __setstate__(self, dct): self.__dict__.update(dct) self._init_plat_info(deferred=True) - self._kmod_tree = None + self._kmod_build_env = None def get_kmod(self, mod_cls=DynamicKmod, **kwargs): """ @@ -404,30 +425,30 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): method of ``mod_cls``. """ try: - tree = kwargs['kernel_tree'] + build_env = kwargs['kernel_build_env'] except KeyError: - memoize_tree = True - if self._kmod_tree: - tree = self._kmod_tree + memoize_build_env = True + if self._kmod_build_env: + build_env = self._kmod_build_env else: - tree = KernelTree.from_target( + build_env = _KernelBuildEnv.from_target( target=self, - **self._kmod_tree_spec + **self._kmod_build_env_spec ) else: - memoize_tree = False + memoize_build_env = False - kwargs['kernel_tree'] = tree + kwargs['kernel_build_env'] = build_env mod = mod_cls.from_target( target=self, **kwargs, ) - if memoize_tree: + if memoize_build_env: # Memoize the KernelTree, as it is a reusable object. 
Memoizing # allows remembering its checksum across calls, which will allow # hitting the .ko cache without having to setup a kernel tree. - self._kmod_tree = mod.kernel_tree + self._kmod_build_env = mod.kernel_build_env return mod def cached_pull(self, src, dst, **kwargs): diff --git a/lisa/tests/base.py b/lisa/tests/base.py index 5fe208eab21ef72e6693d58aa866b2492984d2ac..9a1870954732eb70a6d596603f1ce92a16c52c08 100644 --- a/lisa/tests/base.py +++ b/lisa/tests/base.py @@ -31,6 +31,7 @@ import itertools import types import warnings from operator import attrgetter +import typing from datetime import datetime from collections import OrderedDict, ChainMap @@ -61,7 +62,6 @@ from lisa.trace import FtraceCollector, FtraceConf, DmesgCollector, ComposedColl from lisa.conf import ( SimpleMultiSrcConf, KeyDesc, TopLevelKeyDesc, ) -from lisa._generic import TypedList from lisa.pelt import pelt_settling_time @@ -1426,7 +1426,7 @@ class DmesgTestConf(TestConfBase): {yaml_example} """ STRUCTURE = TopLevelKeyDesc('dmesg', 'Dmesg test configuration', ( - KeyDesc('ignored-patterns', 'List of Python regex matching dmesg entries *content* to be ignored (see :class:`devlib.collector.dmesg.KernelLogEntry` for how the message is split)', [TypedList[str]]), + KeyDesc('ignored-patterns', 'List of Python regex matching dmesg entries *content* to be ignored (see :class:`devlib.collector.dmesg.KernelLogEntry` for how the message is split)', [typing.Sequence[str]]), )) diff --git a/lisa/trace.py b/lisa/trace.py index 63e04e8f2a859c69271250d9c0dd5f5f5f547ff0..d5bc53c64c10923da5cfbb67ce81aaad98aae74e 100644 --- a/lisa/trace.py +++ b/lisa/trace.py @@ -44,7 +44,7 @@ import subprocess import itertools import functools import fnmatch -from typing import Union +import typing from difflib import get_close_matches import numpy as np @@ -57,10 +57,9 @@ import devlib from lisa.utils import Loggable, HideExekallID, memoized, lru_memoized, deduplicate, take, deprecate, nullcontext, measure_time, checksum, 
newtype, groupby, PartialInit, kwargs_forwarded_to, kwargs_dispatcher, ComposedContextManager, get_nested_key, bothmethod from lisa.conf import SimpleMultiSrcConf, LevelKeyDesc, KeyDesc, TopLevelKeyDesc, Configurable -from lisa._generic import TypedList from lisa.datautils import SignalDesc, df_add_delta, df_deduplicate, df_window, df_window_signals, series_convert from lisa.version import VERSION_TOKEN -from lisa._typeclass import FromString, IntListFromStringInstance +from lisa._typeclass import FromString from lisa._kmod import LISAFtraceDynamicKmod from lisa._assets import get_bin @@ -100,7 +99,7 @@ class TaskID(namedtuple('TaskID', ('pid', 'comm'))): _STR_PARSE_REGEX = re.compile(r'\[?([0-9]+):([a-zA-Z0-9_-]+)\]?') -class TaskIDFromStringInstance(FromString, types=TaskID): +class _TaskIDFromStringInstance(FromString, types=TaskID): """ Instance of :class:`lisa._typeclass.FromString` for :class:`TaskID` type. """ @@ -134,7 +133,7 @@ class TaskIDFromStringInstance(FromString, types=TaskID): """).strip() -class TaskIDListFromStringInstance(FromString, types=TypedList[TaskID]): +class _TaskIDSeqFromStringInstance(FromString, types=(typing.List[TaskID], typing.Sequence[TaskID])): """ Instance of :class:`lisa._typeclass.FromString` for lists :class:`TaskID` type. 
""" @@ -157,13 +156,12 @@ class TaskIDListFromStringInstance(FromString, types=TypedList[TaskID]): CPU = newtype(int, 'CPU', doc='Alias to ``int`` used for CPU IDs') -class CPUListFromStringInstance(FromString, types=TypedList[CPU]): - # Use the same implementation as for TypedList[int] - from_str = IntListFromStringInstance.from_str +class _CPUSeqFromStringInstance(FromString, types=(typing.List[CPU], typing.Sequence[CPU])): + from_str = FromString(typing.Sequence[int]).from_str @classmethod def get_format_description(cls, short): - return FromString(TypedList[int]).get_format_description(short=short) + return FromString(typing.Sequence[int]).get_format_description(short=short) class MissingMetadataError(KeyError): @@ -765,9 +763,7 @@ class TxtTraceParserBase(TraceParserBase): will be used as is. """ - # Since re.Match is only importable directly since Python >= 3.7, use a - # dummy match to get the type - _RE_MATCH_CLS = re.match('x', 'x').__class__ + _RE_MATCH_CLS = re.Match def __init__(self, lines, @@ -5627,9 +5623,9 @@ class FtraceConf(SimpleMultiSrcConf, HideExekallID): {yaml_example} """ STRUCTURE = TopLevelKeyDesc('ftrace-conf', 'FTrace configuration', ( - KeyDesc('events', 'FTrace events to trace', [TypedList[str], TraceEventCheckerBase]), - KeyDesc('events-namespaces', 'FTrace events namespaces to use. See Trace namespace constructor parameter.', [TypedList[Union[str, None], None]]), - KeyDesc('functions', 'FTrace functions to trace', [TypedList[str]]), + KeyDesc('events', 'FTrace events to trace', [typing.Sequence[str], TraceEventCheckerBase]), + KeyDesc('events-namespaces', 'FTrace events namespaces to use. 
See Trace namespace constructor parameter.', [typing.Sequence[typing.Union[str, None]], None]), + KeyDesc('functions', 'FTrace functions to trace', [typing.Sequence[str]]), KeyDesc('buffer-size', 'FTrace buffer size', [int]), KeyDesc('trace-clock', 'Clock used while tracing (see "trace_clock" in ftrace.txt kernel doc)', [str, None]), KeyDesc('saved-cmdlines-nr', 'Number of saved cmdlines with associated PID while tracing', [int]), diff --git a/lisa/utils.py b/lisa/utils.py index 8032cc3c958cef3efc078634aa99a059faf5fe06..585b5eba65b1579af017a6db1755fcc132e246b0 100644 --- a/lisa/utils.py +++ b/lisa/utils.py @@ -490,7 +490,7 @@ def get_cls_name(cls, style=None, fully_qualified=True): """ Get a prettily-formated name for the class given as parameter - :param cls: class to get the name from + :param cls: Class or typing hint to get the name from. :type cls: type :param style: When "rst", a RestructuredText snippet is returned @@ -499,17 +499,25 @@ def get_cls_name(cls, style=None, fully_qualified=True): """ if cls is None: return 'None' - - if fully_qualified or style == 'rst': - mod_name = inspect.getmodule(cls).__name__ - mod_name = mod_name + '.' if mod_name not in ('builtins', '__main__') else '' else: - mod_name = '' + try: + qualname = cls.__qualname__ + # type annotations like typing.Union[str, int] do not have a __qualname__ + except AttributeError: + name = str(cls) + else: + if fully_qualified or style == 'rst': + mod_name = inspect.getmodule(cls).__name__ + mod_name = mod_name + '.' 
if mod_name not in ('builtins', '__main__') else '' + else: + mod_name = '' - name = mod_name + cls.__qualname__ - if style == 'rst': - name = f':class:`~{name}`' - return name + name = mod_name + cls.__qualname__ + + if style == 'rst': + name = f':class:`~{name}`' + + return name def get_common_ancestor(classes): @@ -1649,6 +1657,7 @@ def set_nested_key(mapping, key_path, val, level=None): :type level: collections.abc.Callable """ assert key_path + input_mapping = mapping if level is None: # This should work for dict and most basic structures @@ -1663,6 +1672,7 @@ def set_nested_key(mapping, key_path, val, level=None): mapping = new_level mapping[key_path[-1]] = val + return input_mapping def loopify(items): @@ -3679,10 +3689,33 @@ class DirCache(Loggable): """ Return the token associated with the given ``key``. """ + def normalize(x): + def with_typ(key): + return ( + x.__class__.__module__, + x.__class__.__qualname__, + key, + ) + + if isinstance(x, str): + return x + elif isinstance(x, Mapping): + return with_typ(sorted( + (normalize(k), normalize(v)) + for k, v in x.items() + )) + elif isinstance(x, Iterable): + return with_typ(tuple(map(normalize, x))) + else: + return with_typ(repr(x)) + + key = normalize(key) + key = repr(key).encode('utf-8') + h = hashlib.sha256() - for x in key: - h.update(repr(x).encode('utf-8')) + h.update(key) token = h.hexdigest() + return token def _get_path(self, key): @@ -3713,8 +3746,20 @@ class DirCache(Loggable): :param key: Key of the cache entry. All the components of the key must be isomorphic to their ``repr()``, otherwise the cache will be hit - in cases where it should not. - :type key: tuple(str) + in cases where it should not. For convenience, some types are + normalized: + + * :class:`~collections.abc.Mapping` is only considered for its keys + and values and type name. Keys are sorted. If the + passed object contains other relevant metadata, it should be + rendered to a string first by the caller. 
+ + * :class:`~collections.abc.Iterable` keys are normalized and the + object is only considered as an iterable. If other relevant + metadata is contained in the object, it should be rendered to a + string by the caller. + + :type key: object .. note:: The return folder must never be modified, as it would lead to races. diff --git a/setup.py b/setup.py index 80f940a5c038acf8a7a4ccef09a856d15e7e37ba..f747205ab4fb2aa492be884d14b0443baa834bec 100755 --- a/setup.py +++ b/setup.py @@ -96,7 +96,7 @@ extras_require['all'] = sorted(set( itertools.chain.from_iterable(extras_require.values()) )) -python_requires = '>= 3.7' +python_requires = '>= 3.8' if __name__ == "__main__": @@ -156,6 +156,8 @@ if __name__ == "__main__": "pyelftools", # To get symbol names in kernel module "cffi", # unshare syscall + + "typeguard", ], extras_require=extras_require, diff --git a/tests/test_conf.py b/tests/test_conf.py index 56d6110b982bf51f33a89a542f0d5f711e4b3950..49f820b60d8f305f8cc1b8091c743512ed6d9925 100644 --- a/tests/test_conf.py +++ b/tests/test_conf.py @@ -19,11 +19,11 @@ import os import copy from unittest import TestCase +import typing import pytest from lisa.conf import MultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, DerivedKeyDesc, DeferredValue -from lisa._generic import TypedList from .utils import StorageTestCase, HOST_PLAT_INFO, HOST_TARGET_CONF """ A test suite for the MultiSrcConf subclasses.""" @@ -86,8 +86,8 @@ def compute_derived(base_conf): INTERNAL_STRUCTURE = ( KeyDesc('foo', 'foo help', [int]), - KeyDesc('bar', 'bar help', [TypedList[int]]), - KeyDesc('multitypes', 'multitypes help', [TypedList[int], str, None]), + KeyDesc('bar', 'bar help', [typing.Sequence[int]]), + KeyDesc('multitypes', 'multitypes help', [typing.Sequence[int], str, None]), LevelKeyDesc('sublevel', 'sublevel help', ( KeyDesc('subkey', 'subkey help', [int]), )),