From f91c1541f8c423c128e816c0caf1f405d6510d58 Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Mon, 26 Nov 2018 16:21:22 +0000 Subject: [PATCH 1/9] env: Fix reporting TestEnv was wrongly reporting ftrace events/functions as being enabled, although it was not using data that was modified by devlib in any way. --- lisa/env.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/lisa/env.py b/lisa/env.py index 32be5571a..ceb176adb 100644 --- a/lisa/env.py +++ b/lisa/env.py @@ -661,15 +661,6 @@ class TestEnv(Loggable, HideExekallID): autoview = False ) - if events: - logger.info('Enabled tracepoints:') - for event in events: - logger.info(' %s', event) - if functions: - logger.info('Kernel functions profiled:') - for function in functions: - logger.info(' %s', function) - return ftrace @contextlib.contextmanager -- GitLab From ea0f8d88c0ae18cfc63c30b13a166a6334286008 Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Tue, 27 Nov 2018 12:09:20 +0000 Subject: [PATCH 2/9] shell: init_env: more robust shell detection Use BASH_VERSION and ZSH_VERSION to make shell detection easier and more robust. 
--- init_env | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/init_env b/init_env index 5e59e95c2..df96b0dcb 100644 --- a/init_env +++ b/init_env @@ -19,7 +19,7 @@ # LISA Shell: Initialization # Running under BASH -if grep "bash" /proc/$$/cmdline &>/dev/null; then +if [[ -n $BASH_VERSION ]]; then # Get base installation path of LISA export LISA_HOME=$(readlink -f "$(dirname "$BASH_SOURCE")") @@ -30,7 +30,7 @@ if grep "bash" /proc/$$/cmdline &>/dev/null; then hash -r # Running under ZSH -elif grep "zsh" /proc/$$/cmdline &>/dev/null; then +elif [[ -n $ZSH_VERSION ]]; then # Get base installation path of LISA export LISA_HOME=${${${(%):-%N}:h}:A} -- GitLab From c8ac65f7c59e458b350ea2a1ef17e84bef2157f0 Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Tue, 27 Nov 2018 12:11:56 +0000 Subject: [PATCH 3/9] shell: init_env: robustify error management Make sure we will return an error by default in init_env if something bad happens. --- init_env | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/init_env b/init_env index df96b0dcb..107679b2d 100644 --- a/init_env +++ b/init_env @@ -18,12 +18,14 @@ # LISA Shell: Initialization +_lisa_shell_ret=1 + # Running under BASH if [[ -n $BASH_VERSION ]]; then # Get base installation path of LISA export LISA_HOME=$(readlink -f "$(dirname "$BASH_SOURCE")") - source "$(dirname "$BASH_SOURCE")/shell/lisa_shell"; RET=$? + source "$(dirname "$BASH_SOURCE")/shell/lisa_shell"; _lisa_shell_ret=$? PS1="\[${LISASHELL_BLUE}\][LISAShell \[${LISASHELL_LCYAN}\]\W\[${LISASHELL_BLUE}\]] \> \[${LISASHELL_RESET}\]" # Make sure that bash picks up new location for all binaries @@ -37,7 +39,7 @@ elif [[ -n $ZSH_VERSION ]]; then # Source the script in "sh" emulation mode. This will also mark the # functions declared there to be executed in emulated mode, so they will # work as well - emulate sh -c "source "$LISA_HOME/shell/lisa_shell""; RET=$? 
+ emulate sh -c "source "$LISA_HOME/shell/lisa_shell""; _lisa_shell_ret=$? # Make sure that zsh picks up new location for all binaries hash -r @@ -46,7 +48,7 @@ else # Check if a bash shell is available if which bash &>/dev/null; then # Switch to a BASH shell - exec bash --init-file ./init_env; RET=$? + exec bash --init-file ./init_env; _lisa_shell_ret=$? else echo "ERROR: A BASH shell is not available in PATH" fi @@ -55,6 +57,6 @@ else echo "Please, source this configuration from a terminal running BASH." fi -return $RET +return $_lisa_shell_ret # vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab -- GitLab From 8ceeed2992a7522e8a443262817dbd18cea2088a Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Tue, 27 Nov 2018 15:00:40 +0000 Subject: [PATCH 4/9] gitignore: Add some Python artifacts --- .gitignore | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 4276b4cd3..2cebb1c54 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ *~ .* *.pyc +__pycache__ +*.egg-info /results* *.log /tools/android-sdk-linux @@ -9,5 +11,4 @@ server.url /vagrant /tools/wa_venv /tools/wa_user_directory/dependencies -src/buildroot -*.egg-info +/src/buildroot -- GitLab From 352f9fbce1cc434eacaac39ed24d671766f3ada0 Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Tue, 27 Nov 2018 16:06:43 +0000 Subject: [PATCH 5/9] energy_meter: Cleanup Also add utils.Subclassable to allow discovering indirect subclasses of a mother class. 
--- lisa/analysis/base.py | 13 ---- lisa/analysis/proxy.py | 4 +- lisa/energy_meter.py | 136 +++++++++++++---------------------------- lisa/env.py | 21 +++++++ lisa/utils.py | 12 ++++ 5 files changed, 77 insertions(+), 109 deletions(-) diff --git a/lisa/analysis/base.py b/lisa/analysis/base.py index e8b491e02..fe222ff31 100644 --- a/lisa/analysis/base.py +++ b/lisa/analysis/base.py @@ -60,19 +60,6 @@ class AnalysisBase: self._big_cpus = plat_info['clusters']['big'] self._little_cpus = plat_info['clusters']['little'] - @classmethod - def get_subclasses(cls, cls_set=None): - """Get all indirect sublcasses of AnalysisBase.""" - if cls_set is None: - cls_set = set() - - for subcls in cls.__subclasses__(): - if subcls not in cls_set: - cls_set.add(subcls) - cls_set.update(subcls.get_subclasses(cls_set)) - - return cls_set - def _plot_setup(self, width=16, height=4, ncols=1, nrows=1): figure, axes = plt.subplots( ncols=ncols, nrows=nrows, figsize=(width, height * nrows) diff --git a/lisa/analysis/proxy.py b/lisa/analysis/proxy.py index 5952af510..65537ee4f 100644 --- a/lisa/analysis/proxy.py +++ b/lisa/analysis/proxy.py @@ -25,7 +25,7 @@ import inspect import itertools from lisa.analysis.base import AnalysisBase -from lisa.utils import Loggable +from lisa.utils import Loggable, get_subclasses class AnalysisProxy(Loggable): """ @@ -41,7 +41,7 @@ class AnalysisProxy(Loggable): # will have had a chance to get registered at that point self._class_map = { cls.name: cls - for cls in AnalysisBase.get_subclasses() + for cls in get_subclasses(AnalysisBase) # Classes without a "name" attribute directly defined in their # scope will not get registered. That allows having unnamed # intermediate base classes that are not meant to be exposed. diff --git a/lisa/energy_meter.py b/lisa/energy_meter.py index f6bdc2e2d..685978f52 100644 --- a/lisa/energy_meter.py +++ b/lisa/energy_meter.py @@ -15,13 +15,15 @@ # limitations under the License. 
# -import devlib +import abc import json import os import os.path import psutil import time import logging +import inspect +import abc from collections import namedtuple from subprocess import Popen, PIPE, STDOUT @@ -31,109 +33,58 @@ import numpy as np import pandas as pd from bart.common.Utils import area_under_curve - -from lisa.utils import Loggable +import devlib +from lisa.utils import Loggable, get_subclasses # Default energy measurements for each board -DEFAULT_ENERGY_METER = { - - # ARM TC2: by default use HWMON - 'tc2' : { - 'instrument' : 'hwmon', - 'channel_map' : { - 'LITTLE' : 'A7 Jcore', - 'big' : 'A15 Jcore', - } - }, - - # ARM Juno: by default use HWMON - 'juno' : { - 'instrument' : 'hwmon', - # if the channels do not contain a core name we can match to the - # little/big cores on the board, use a channel_map section to - # indicate which channel is which - 'channel_map' : { - 'LITTLE' : 'BOARDLITTLE', - 'big' : 'BOARDBIG', - } - }, - -} - EnergyReport = namedtuple('EnergyReport', ['channels', 'report_file', 'data_frame']) -class EnergyMeter(Loggable): - - _meter = None - - def __init__(self, target, res_dir=None): +class EnergyMeter(Loggable, abc.ABC): + def __init__(self, target, res_dir): self._target = target - res_dir = res_dir if res_dir else tempfile.gettempdir() self._res_dir = res_dir - #TODO: cleanup force and that singleton pattern @classmethod - def getInstance(cls, target, conf, force=False, res_dir=None): - - if not force and EnergyMeter._meter: - return EnergyMeter._meter - + def get_meter(cls, name, conf, target, res_dir=None): logger = cls.get_logger() + logger.debug('Results dir: %s', res_dir) - # Initialize energy meter based on configuration - if 'emeter' in conf: - emeter = conf['emeter'] - logger.debug('using user-defined configuration') - - # Initialize energy probe to board default - elif 'board' in conf and \ - conf['board'] in DEFAULT_ENERGY_METER: - emeter = DEFAULT_ENERGY_METER[conf['board']] - logger.debug('using default 
energy meter for [%s]', - conf['board']) - else: - return None - - if emeter['instrument'] == 'hwmon': - EnergyMeter._meter = HWMon(target, emeter, res_dir) - elif emeter['instrument'] == 'aep': - EnergyMeter._meter = AEP(target, emeter, res_dir) - elif emeter['instrument'] == 'monsoon': - EnergyMeter._meter = Monsoon(target, emeter, res_dir) - elif emeter['instrument'] == 'acme': - EnergyMeter._meter = ACME(target, emeter, res_dir) - elif emeter['instrument'] == 'gem5': - EnergyMeter._meter = Gem5EnergyMeter(target, emeter, res_dir) + for subcls in get_subclasses(cls): + if not inspect.isabstract(subcls): + if name == subcls.name: + return subcls(target, conf, res_dir) - logger.debug('Results dir: %s', res_dir) - return EnergyMeter._meter + raise ValueError('No EnergyMeter has name "{}"'.format(name)) + @abc.abstractmethod + def name(): + pass + + @abc.abstractmethod def sample(self): - raise NotImplementedError('Missing implementation') + pass + @abc.abstractmethod def reset(self): - raise NotImplementedError('Missing implementation') + pass - def report(self, out_dir): - raise NotImplementedError('Missing implementation') + @abc.abstractmethod + def report(self): + pass class HWMon(EnergyMeter): + name = 'hwmon' def __init__(self, target, conf=None, res_dir=None): - super(HWMon, self).__init__(target, res_dir) + super().__init__(target, res_dir) logger = self.get_logger() - # The HWMon energy meter - self._hwmon = None - # Energy readings self.readings = {} if 'hwmon' not in self._target.modules: - logger.info('HWMON module not enabled') - logger.warning('Energy sampling disabled by configuration') - return + raise RuntimeError('HWMON devlib module not enabled') # Initialize HWMON instrument logger.info('Scanning for HWMON channels, may take some time...') @@ -159,7 +110,7 @@ class HWMon(EnergyMeter): self._target.little_core.upper()] if all(s in available_sites for s in bl_sites): logger.info('Using default big.LITTLE hwmon channels') - self._channels = 
dict(list(zip(['big', 'LITTLE'], bl_sites))) + self._channels = dict(zip(['big', 'LITTLE'], bl_sites)) if not self._channels: logger.info('Using all hwmon energy channels') @@ -177,8 +128,6 @@ class HWMon(EnergyMeter): def sample(self): logger = self.get_logger() - if self._hwmon is None: - return None samples = self._hwmon.take_measurement() for s in samples: site = s.channel.site @@ -200,8 +149,6 @@ class HWMon(EnergyMeter): return self.readings def reset(self): - if self._hwmon is None: - return self.sample() for site in self.readings: self.readings[site]['delta'] = 0 @@ -209,8 +156,6 @@ class HWMon(EnergyMeter): self.get_logger().debug('RESET: %s', self.readings) def report(self, out_dir, out_file='energy.json'): - if self._hwmon is None: - return (None, None) # Retrive energy consumption data nrg = self.sample() # Reformat data for output generation @@ -225,7 +170,7 @@ class HWMon(EnergyMeter): clusters_nrg[channel] = nrg_total # Dump data as JSON file - nrg_file = '{}/{}'.format(out_dir, out_file) + nrg_file = os.path.join(out_dir, out_file) with open(nrg_file, 'w') as ofile: json.dump(clusters_nrg, ofile, sort_keys=True, indent=4) @@ -288,9 +233,10 @@ class _DevlibContinuousEnergyMeter(EnergyMeter): return channels_nrg class AEP(_DevlibContinuousEnergyMeter): + name = 'aep' def __init__(self, target, conf, res_dir): - super(AEP, self).__init__(target, res_dir) + super().__init__(target, res_dir) logger = self.get_logger() # Configure channels for energy measurements @@ -312,9 +258,10 @@ class Monsoon(_DevlibContinuousEnergyMeter): """ Monsoon Solutions energy monitor """ + name = 'monsoon' def __init__(self, target, conf, res_dir): - super(Monsoon, self).__init__(target, res_dir) + super().__init__(target, res_dir) self._instrument = devlib.MonsoonInstrument(self._target, **conf['conf']) self._instrument.reset() @@ -334,9 +281,10 @@ class ACME(EnergyMeter): """ BayLibre's ACME board based EnergyMeter """ + name = 'acme' def __init__(self, target, conf, 
res_dir): - super(ACME, self).__init__(target, res_dir) + super().__init__(target, res_dir) logger = self.get_logger() # Assume iio-capture is available in PATH @@ -414,7 +362,7 @@ class ACME(EnergyMeter): self._iio_device(channel)], stdout=PIPE, stderr=STDOUT) - # Wait few milliseconds before to check if there is any output + # Wait some time before to check if there is any output sleep(1) # Check that all required channels have been started @@ -450,8 +398,7 @@ class ACME(EnergyMeter): logger = self.get_logger() channels_nrg = {} channels_stats = {} - for channel in self._channels: - ch_id = self._channels[channel] + for channel, ch_id in self._channels.items(): if self._iio[ch_id] is None: continue @@ -501,22 +448,23 @@ class ACME(EnergyMeter): channels_nrg['{}'.format(channel)] = nrg['energy'] # Dump energy data - nrg_file = '{}/{}'.format(out_dir, out_energy) + nrg_file = os.path.join(out_dir, out_energy) with open(nrg_file, 'w') as ofile: json.dump(channels_nrg, ofile, sort_keys=True, indent=4) # Dump energy stats nrg_stats_file = os.path.splitext(out_energy)[0] + \ '_stats' + os.path.splitext(out_energy)[1] - nrg_stats_file = '{}/{}'.format(out_dir, nrg_stats_file) + nrg_stats_file = os.path.join(out_dir, nrg_stats_file) with open(nrg_stats_file, 'w') as ofile: json.dump(channels_stats, ofile, sort_keys=True, indent=4) return EnergyReport(channels_nrg, nrg_file, None) class Gem5EnergyMeter(_DevlibContinuousEnergyMeter): + name = 'gem5' def __init__(self, target, conf, res_dir): - super(Gem5EnergyMeter, self).__init__(target, res_dir) + super().__init__(target, res_dir) power_sites = list(conf['channel_map'].values()) self._instrument = devlib.Gem5PowerInstrument(self._target, power_sites) diff --git a/lisa/env.py b/lisa/env.py index ceb176adb..4737aba91 100644 --- a/lisa/env.py +++ b/lisa/env.py @@ -133,6 +133,10 @@ class TargetConf(MultiSrcConf, HideExekallID): KeyDesc('functions', 'FTrace functions to trace', [StrList]), KeyDesc('buffsize', 'FTrace buffer 
size', [int]), )), + LevelKeyDesc('emeter', 'Energy meter configuration', ( + KeyDesc('name', 'Energy meter name to use', [str]), + KeyDesc('conf', 'Energy meter configuration', [Mapping]), + )), LevelKeyDesc('devlib', 'devlib configuration', ( LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', ( KeyDesc('class', 'Name of the class to use', [str]), @@ -739,6 +743,23 @@ class TestEnv(Loggable, HideExekallID): for domain in self.target.cpufreq.iter_domains(): self.target.cpuidle.enable_all(domain[0]) + def get_emeter(self, res_dir=None): + spec = self.target_conf['emeter'] + name = spec['name'] + conf = spec['conf'] + + res_dir = res_dir if res_dir else self.get_res_dir( + name='EnergyMeter-{}'.format(name), + symlink=False + ) + + return EnergyMeter.get_meter( + name=name, + conf=conf, + target=self.target, + res_dir=res_dir, + ) + class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system, simulator, **kwargs): simulator_args = copy.copy(simulator.get('args', [])) diff --git a/lisa/utils.py b/lisa/utils.py index d7086f672..edb8df126 100644 --- a/lisa/utils.py +++ b/lisa/utils.py @@ -64,6 +64,18 @@ class Loggable: name += '.' + suffix return logging.getLogger(name) +def get_subclasses(cls, cls_set=None): + """Get all indirect subclasses of the class.""" + if cls_set is None: + cls_set = set() + + for subcls in cls.__subclasses__(): + if subcls not in cls_set: + cls_set.add(subcls) + cls_set.update(get_subclasses(subcls, cls_set)) + + return cls_set + class HideExekallID: """Hide the subclasses in the simplified ID format of exekall. -- GitLab From 6a25dbc39833adcf51fa19b3dcde6b4c6400235a Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Tue, 27 Nov 2018 16:07:23 +0000 Subject: [PATCH 6/9] utils: Allow relative path in !include Relative paths will be interpreted relatively to the file, not the current folder. 
--- lisa/utils.py | 32 ++++++++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/lisa/utils.py b/lisa/utils.py index edb8df126..8d3fe5255 100644 --- a/lisa/utils.py +++ b/lisa/utils.py @@ -32,6 +32,7 @@ import pkgutil import operator import numbers import difflib +import threading import ruamel.yaml from ruamel.yaml import YAML @@ -220,6 +221,21 @@ class Serializable(Loggable): kwargs = loader.construct_mapping(node, deep=True) return loader.make_python_instance(suffix, node, kwds=kwargs, newobj=False) + + # Allow !include to use relative paths from the current file. Since we + # introduce a global state, we use thread-local storage. + _included_path = threading.local() + _included_path.val = None + @staticmethod + @contextlib.contextmanager + def _set_relative_include_root(path): + old = Serializable._included_path.val + Serializable._included_path.val = path + try: + yield + finally: + Serializable._included_path.val = old + @classmethod def _yaml_include_constructor(cls, loader, node): """ @@ -231,8 +247,14 @@ class Serializable(Loggable): path = loader.construct_scalar(node) assert isinstance(path, str) path = os.path.expandvars(path) - with open(path, 'r', encoding=cls.YAML_ENCODING) as f: - return cls._yaml.load(f) + + # Paths are relative to the file that is being included + if not os.path.isabs(path): + path = os.path.join(Serializable._included_path.val, path) + + with cls._set_relative_include_root(path): + with open(path, 'r', encoding=cls.YAML_ENCODING) as f: + return cls._yaml.load(f) @classmethod def _yaml_env_var_constructor(cls, loader, suffix, node): @@ -324,6 +346,7 @@ class Serializable(Loggable): @classmethod def _from_path(cls, filepath, fmt): + filepath = str(filepath) if fmt is None: fmt = cls.DEFAULT_SERIALIZATION_FMT @@ -336,8 +359,9 @@ class Serializable(Loggable): else: raise ValueError('Unknown format "{}"'.format(fmt)) - with open(str(filepath), **kwargs) as fh: - instance = loader(fh) + with 
cls._set_relative_include_root(os.path.dirname(filepath)): + with open(filepath, **kwargs) as fh: + instance = loader(fh) return instance -- GitLab From af95e3393809374081d6402e8705c071de0fc57a Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Tue, 27 Nov 2018 17:29:33 +0000 Subject: [PATCH 7/9] env: Fix collect_ftrace contextmanager Always stop ftrace even if an exception is raised. --- lisa/env.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/lisa/env.py b/lisa/env.py index 4737aba91..e8223ecb0 100644 --- a/lisa/env.py +++ b/lisa/env.py @@ -721,10 +721,11 @@ class TestEnv(Loggable, HideExekallID): ftrace.start() - yield ftrace - - ftrace.stop() - ftrace.get_trace(output_file) + try: + yield ftrace + finally: + ftrace.stop() + ftrace.get_trace(output_file) @contextlib.contextmanager def disable_idle_states(self): -- GitLab From 7595128f0fa48a48ee1315232fbe2bad03c571a1 Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Wed, 28 Nov 2018 10:59:28 +0000 Subject: [PATCH 8/9] env: make a comment clearer --- lisa/env.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lisa/env.py b/lisa/env.py index e8223ecb0..4d8c1d25f 100644 --- a/lisa/env.py +++ b/lisa/env.py @@ -247,7 +247,8 @@ class TestEnv(Loggable, HideExekallID): # Take the board name from the target configuration so it becomes # available for later inspection. That board name is mostly free form - # and should not be relied upon. + # and no specific value should be expected for a given kind of board + # (i.e. a Juno board might be named "foo-bar-juno-on-my-desk") if board_name: self.plat_info.add_src('target-conf', dict(name=board_name)) -- GitLab From 4af3ad3379d56a6380c5b8a36896caa71e0ccc70 Mon Sep 17 00:00:00 2001 From: Douglas RAILLARD Date: Wed, 28 Nov 2018 18:57:28 +0000 Subject: [PATCH 9/9] exekall: fix circular import dependency Split common utils in _utils. 
--- tools/exekall/exekall/_utils.py | 485 ++++++++++++++++++++++++++++++++ tools/exekall/exekall/engine.py | 21 +- tools/exekall/exekall/utils.py | 478 +------------------------------ 3 files changed, 505 insertions(+), 479 deletions(-) create mode 100644 tools/exekall/exekall/_utils.py diff --git a/tools/exekall/exekall/_utils.py b/tools/exekall/exekall/_utils.py new file mode 100644 index 000000000..8a57a44b5 --- /dev/null +++ b/tools/exekall/exekall/_utils.py @@ -0,0 +1,485 @@ +#! /usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# +# Copyright (C) 2018, ARM Limited and contributors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import types +import uuid +import inspect +import functools +import fnmatch +import collections +import contextlib +import importlib +import io +import itertools +import logging +import pathlib +import pickle +import sys +import traceback + +class NotSerializableError(Exception): + pass + +def get_class_from_name(cls_name, module_map): + possible_mod_set = { + mod_name + for mod_name in module_map.keys() + if cls_name.startswith(mod_name) + } + + # Longest match in term of number of components + possible_mod_list = sorted(possible_mod_set, key=lambda name: len(name.split('.'))) + if possible_mod_list: + mod_name = possible_mod_list[-1] + else: + return None + + mod = module_map[mod_name] + cls_name = cls_name[len(mod_name)+1:] + return _get_class_from_name(cls_name, mod) + +def _get_class_from_name(cls_name, namespace): + if isinstance(namespace, collections.abc.Mapping): + namespace = types.SimpleNamespace(**namespace) + + split = cls_name.split('.', 1) + try: + obj = getattr(namespace, split[0]) + except AttributeError as e: + raise ValueError('Object not found') from e + + if len(split) > 1: + return _get_class_from_name('.'.join(split[1:]), obj) + else: + return obj + +def create_uuid(): + return uuid.uuid4().hex + +def get_mro(cls): + if cls is type(None) or cls is None: + return (type(None), object) + else: + assert isinstance(cls, type) + return inspect.getmro(cls) + +def get_name(obj, full_qual=True, qual=True): + # full_qual enabled implies qual enabled + _qual = qual or full_qual + # qual disabled implies full_qual disabled + full_qual = full_qual and qual + qual = _qual + + # Add the module's name in front of the name to get a fully + # qualified name + if full_qual: + module_name = obj.__module__ + module_name = ( + module_name + '.' 
+ if module_name != '__main__' and module_name != 'builtins' + else '' + ) + else: + module_name = '' + + if qual: + _get_name = lambda x: x.__qualname__ + else: + _get_name = lambda x: x.__name__ + + # Classmethods appear as bound method of classes. Since each subclass will + # get a different bound method object, we want to reflect that in the + # name we use, instead of always using the same name that the method got + # when it was defined + if inspect.ismethod(obj): + name = _get_name(obj.__self__) + '.' + obj.__name__ + else: + name = _get_name(obj) + + return module_name + name + +def get_src_loc(obj): + try: + src_line = inspect.getsourcelines(obj)[1] + src_file = inspect.getsourcefile(obj) + src_file = str(pathlib.Path(src_file).resolve()) + except (OSError, TypeError): + src_line, src_file = None, None + + return (src_file, src_line) + +def is_serializable(obj, raise_excep=False): + """ + Try to Pickle the object to see if that raises any exception. + """ + stream = io.StringIO() + try: + # This may be slow for big objects but it is the only way to be sure + # it can actually be serialized + pickle.dumps(obj) + except (TypeError, pickle.PickleError) as e: + debug('Cannot serialize instance of {}: {}'.format( + type(obj).__qualname__, str(e) + )) + if raise_excep: + raise NotSerializableError(obj) from e + return False + else: + return True + +# Call the given function at most once per set of parameters +def once(callable_): + return functools.lru_cache(maxsize=None, typed=True)(callable_) + +def remove_indices(iterable, ignored_indices): + return [v for i, v in enumerate(iterable) if i not in ignored_indices] + +# Basic reimplementation of typing.get_type_hints for Python versions that +# do not have a typing module available, and also avoids creating Optional[] +# when the parameter has a None default value. 
+def resolve_annotations(annotations, module_vars): + return { + # If we get a string, evaluate it in the global namespace of the + # module in which the callable was defined + param: cls if not isinstance(cls, str) else eval(cls, module_vars) + for param, cls in annotations.items() + } + +def get_module_basename(path): + path = pathlib.Path(path) + module_name = inspect.getmodulename(str(path)) + # This is either garbage or a package + if module_name is None: + module_name = path.name + return module_name + +def iterate_cb(iterator, pre_hook=None, post_hook=None): + with contextlib.suppress(StopIteration): + for i in itertools.count(): + # Do not execute pre_hook on the first iteration + if pre_hook and i: + pre_hook() + val = next(iterator) + if post_hook: + post_hook() + + yield val + +def format_exception(e): + elements = traceback.format_exception(type(e), e, e.__traceback__) + return ''.join(elements) + + +# Logging level above CRITICAL that is always displayed and used for output +LOGGING_OUT_LEVEL = 60 + +class ExekallFormatter(logging.Formatter): + def __init__(self, fmt, *args, **kwargs): + self.default_fmt = logging.Formatter(fmt, *args, **kwargs) + self.out_fmt = logging.Formatter('%(message)s', *args, **kwargs) + + def format(self, record): + # level above CRITICAL, so it is always displayed + if record.levelno == LOGGING_OUT_LEVEL: + return self.out_fmt.format(record) + # regular levels are logged with the regular formatter + else: + return self.default_fmt.format(record) + +def setup_logging(log_level, debug_log_file=None, info_log_file=None, verbose=0): + logging.addLevelName(LOGGING_OUT_LEVEL, 'OUT') + level=getattr(logging, log_level.upper()) + + verbose_formatter = ExekallFormatter('[%(name)s/%(filename)s:%(lineno)s][%(asctime)s] %(levelname)s %(message)s') + normal_formatter = ExekallFormatter('[%(name)s][%(asctime)s] %(levelname)s %(message)s') + + logger = logging.getLogger() + # We do not filter anything at the logger level, only at the 
handler level + logger.setLevel(logging.NOTSET) + + console_handler = logging.StreamHandler() + console_handler.setLevel(level) + formatter = verbose_formatter if verbose else normal_formatter + console_handler.setFormatter(formatter) + logger.addHandler(console_handler) + + if debug_log_file: + file_handler = logging.FileHandler(str(debug_log_file), encoding='utf-8') + file_handler.setLevel(logging.DEBUG) + file_handler.setFormatter(verbose_formatter) + logger.addHandler(file_handler) + + if info_log_file: + file_handler = logging.FileHandler(str(info_log_file), encoding='utf-8') + file_handler.setLevel(logging.INFO) + file_handler.setFormatter(normal_formatter) + logger.addHandler(file_handler) + + # Redirect all warnings of the "warnings" module as log entries + logging.captureWarnings(True) + +EXEKALL_LOGGER = logging.getLogger('EXEKALL') + +def out(msg): + EXEKALL_LOGGER.log(LOGGING_OUT_LEVEL, msg) + +def info(msg): + """Write a log message at the INFO level.""" + EXEKALL_LOGGER.info(msg) + +def debug(msg): + """Write a log message at the DEBUG level.""" + EXEKALL_LOGGER.debug(msg) + +def warn(msg): + """Write a log message at the WARNING level.""" + EXEKALL_LOGGER.warning(msg) + +def error(msg): + """Write a log message at the ERROR level.""" + EXEKALL_LOGGER.error(msg) + +def infer_mod_name(python_src): + """ + Compute the module name of a Python source file by inferring its top-level + package + """ + python_src = pathlib.Path(python_src) + module_path = None + + # First look for the outermost package we find in the parent directories. + # If we were supplied a path, it will not try to go past its highest folder. 
+ for folder in reversed(python_src.parents): + if pathlib.Path(folder, '__init__.py').exists(): + package_root_parent = folder.parents[0] + module_path = python_src.relative_to(package_root_parent) + break + # If no package was found, we try to find it through sys.path in case it is + # only using namespace packages + else: + for package_root_parent in sys.path: + try: + module_path = python_src.relative_to(package_root_parent) + break + except ValueError: + continue + + # If we found the top-level package + if module_path is not None: + module_parents = list(module_path.parents) + module_basename = get_module_basename(module_path) + + # Import all parent package_names before we import the module + for package_name in reversed(module_parents[:-1]): + package_name = import_file( + pathlib.Path(package_root_parent, package_name), + module_name = '.'.join(package_name.parts), + is_package = True, + ) + + module_name = '.'.join(( + ('.'.join(module_parents[0].parts)), + module_basename + )) + else: + module_name = get_module_basename(python_src) + + return module_name + +def find_customization_module_set(module_set): + def build_full_names(l_l): + """Explode list of lists, and build full package names.""" + for l in l_l: + for i, _ in enumerate(l): + i += 1 + yield '.'.join(l[:i]) + + try: + import_excep = ModuleNotFoundError + # Python < 3.6 + except NameError: + import_excep = AttributeError + + package_names_list = [ + module.__name__.split('.') + for module in module_set + ] + package_name_set = set(build_full_names(package_names_list)) + + customization_module_set = set() + + for name in package_name_set: + customize_name = name + '.exekall_customize' + # Only hide ModuleNotFoundError exceptions when looking up that + # specific module, we don't want to hide issues inside the module + # itself. 
+ module_exists = False + with contextlib.suppress(import_excep): + module_exists = importlib.util.find_spec(customize_name) + + if module_exists: + # Importing that module is enough to make the adaptor visible + # to the Adaptor base class + customize_module = importlib.import_module(customize_name) + customization_module_set.add(customize_module) + + return customization_module_set + +def import_file(python_src, module_name=None, is_package=False): + python_src = pathlib.Path(python_src) + if python_src.is_dir(): + is_package = True + + if module_name is None: + module_name = infer_mod_name(python_src) + + # Check if the module has already been imported + if module_name in sys.modules: + return sys.modules[module_name] + + is_namespace_package = False + if is_package: + # Signify that it is a package to + # importlib.util.spec_from_file_location + submodule_search_locations = [str(python_src)] + init_py = pathlib.Path(python_src, '__init__.py') + # __init__.py does not exists for namespace packages + if init_py.exists(): + python_src = init_py + else: + is_namespace_package = True + else: + submodule_search_locations = None + + # Python >= 3.5 style + if hasattr(importlib.util, 'module_from_spec'): + # We manually build a ModuleSpec for namespace packages, since + # spec_from_file_location apparently does not handle them + if is_namespace_package: + spec = importlib.machinery.ModuleSpec( + name=module_name, + # loader is None for namespace packages + loader=None, + is_package=True + ) + else: + spec = importlib.util.spec_from_file_location(module_name, str(python_src), + submodule_search_locations=submodule_search_locations) + if spec is None: + raise ValueError('Could not find module "{module}" at {path}'.format( + module=module_name, + path=python_src + )) + + module = importlib.util.module_from_spec(spec) + # Register module before executing it so relative imports will work + sys.modules[module_name] = module + # Nothing to execute in a namespace package + if 
not is_namespace_package: + spec.loader.exec_module(module) + # Python <= v3.4 style + else: + module = importlib.machinery.SourceFileLoader( + module_name, str(python_src)).load_module() + + sys.modules[module_name] = module + importlib.invalidate_caches() + return module + +def flatten_nested_seq(seq): + return list(itertools.chain.from_iterable(seq)) + +def load_serial_from_db(db, uuid_seq=None, type_pattern_seq=None): + + def uuid_predicate(serial): + return ( + serial.value_uuid in uuid_seq + or serial.excep_uuid in uuid_seq + ) + + def type_pattern_predicate(serial): + return match_base_cls(type(serial.value), type_pattern_seq) + + if type_pattern_seq and not uuid_seq: + predicate = type_pattern_predicate + + elif uuid_seq and not type_pattern_seq: + predicate = uuid_predicate + + elif not uuid_seq and not type_pattern_seq: + predicate = lambda serial: True + + else: + def predicate(serial): + return uuid_predicate(serial) and type_pattern_predicate(serial) + + return db.obj_store.get_by_predicate(predicate) + +def match_base_cls(cls, pattern_list): + # Match on the name of the class of the object and all its base classes + for base_cls in get_mro(cls): + base_cls_name = get_name(base_cls, full_qual=True) + if not base_cls_name: + continue + if any( + fnmatch.fnmatch(base_cls_name, pattern) + for pattern in pattern_list + ): + return True + + return False + +def match_name(name, pattern_list): + if name is None: + return False + return any( + fnmatch.fnmatch(name, pattern) + for pattern in pattern_list + ) + +def get_recursive_module_set(module_set, package_set): + """Retrieve the set of all modules recurisvely imported from the modules in + `module_set`, if they are (indirectly) part of one of the packages named in + `package_set`. 
+ """ + + recursive_module_set = set() + for module in module_set: + _get_recursive_module_set(module, recursive_module_set, package_set) + + return recursive_module_set + +def _get_recursive_module_set(module, module_set, package_set): + if module in module_set: + return + module_set.add(module) + for imported_module in vars(module).values(): + if ( + isinstance(imported_module, types.ModuleType) + # We only recurse into modules that are part of the given set + # of packages + and any( + # Either a submodule of one of the packages or one of the + # packages themselves + imported_module.__name__.split('.', 1)[0] == package + for package in package_set + ) + ): + _get_recursive_module_set(imported_module, module_set, package_set) + + diff --git a/tools/exekall/exekall/engine.py b/tools/exekall/exekall/engine.py index a15f90481..64c51fe1e 100644 --- a/tools/exekall/exekall/engine.py +++ b/tools/exekall/exekall/engine.py @@ -29,7 +29,12 @@ import pprint import ruamel.yaml -import exekall.utils as utils +import exekall._utils as utils + +def take_first(iterable): + for i in iterable: + return i + return engine.NoValue class NoOperatorError(Exception): pass @@ -525,7 +530,7 @@ class Expression: # We only get the ID's of the parameter ExprValue that lead to the # ExprValue we are interested in param_id_map = OrderedDict( - (param, utils.take_first(param_expr._get_id( + (param, take_first(param_expr._get_id( with_tags = with_tags, full_qual = full_qual, qual = qual, @@ -877,7 +882,7 @@ class Expression: expr_val_list = [expr_val.value for expr_val in expr_val_set] assert expr_val_list[1:] == expr_val_list[:-1] - expr_data = utils.take_first(expr_val_set) + expr_data = take_first(expr_val_set) return (format_expr_value(expr_data, lambda x:''), '') # Prior to execution, we don't have an ExprValue yet else: @@ -904,7 +909,7 @@ class Expression: self.get_param_map(reusable=False).items(), ) - first_param = utils.take_first(self.param_map.keys()) + first_param = 
take_first(self.param_map.keys()) for param, param_expr in param_map_chain: # Rename "self" parameter for more natural-looking output @@ -1041,7 +1046,7 @@ class Expression: # Rename "self" parameter to the name of the variable we are # going to apply the method on if self.op.is_method: - first_param = utils.take_first(param_expr_val_map) + first_param = take_first(param_expr_val_map) param_expr_val = param_expr_val_map.pop(first_param) self_param = make_var(make_method_self_name(param_expr_val.expr)) param_expr_val_map[self_param] = param_expr_val @@ -1377,7 +1382,7 @@ class Operator: for param, value_list in param_callable_map.items(): # We just get the type of the first item in the list, which should # work in most cases - param_type = type(utils.take_first(value_list)) + param_type = type(take_first(value_list)) # Create an artificial new type that will only be produced by # the PrebuiltOperator @@ -1541,7 +1546,7 @@ class Operator: def get_prototype(self): sig = self.signature - first_param = utils.take_first(sig.parameters) + first_param = take_first(sig.parameters) annotation_map = utils.resolve_annotations(self.annotations, self.callable_globals) extra_ignored_param = set() @@ -1888,7 +1893,7 @@ class ExprValue: def get_id(self, *args, with_tags=True, **kwargs): # There exists only one ID for a given ExprValue so we just return it # instead of an iterator. - return utils.take_first(self.expr.get_id(with_tags=with_tags, + return take_first(self.expr.get_id(with_tags=with_tags, expr_val=self, *args, **kwargs)) def get_failed_values(self): diff --git a/tools/exekall/exekall/utils.py b/tools/exekall/exekall/utils.py index 69548a685..f92bfe477 100644 --- a/tools/exekall/exekall/utils.py +++ b/tools/exekall/exekall/utils.py @@ -16,216 +16,16 @@ # limitations under the License. 
# -import collections -import contextlib -import fnmatch -import functools -import importlib -import inspect -import io -import itertools -import logging -import pathlib -import pickle -import sys -import traceback -import types -import uuid +# This module is allowed to import engine, as engine does not import it (so +# there is no circular dependency). Most utils should go in _utils unless they +# really need to depend on the engine, without being part of it. +import inspect import exekall.engine as engine -def get_src_loc(obj): - try: - src_line = inspect.getsourcelines(obj)[1] - src_file = inspect.getsourcefile(obj) - src_file = str(pathlib.Path(src_file).resolve()) - except (OSError, TypeError): - src_line, src_file = None, None - - return (src_file, src_line) - -class NotSerializableError(Exception): - pass - -def is_serializable(obj, raise_excep=False): - """ - Try to Pickle the object to see if that raises any exception. - """ - stream = io.StringIO() - try: - # This may be slow for big objects but it is the only way to be sure - # it can actually be serialized - pickle.dumps(obj) - except (TypeError, pickle.PickleError) as e: - debug('Cannot serialize instance of {}: {}'.format( - type(obj).__qualname__, str(e) - )) - if raise_excep: - raise NotSerializableError(obj) from e - return False - else: - return True - -def remove_indices(iterable, ignored_indices): - return [v for i, v in enumerate(iterable) if i not in ignored_indices] - -def flatten_nested_seq(seq): - return list(itertools.chain.from_iterable(seq)) - -def load_serial_from_db(db, uuid_seq=None, type_pattern_seq=None): - - def uuid_predicate(serial): - return ( - serial.value_uuid in uuid_seq - or serial.excep_uuid in uuid_seq - ) - - def type_pattern_predicate(serial): - return match_base_cls(type(serial.value), type_pattern_seq) - - if type_pattern_seq and not uuid_seq: - predicate = type_pattern_predicate - - elif uuid_seq and not type_pattern_seq: - predicate = uuid_predicate - - elif not 
uuid_seq and not type_pattern_seq: - predicate = lambda serial: True - - else: - def predicate(serial): - return uuid_predicate(serial) and type_pattern_predicate(serial) - - return db.obj_store.get_by_predicate(predicate) - -def match_base_cls(cls, pattern_list): - # Match on the name of the class of the object and all its base classes - for base_cls in get_mro(cls): - base_cls_name = get_name(base_cls, full_qual=True) - if not base_cls_name: - continue - if any( - fnmatch.fnmatch(base_cls_name, pattern) - for pattern in pattern_list - ): - return True - - return False - -def match_name(name, pattern_list): - if name is None: - return False - return any( - fnmatch.fnmatch(name, pattern) - for pattern in pattern_list - ) - -def get_mro(cls): - if cls is type(None) or cls is None: - return (type(None), object) - else: - assert isinstance(cls, type) - return inspect.getmro(cls) - - -def get_name(obj, full_qual=True, qual=True): - # full_qual enabled implies qual enabled - _qual = qual or full_qual - # qual disabled implies full_qual disabled - full_qual = full_qual and qual - qual = _qual - - # Add the module's name in front of the name to get a fully - # qualified name - if full_qual: - module_name = obj.__module__ - module_name = ( - module_name + '.' - if module_name != '__main__' and module_name != 'builtins' - else '' - ) - else: - module_name = '' - - if qual: - _get_name = lambda x: x.__qualname__ - else: - _get_name = lambda x: x.__name__ - - # Classmethods appear as bound method of classes. Since each subclass will - # get a different bound method object, we want to reflect that in the - # name we use, instead of always using the same name that the method got - # when it was defined - if inspect.ismethod(obj): - name = _get_name(obj.__self__) + '.' 
+ obj.__name__ - else: - name = _get_name(obj) - - return module_name + name - -def get_class_from_name(cls_name, module_map): - possible_mod_set = { - mod_name - for mod_name in module_map.keys() - if cls_name.startswith(mod_name) - } - - # Longest match in term of number of components - possible_mod_list = sorted(possible_mod_set, key=lambda name: len(name.split('.'))) - if possible_mod_list: - mod_name = possible_mod_list[-1] - else: - return None - - mod = module_map[mod_name] - cls_name = cls_name[len(mod_name)+1:] - return _get_class_from_name(cls_name, mod) - -def _get_class_from_name(cls_name, namespace): - if isinstance(namespace, collections.abc.Mapping): - namespace = types.SimpleNamespace(**namespace) - - split = cls_name.split('.', 1) - try: - obj = getattr(namespace, split[0]) - except AttributeError as e: - raise ValueError('Object not found') from e - - if len(split) > 1: - return _get_class_from_name('.'.join(split[1:]), obj) - else: - return obj - - -def get_recursive_module_set(module_set, package_set): - """Retrieve the set of all modules recurisvely imported from the modules in - `module_set`, if they are (indirectly) part of one of the packages named in - `package_set`. 
- """ - - recursive_module_set = set() - for module in module_set: - _get_recursive_module_set(module, recursive_module_set, package_set) - - return recursive_module_set - -def _get_recursive_module_set(module, module_set, package_set): - if module in module_set: - return - module_set.add(module) - for imported_module in vars(module).values(): - if ( - isinstance(imported_module, types.ModuleType) - # We only recurse into modules that are part of the given set - # of packages - and any( - # Either a submodule of one of the packages or one of the - # packages themselves - imported_module.__name__.split('.', 1)[0] == package - for package in package_set - ) - ): - _get_recursive_module_set(imported_module, module_set, package_set) +# Re-export all _utils here +from exekall._utils import * +from exekall.engine import take_first def get_callable_set(module_set, verbose=False): # We keep the search local to the packages these modules are defined in, to @@ -275,175 +75,6 @@ def _get_callable_set(module, verbose): callable_pool.add(callable_) return callable_pool -# Basic reimplementation of typing.get_type_hints for Python versions that -# do not have a typing module available, and also avoids creating Optional[] -# when the parameter has a None default value. 
-def resolve_annotations(annotations, module_vars): - return { - # If we get a string, evaluate it in the global namespace of the - # module in which the callable was defined - param: cls if not isinstance(cls, str) else eval(cls, module_vars) - for param, cls in annotations.items() - } - -def find_customization_module_set(module_set): - def build_full_names(l_l): - """Explode list of lists, and build full package names.""" - for l in l_l: - for i, _ in enumerate(l): - i += 1 - yield '.'.join(l[:i]) - - try: - import_excep = ModuleNotFoundError - # Python < 3.6 - except NameError: - import_excep = AttributeError - - package_names_list = [ - module.__name__.split('.') - for module in module_set - ] - package_name_set = set(build_full_names(package_names_list)) - - customization_module_set = set() - - for name in package_name_set: - customize_name = name + '.exekall_customize' - # Only hide ModuleNotFoundError exceptions when looking up that - # specific module, we don't want to hide issues inside the module - # itself. 
- module_exists = False - with contextlib.suppress(import_excep): - module_exists = importlib.util.find_spec(customize_name) - - if module_exists: - # Importing that module is enough to make the adaptor visible - # to the Adaptor base class - customize_module = importlib.import_module(customize_name) - customization_module_set.add(customize_module) - - return customization_module_set - -def import_file(python_src, module_name=None, is_package=False): - python_src = pathlib.Path(python_src) - if python_src.is_dir(): - is_package = True - - if module_name is None: - module_name = infer_mod_name(python_src) - - # Check if the module has already been imported - if module_name in sys.modules: - return sys.modules[module_name] - - is_namespace_package = False - if is_package: - # Signify that it is a package to - # importlib.util.spec_from_file_location - submodule_search_locations = [str(python_src)] - init_py = pathlib.Path(python_src, '__init__.py') - # __init__.py does not exists for namespace packages - if init_py.exists(): - python_src = init_py - else: - is_namespace_package = True - else: - submodule_search_locations = None - - # Python >= 3.5 style - if hasattr(importlib.util, 'module_from_spec'): - # We manually build a ModuleSpec for namespace packages, since - # spec_from_file_location apparently does not handle them - if is_namespace_package: - spec = importlib.machinery.ModuleSpec( - name=module_name, - # loader is None for namespace packages - loader=None, - is_package=True - ) - else: - spec = importlib.util.spec_from_file_location(module_name, str(python_src), - submodule_search_locations=submodule_search_locations) - if spec is None: - raise ValueError('Could not find module "{module}" at {path}'.format( - module=module_name, - path=python_src - )) - - module = importlib.util.module_from_spec(spec) - # Register module before executing it so relative imports will work - sys.modules[module_name] = module - # Nothing to execute in a namespace package - if 
not is_namespace_package: - spec.loader.exec_module(module) - # Python <= v3.4 style - else: - module = importlib.machinery.SourceFileLoader( - module_name, str(python_src)).load_module() - - sys.modules[module_name] = module - importlib.invalidate_caches() - return module - -def infer_mod_name(python_src): - """ - Compute the module name of a Python source file by inferring its top-level - package - """ - python_src = pathlib.Path(python_src) - module_path = None - - # First look for the outermost package we find in the parent directories. - # If we were supplied a path, it will not try to go past its highest folder. - for folder in reversed(python_src.parents): - if pathlib.Path(folder, '__init__.py').exists(): - package_root_parent = folder.parents[0] - module_path = python_src.relative_to(package_root_parent) - break - # If no package was found, we try to find it through sys.path in case it is - # only using namespace packages - else: - for package_root_parent in sys.path: - try: - module_path = python_src.relative_to(package_root_parent) - break - except ValueError: - continue - - # If we found the top-level package - if module_path is not None: - module_parents = list(module_path.parents) - module_basename = get_module_basename(module_path) - - # Import all parent package_names before we import the module - for package_name in reversed(module_parents[:-1]): - package_name = import_file( - pathlib.Path(package_root_parent, package_name), - module_name = '.'.join(package_name.parts), - is_package = True, - ) - - module_name = '.'.join(( - ('.'.join(module_parents[0].parts)), - module_basename - )) - else: - module_name = get_module_basename(python_src) - - return module_name - -def get_module_basename(path): - path = pathlib.Path(path) - module_name = inspect.getmodulename(str(path)) - # This is either garbage or a package - if module_name is None: - module_name = path.name - return module_name - -def create_uuid(): - return uuid.uuid4().hex - def sweep_number( 
callable_, param, start, stop=None, step=1): @@ -464,98 +95,3 @@ def sweep_number( yield type_(i) i += step -# Call the given function at most once per set of parameters -def once(callable_): - return functools.lru_cache(maxsize=None, typed=True)(callable_) - -def iterate_cb(iterator, pre_hook=None, post_hook=None): - with contextlib.suppress(StopIteration): - for i in itertools.count(): - # Do not execute pre_hook on the first iteration - if pre_hook and i: - pre_hook() - val = next(iterator) - if post_hook: - post_hook() - - yield val - -def format_exception(e): - elements = traceback.format_exception(type(e), e, e.__traceback__) - return ''.join(elements) - -# Logging level above CRITICAL that is always displayed and used for output -LOGGING_OUT_LEVEL = 60 - -class ExekallFormatter(logging.Formatter): - def __init__(self, fmt, *args, **kwargs): - self.default_fmt = logging.Formatter(fmt, *args, **kwargs) - self.out_fmt = logging.Formatter('%(message)s', *args, **kwargs) - - def format(self, record): - # level above CRITICAL, so it is always displayed - if record.levelno == LOGGING_OUT_LEVEL: - return self.out_fmt.format(record) - # regular levels are logged with the regular formatter - else: - return self.default_fmt.format(record) - -def setup_logging(log_level, debug_log_file=None, info_log_file=None, verbose=0): - logging.addLevelName(LOGGING_OUT_LEVEL, 'OUT') - level=getattr(logging, log_level.upper()) - - verbose_formatter = ExekallFormatter('[%(name)s/%(filename)s:%(lineno)s][%(asctime)s] %(levelname)s %(message)s') - normal_formatter = ExekallFormatter('[%(name)s][%(asctime)s] %(levelname)s %(message)s') - - logger = logging.getLogger() - # We do not filter anything at the logger level, only at the handler level - logger.setLevel(logging.NOTSET) - - console_handler = logging.StreamHandler() - console_handler.setLevel(level) - formatter = verbose_formatter if verbose else normal_formatter - console_handler.setFormatter(formatter) - 
logger.addHandler(console_handler) - - if debug_log_file: - file_handler = logging.FileHandler(str(debug_log_file), encoding='utf-8') - file_handler.setLevel(logging.DEBUG) - file_handler.setFormatter(verbose_formatter) - logger.addHandler(file_handler) - - if info_log_file: - file_handler = logging.FileHandler(str(info_log_file), encoding='utf-8') - file_handler.setLevel(logging.INFO) - file_handler.setFormatter(normal_formatter) - logger.addHandler(file_handler) - - # Redirect all warnings of the "warnings" module as log entries - logging.captureWarnings(True) - -EXEKALL_LOGGER = logging.getLogger('EXEKALL') - -def out(msg): - EXEKALL_LOGGER.log(LOGGING_OUT_LEVEL, msg) - -def info(msg): - """Write a log message at the INFO level.""" - EXEKALL_LOGGER.info(msg) - -def debug(msg): - """Write a log message at the DEBUG level.""" - EXEKALL_LOGGER.debug(msg) - -def warn(msg): - """Write a log message at the WARNING level.""" - EXEKALL_LOGGER.warning(msg) - -def error(msg): - """Write a log message at the ERROR level.""" - EXEKALL_LOGGER.error(msg) - - -def take_first(iterable): - for i in iterable: - return i - return engine.NoValue - -- GitLab