diff --git a/doc/conf.py b/doc/conf.py
index 1a9f1278a73e099fd53f3e4d96c3fbfe3866dda1..c6b0006f21a20ccbc1397f83ffdc41deb8caa8b9 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -31,6 +31,7 @@ import pickle
 import shutil
 import shlex
 from urllib.parse import urlparse
+import itertools
 
 from sphinx.domains.python import PythonDomain
 
@@ -45,7 +46,7 @@ sys.path.insert(0, os.path.abspath('../'))
 
 # Import our packages after modifying sys.path
 import lisa
-from lisa.utils import sphinx_nitpick_ignore, setup_logging, get_obj_name, DirCache
+from lisa.utils import sphinx_nitpick_ignore, setup_logging, get_obj_name, DirCache, resolve_dotted_name
 from lisa.version import VERSION_TOKEN
 from lisa._doc.helpers import (
     autodoc_process_test_method, autodoc_process_analysis_events,
@@ -55,6 +56,7 @@ from lisa._doc.helpers import (
     DocPlotConf, autodoc_pre_make_plots,
     intersphinx_warn_missing_reference_handler,
 )
+from lisa.analysis.base import TraceAnalysisBase
 
 import devlib
 
@@ -93,6 +95,14 @@ def getvar(name, default=_NO_DEFAULT):
     return v
 
 
+def copy_file(src, dst):
+    src = Path(src)
+    dst = Path(dst)
+
+    dst.unlink(missing_ok=True)
+    shutil.copy2(src, dst)
+
+
 def prepare(home, enable_plots, outdir):
     configs = {}
     outdir = Path(outdir).resolve()
@@ -222,21 +232,55 @@ def prepare(home, enable_plots, outdir):
     notebooks = [
         'examples/analysis_plots.ipynb',
     ]
+
+    plot_conf_path = Path(home, 'doc', 'plot_conf.yml')
     if enable_plots:
+
+        def get_plot_methods(names=None):
+            meths = set(itertools.chain.from_iterable(
+                subclass.get_plot_methods()
+                for subclass in TraceAnalysisBase.get_analysis_classes().values()
+            ))
+
+            if names is None:
+                return meths
+            else:
+                meths = {
+                    get_obj_name(f): f
+                    for f in meths
+                }
+                return {
+                    f
+                    for name in names
+                    if (f := meths.get(name))
+                }
+
         def populate(key, temp_path):
+            (names, notebooks, *_) = key
+            plot_methods = get_plot_methods(names)
+
            # We pre-generate all the plots, otherwise we would end up running
            # polars code in a multiprocessing subprocess created by forking
            # CPython, leading to deadlocks:
            # https://github.com/sphinx-doc/sphinx/issues/12201
            hv.extension('bokeh')
-            plot_conf_path = Path(home, 'doc', 'plot_conf.yml')
            plot_conf = DocPlotConf.from_yaml_map(plot_conf_path)
-            plots = autodoc_pre_make_plots(plot_conf)
+            plots = autodoc_pre_make_plots(plot_conf, plot_methods)
+            plots = {
+                # Serialize by name so pickle does not raise an exception
+                # because of the wrappers with the updated __qualname__ and
+                # __module__. Otherwise, their name resolves to something else
+                # and pickle does not allow that.
+                get_obj_name(k): v
+                for k, v in plots.items()
+            }
 
            with open(temp_path / 'plots.pickle', 'wb') as f:
                pickle.dump(plots, f)
 
            for _path in notebooks:
+                _path = Path(_path)
+
                in_path = notebooks_in_base / _path
                out_path = temp_path / 'ipynb' / _path
                out_path.parent.mkdir(parents=True, exist_ok=True)
@@ -244,6 +288,7 @@ def prepare(home, enable_plots, outdir):
                    out_path.unlink()
                except FileNotFoundError:
                    pass
+                logging.info(f'Refreshing notebook: {in_path}')
 
                subprocess.check_call([
                    'jupyter',
@@ -259,19 +304,30 @@ def prepare(home, enable_plots, outdir):
        import panel as pn
        import jupyterlab
 
+        plot_methods = {
+            get_obj_name(f): f
+            for f in get_plot_methods()
+        }
        dir_cache = DirCache('doc_plots', populate=populate)
        key = (
+            sorted(plot_methods.keys()),
+            notebooks,
            hv.__version__,
            bokeh.__version__,
            pn.__version__,
            jupyterlab.__version__,
+            plot_conf_path.read_text(),
        )
        cache_path = dir_cache.get_entry(key)
+
        with open(cache_path / 'plots.pickle', 'rb') as f:
-            plots = pickle.load(f)
+            plots = {
+                plot_methods[name]: v
+                for name, v in pickle.load(f).items()
+            }
 
        for _path in notebooks:
-            shutil.copy2(
+            copy_file(
                cache_path / 'ipynb' / _path,
                Path(home, 'doc', 'workflows', 'ipynb') / _path,
            )
@@ -279,7 +335,7 @@ def prepare(home, enable_plots, outdir):
    else:
        plots = {}
        for _path in notebooks:
-            shutil.copy2(
+            copy_file(
                notebooks_in_base / _path,
                Path(home, 'doc', 'workflows', 'ipynb') / _path,
            )
diff --git a/doc/energy_analysis.rst b/doc/energy_analysis.rst
index a65772d3f2de5f9deb57bd1df517cd114a950d48..fc8864a1c8a40a34ea0bfb796c0557d34ee73230 100644
--- a/doc/energy_analysis.rst
+++ b/doc/energy_analysis.rst
@@ -23,7 +23,7 @@ by EAS, and lets us do some energy analysis.
    providing this target does have an energy model.
 
 Its most noteworthy use is in our :meth:`EAS behavioural tests
-`, as it lets us
+`, as it lets us
 estimate the amount of energy consumed in an execution trace and compare this
 to an estimated energy-optimal placement.
 
diff --git a/doc/workflows/automated_testing.rst b/doc/workflows/automated_testing.rst
index 4bbd275c7fad26b642bc890e9c737437705793e2..1e68a00f44275c4c0c2a3fbbf12c8be3dfd7a9bb 100644
--- a/doc/workflows/automated_testing.rst
+++ b/doc/workflows/automated_testing.rst
@@ -225,7 +225,7 @@ Later on, the processing methods can be run from the data collected:
    processing code over the set of data acquired during an earlier session.
    A typical use case would be to look at the impact of changing a margin of a
    test like the ``energy_est_threshold_pct`` parameter of
-   :meth:`~lisa_tests.kernel..scheduler.eas_behaviour.EASBehaviour.test_task_placement`
+   :meth:`~lisa_tests.arm.kernel.scheduler.eas_behaviour.EASBehaviour.test_task_placement`
 
 Aggregating results
 -------------------
diff --git a/lisa/_cli_tools/lisa_plot.py b/lisa/_cli_tools/lisa_plot.py
index d0716f15459a46201043b28b2771a83d0e8e2790..90ad3b8b9de689e7ce60ca380e89f0377cdf83cf 100755
--- a/lisa/_cli_tools/lisa_plot.py
+++ b/lisa/_cli_tools/lisa_plot.py
@@ -308,7 +308,7 @@ Available plots:
    events = sorted(events)
    print('Parsing trace events: {}'.format(', '.join(events)))
 
-    trace = Trace(args.trace, plat_info=plat_info, events=events, normalize_time=args.normalize_time, write_swap=True)
+    trace = Trace(args.trace, plat_info=plat_info, events=events, normalize_time=args.normalize_time)
    if args.window:
        window = args.window
        def clip(l, x, r):
diff --git a/lisa/_doc/helpers.py b/lisa/_doc/helpers.py
index 88f393ea5827147bc3e0597ccd3d4b240a8d9552..3656ef8d004297479cf5732f1cc37287c1f3fbac 100644
--- a/lisa/_doc/helpers.py
+++ b/lisa/_doc/helpers.py
@@ -1970,7 +1970,7 @@ class DocPlotConf(SimpleMultiSrcConf):
        KeyDesc('plots', 'Mapping of function qualnames to their settings', [Mapping], deepcopy_val=False),
    ))
 
-def autodoc_pre_make_plots(conf):
+def autodoc_pre_make_plots(conf, plot_methods):
    def spec_of_meth(conf, meth_name):
        plot_conf = conf['plots']
        default_spec = plot_conf.get('default', {})
@@ -2037,11 +2037,6 @@ def autodoc_pre_make_plots(conf):
        print(f'Plot for {meth.__qualname__} generated in {m.delta}s')
        return rst_figure
 
-    plot_methods = set(itertools.chain.from_iterable(
-        subclass.get_plot_methods()
-        for subclass in TraceAnalysisBase.get_analysis_classes().values()
-    ))
-
    preload_events(conf, plot_methods)
    plots = {
        meth: _make_plot(meth)
@@ -2055,7 +2050,6 @@ def autodoc_process_analysis_plots(app, what, name, obj, options, lines, plots):
    if what != 'method':
        return
 
-    name = get_obj_name(obj)
    try:
        rst_figure = plots[name]
    except KeyError:
@@ -2066,20 +2060,23 @@ def autodoc_process_analysis_plots(app, what, name, obj, options, lines, plots):
        lines[:0] = rst_figure.splitlines()
 
 
-def ana_invocation(obj):
-    methods = {
-        func: subclass
-        for subclass in TraceAnalysisBase.get_analysis_classes().values()
-        for name, func in inspect.getmembers(subclass, callable)
-    }
+def ana_invocation(obj, name=None):
+    if callable(obj):
+        if name:
+            try:
+                cls = _get_parent_namespace(name)
+            except ModuleNotFoundError:
+                raise ValueError(f'Cannot compute the parent namespace of: {obj}')
+        else:
+            cls = get_parent_namespace(obj)
 
-    try:
-        cls = methods[obj]
-    except (KeyError, TypeError):
-        raise ValueError(f'Could not find method {obj}')
+        if cls and (not inspect.ismodule(cls)) and issubclass(cls, AnalysisHelpers):
+            on_trace_name = f'trace.ana.{cls.name}.{obj.__name__}'
+            return f"*Called on* :class:`~lisa.trace.Trace` *instances as* ``{on_trace_name}()``"
+        else:
+            raise ValueError(f'{obj} is not a method of an analysis class')
    else:
-        on_trace_name = f'trace.ana.{cls.name}.{obj.__name__}'
-        return f"*Called on* :class:`~lisa.trace.Trace` *instances as* ``{on_trace_name}()``"
+        raise ValueError(f'{obj} is not a method')
 
 
 def autodoc_process_analysis_methods(app, what, name, obj, options, lines):
@@ -2087,7 +2084,7 @@ def autodoc_process_analysis_methods(app, what, name, obj, options, lines):
    Append the list of required trace events
    """
    try:
-        extra_doc = ana_invocation(obj)
+        extra_doc = ana_invocation(obj, name)
    except ValueError:
        pass
    else:
diff --git a/lisa/analysis/base.py b/lisa/analysis/base.py
index fe40cedb380d59229ab94610f4905feeea2c5db1..b8b340dd9a312aa853a6175f3b55a9e0008050e7 100644
--- a/lisa/analysis/base.py
+++ b/lisa/analysis/base.py
@@ -46,7 +46,7 @@
 import polars as pl
 import pandas as pd
 
-from lisa.utils import Loggable, deprecate, get_doc_url, get_short_doc, get_subclasses, guess_format, is_running_ipython, measure_time, memoized, update_wrapper_doc, _import_all_submodules, optional_kwargs
+from lisa.utils import Loggable, deprecate, get_doc_url, get_short_doc, get_subclasses, guess_format, is_running_ipython, measure_time, memoized, update_wrapper_doc, _import_all_submodules, optional_kwargs, get_parent_namespace
 from lisa.trace import _CacheDataDesc
 from lisa.notebook import _hv_fig_to_pane, _hv_link_dataframes, _hv_has_options, axis_cursor_delta, axis_link_dataframes, make_figure
 from lisa.datautils import _df_to, _pandas_cleanup_df
@@ -54,6 +54,22 @@ from lisa.datautils import _df_to, _pandas_cleanup_df
 # Ensure hv.extension() is called
 import lisa.notebook
 
 
+# Make sure we associate each plot method with a single wrapped object, so that
+# the resulting wrapper can be used as a key in dictionaries.
+@functools.lru_cache(maxsize=None, typed=True)
+def _wrap_plot_method(cls, f):
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+        return f(*args, **kwargs)
+
+    # Wrap the method so that we record the actual class it was
+    # looked up on, rather than the base class it happens to be
+    # defined in.
+    wrapper.__qualname__ = f'{cls.__qualname__}.{f.__name__}'
+    wrapper.__module__ = cls.__module__
+    return wrapper
+
+
 class AnalysisHelpers(Loggable, abc.ABC):
    """
@@ -393,7 +409,7 @@ class AnalysisHelpers(Loggable, abc.ABC):
 
    @classmethod
    def _get_doc_methods(cls, prefix, instance=None, ignored=None):
-        ignored = set(ignored) or set()
+        ignored = set(ignored or [])
        obj = instance if instance is not None else cls
 
        def predicate(f):
@@ -410,7 +426,7 @@ class AnalysisHelpers(Loggable, abc.ABC):
        )
 
        return [
-            f
+            _wrap_plot_method(cls, f)
            for name, f in inspect.getmembers(obj, predicate=predicate)
            if f not in ignored
        ]
@@ -1368,20 +1384,15 @@ class TraceAnalysisBase(AnalysisHelpers):
        it and call the resulting bound method with ``meth_kwargs`` extra
        keyword arguments.
        """
-        for subcls in cls.get_analysis_classes().values():
-            for name, f in inspect.getmembers(subcls):
-                if f is meth:
-                    break
-            else:
-                continue
-            break
+        classes = cls.get_analysis_classes().values()
+        subcls = get_parent_namespace(meth)
+        if subcls in classes:
+            # Create an analysis instance and bind the method to it
+            analysis = subcls(trace=trace)
+            meth = meth.__get__(analysis, type(analysis))
+
+            return meth(**meth_kwargs)
        else:
-            raise ValueError(f'{meth.__qualname__} is not a method of any subclasses of {cls.__qualname__}')
-
-        # Create an analysis instance and bind the method to it
-        analysis = subcls(trace=trace)
-        meth = meth.__get__(analysis, type(analysis))
-
-        return meth(**meth_kwargs)
+            raise ValueError(f'Parent class of {meth} is not a registered analysis')
 
 # vim :set tabstop=4 shiftwidth=4 expandtab textwidth=80