diff --git a/.gitignore b/.gitignore index d5bbadf532f3ac88cbb7940c2f6b0273618c4267..7185698e24e37f9851c4df5c86dd53b4910458cc 100644 --- a/.gitignore +++ b/.gitignore @@ -12,7 +12,6 @@ server.url /vagrant /tools/wa_user_directory/dependencies /src/buildroot -_build custom_requirements.txt devmode_extra_requirements.txt extra_requirements.txt diff --git a/doc/.gitignore b/doc/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..604f5a62985f157b43a99a079e79972293dff02b --- /dev/null +++ b/doc/.gitignore @@ -0,0 +1,3 @@ +_build +sections/api/generated/ + diff --git a/doc/conf.py b/doc/conf.py index a8d81f2eab3e6f6630bfefa24d75bb2046429664..a7662ca7d3aa675cd5c07e1edc070bc58f301293 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -25,9 +25,18 @@ import importlib import types import contextlib from pathlib import Path +import importlib +from operator import attrgetter +import pickle +import shutil from sphinx.domains.python import PythonDomain + +# Signal to lisa.utils.is_running_sphinx() that we are indeed running under +# sphinx before we import anything +os.environ['_LISA_DOC_SPHINX_RUNNING'] = '1' + # This shouldn't be needed, as using a virtualenv + setup.py should set up the # sys.path correctly. However that seems to be half broken on ReadTheDocs, so # manually set it here @@ -35,34 +44,35 @@ sys.path.insert(0, os.path.abspath('../')) # Import our packages after modifying sys.path import lisa -from lisa.utils import import_all_submodules, sphinx_nitpick_ignore +from lisa.utils import sphinx_nitpick_ignore, setup_logging, get_obj_name, DirCache from lisa._doc.helpers import ( autodoc_process_test_method, autodoc_process_analysis_events, autodoc_process_analysis_plots, autodoc_process_analysis_methods, - autodoc_skip_member_handler, - DocPlotConf, get_xref_type, autodoc_pre_make_plots + autodoc_skip_member_handler, autodoc_process_inherited_members, + autodoc_process_inherited_signature, autodoc_process_bases_handler, + DocPlotConf, autodoc_pre_make_plots, + intersphinx_warn_missing_reference_handler, ) -# Do not rely on LISA_HOME as it may not be set and will default to current -# folder, which is not what we want here. -HOME = Path(__file__).parent.parent.resolve() +import devlib + +def prepare(home, enable_plots): + # This ugly hack is required because by default TestCase.__module__ is + # equal to 'case', so sphinx replaces all of our TestCase uses to + # unittest.case.TestCase, which doesn't exist in the doc. + for name, obj in vars(unittest).items(): + try: + m = obj.__module__ + obj.__module__ = 'unittest' if m == 'unittest.case' else m + except Exception: + pass -# This ugly hack is required because by default TestCase.__module__ is -# equal to 'case', so sphinx replaces all of our TestCase uses to -# unittest.case.TestCase, which doesn't exist in the doc. 
-for name, obj in vars(unittest).items(): - try: - m = obj.__module__ - obj.__module__ = 'unittest' if m == 'unittest.case' else m - except Exception: - pass -def prepare(): def run(cmd, **kwargs): return subprocess.run( cmd, - cwd=HOME, + cwd=home, **kwargs, ) @@ -86,7 +96,7 @@ def prepare(): # If LISA_HOME is set, sourcing the script won't work source_env.pop('LISA_HOME', None) - init_env = HOME / 'init_env' + init_env = home / 'init_env' script = textwrap.dedent( f""" source {init_env} >&2 && @@ -95,18 +105,85 @@ def prepare(): ) out = subprocess.check_output( ['bash', '-c', script], - cwd=HOME, + cwd=home, # Reset the environment, including LISA_HOME to allow sourcing without # any issue env=source_env, ) os.environ.update(json.loads(out)) -# Only the top-level import has the "builtins" __name__. This prevents -# re-running prepare() when conf.py is imported by the processes spawned by -# sphinx -if __name__ == 'builtins': - prepare() + # Re-run the notebook to ensure the version of bokeh used is the same as + # the one that will be added via html_js_files. Otherwise, the plot display + # will be broken. + notebooks_in_base = Path(home, 'ipynb') + notebooks = [ + 'examples/analysis_plots.ipynb', + ] + if enable_plots: + def populate(key, temp_path): + # We pre-generate all the plots, otherwise we would end up running + # polars code in a multiprocessing subprocess created by forking + # CPython, leading to deadlocks: + # https://github.com/sphinx-doc/sphinx/issues/12201 + hv.extension('bokeh') + + plot_conf_path = Path(home, 'doc', 'plot_conf.yml') + plot_conf = DocPlotConf.from_yaml_map(plot_conf_path) + plots = autodoc_pre_make_plots(plot_conf) + with open(temp_path / 'plots.pickle', 'wb') as f: + pickle.dump(plots, f) + + for _path in notebooks: + in_path = notebooks_in_base / _path + out_path = temp_path / 'ipynb' / _path + out_path.parent.mkdir(parents=True, exist_ok=True) + try: + out_path.unlink() + except FileNotFoundError: + pass + logging.info(f'Refreshing notebook: {in_path}') + subprocess.check_call([ + 'jupyter', + 'nbconvert', + in_path, + '--execute', + '--to=notebook', + f'--output={out_path}' + ]) + + import holoviews as hv + import bokeh + import panel as pn + import jupyterlab + + dir_cache = DirCache('doc_plots', populate=populate) + key = ( + hv.__version__, + bokeh.__version__, + pn.__version__, + jupyterlab.__version__, + ) + cache_path = dir_cache.get_entry(key) + with open(cache_path / 'plots.pickle', 'rb') as f: + plots = pickle.load(f) + + for _path in notebooks: + shutil.copy2( + cache_path / 'ipynb' / _path, + Path(home, 'doc', 'workflows', 'ipynb') / _path, + ) + + else: + plots = {} + for _path in notebooks: + shutil.copy2( + notebooks_in_base / _path, + Path(home, 'doc', 'workflows', 'ipynb') / _path, + ) + + return plots + + # -- General configuration ------------------------------------------------ @@ -117,6 +194,7 @@ if __name__ == 'builtins': # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ + 'lisa._doc.helpers', 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', @@ -129,22 +207,17 @@ extensions = [ 'nbsphinx', ] -# Fix for the broken flyout ReadTheDocs menu as recommended here: -# https://github.com/readthedocs/sphinx_rtd_theme/issues/1452#issuecomment-1490504991 -# https://github.com/readthedocs/readthedocs.org/issues/10242 -# https://github.com/readthedocs/sphinx_rtd_theme/issues/1452 -# https://github.com/readthedocs/sphinx_rtd_theme/pull/1448 RTD = (os.getenv('READTHEDOCS') == 'True') if RTD: - extensions.append( - "sphinxcontrib.jquery" - ) + pass # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = { + '.rst': 'restructuredtext', +} # The encoding of source files. #source_encoding = 'utf-8-sig' @@ -154,7 +227,7 @@ master_doc = 'index' # General information about the project. project = 'LISA' -copyright = '2017, ARM-Software' +copyright = '2024, ARM-Software' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -206,7 +279,7 @@ pygments_style = 'sphinx' # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'sphinx_rtd_theme' +html_theme = 'pydata_sphinx_theme' # Allow interactive bokeh plots in the documentation @@ -221,7 +294,12 @@ else: # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} + +# For pydata theme +html_theme_options = { + # Increase show_toc_level value to get API listings in sidebar + "show_toc_level": 2, +} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] @@ -397,6 +475,13 @@ autoclass_content = 'both' autodoc_member_order = 'bysource' autodoc_default_options = { + # Show the members of the documented entity (e.g. class or module) + 'members': None, + # autodoc_process_inherited_members() will replace docstrings for each + # inherited member instance with a stub that links to the definition in the + # base class. That sidesteps the issue of the docstring of the inherited + # member not being valid reST. + 'inherited-members': None, # Show parent class 'show-inheritance': None, # Show members even if they don't have docstrings @@ -413,6 +498,7 @@ autodoc_default_options = { '__init__', # Uninteresting + '__doc__', '__weakref__', '__module__', '__abstractmethods__', @@ -420,62 +506,17 @@ autodoc_default_options = { '__eq__', '__str__', '__repr__', - '__iter__', - '__len__', '__dict__', + '__annotations__', + '__instance_dir__', ]) } autodoc_inherit_docstrings = True -ignored_refs = { - # They don't have a separate doc yet - r'lisa_tests.*', - - # gi.repository is strangely laid out, and the module in which Variant - # (claims) to actually be defined in is not actually importable it seems - r'gi\..*', - - # Devlib does not use autodoc (for now) and does not use module.qualname - # names, which makes all xref to it fail - r'devlib.*', - r'docutils\.parsers.*', - r'ipywidgets.*', - - # Since trappy is not always installed, just hardcode the references we - # have since there wont be more in the future. 
- r'trappy.*', - - # All private "things": either having a ._ somewhere in their full name or - # starting with an underscore - r'(.*\._.*|_.*)', - - # Various LISA classes that cannot be crossed referenced successfully but - # that cannot be fixed because of Sphinx limitations and external - # constraints on names. - r'ITEM_CLS', - - # Python <= 3.8 has a formatting issue in typing.Union[..., None] that - # makes it appear as typing.Union[..., NoneType], leading to a broken - # reference since the intersphinx inventory of the stdlib does not provide - # any link for NoneType. - r'NoneType', - - # Sphinx currently fails at finding the target for references like - # :class:`typing.List[str]` since it does not seem to have specific support - # for the bracketed syntax in that role. - r'typing.*', - - - # Polars intersphinx inventory is incomplete: - # https://github.com/pola-rs/polars/issues/7027 - # https://docs.pola.rs/py-polars/html/objects.inv - r'polars.*', +non_ignored_refs = { + r'lisa\..*', } -ignored_refs.update( - re.escape(f'{x.__module__}.{x.__qualname__}') - for x in sphinx_nitpick_ignore() -) -ignored_refs = set(map(re.compile, ignored_refs)) +non_ignored_refs = set(map(re.compile, non_ignored_refs)) # Workaround for: https://github.com/jupyter-widgets/ipywidgets/issues/3930 @@ -485,52 +526,58 @@ suppress_warnings = [ ] -class CustomPythonDomain(PythonDomain): - def find_obj(self, env, modname, classname, name, type, searchmode=0): - refs = super().find_obj(env, modname, classname, name, type, searchmode) - if len(refs) == 1: - return refs - elif any( - regex.match(name) - for regex in ignored_refs - ): - refs = super().find_obj(env, modname, classname, 'lisa._doc.helpers.PlaceHolderRef', 'class', 0) - assert refs - return refs - else: - return refs +# Workaround for: https://github.com/sphinx-doc/sphinx/issues/11279 +viewcode_follow_imported_members = False def setup(app): - app.add_domain(CustomPythonDomain, override=True) + setup_logging(level=logging.INFO) - # We pre-generate all the plots, otherwise we would end up running polars - # code in a multiprocessing subprocess created by forking CPython, leading - # to deadlocks: - # https://github.com/sphinx-doc/sphinx/issues/12201 - if int(os.environ.get('LISA_DOC_BUILD_PLOT', '1')): - import holoviews as hv - hv.extension('bokeh') + # Do not rely on LISA_HOME as it may not be set and will default to current + # folder, which is not what we want here. + home = Path(__file__).parent.parent.resolve() - plot_conf_path = os.path.join(HOME, 'doc', 'plot_conf.yml') - plot_conf = DocPlotConf.from_yaml_map(plot_conf_path) - plots = autodoc_pre_make_plots(plot_conf) - else: - plots = {} + enable_plots = bool(int(os.environ.get('LISA_DOC_BUILD_PLOT', '1'))) + + plots = prepare( + home=home, + enable_plots=enable_plots, + ) _autodoc_process_analysis_plots_handler = functools.partial( autodoc_process_analysis_plots, - plots=plots, + plots={ + get_obj_name(x): fig + for x, fig in plots.items() + } ) _autodoc_skip_member_handler = functools.partial( autodoc_skip_member_handler, default_exclude_members=autodoc_default_options.get('exclude-members') ) + _intersphinx_warn_missing_reference_handler = functools.partial( + intersphinx_warn_missing_reference_handler, + non_ignored_refs=non_ignored_refs, + ) + # Use a custom class so that a confused user could easily find it back in + # the sources. 
+    class ExecState:
+        def __init__(self, plots):
+            self.plots = plots
+
+    app.connect('warn-missing-reference', _intersphinx_warn_missing_reference_handler, priority=0)
     app.connect('autodoc-process-docstring', autodoc_process_test_method)
-    app.connect('autodoc-process-docstring', autodoc_process_analysis_events)
     app.connect('autodoc-process-docstring', autodoc_process_analysis_methods)
-    app.connect('autodoc-skip-member', _autodoc_skip_member_handler)
+    app.connect('autodoc-process-docstring', autodoc_process_analysis_events)
     app.connect('autodoc-process-docstring', _autodoc_process_analysis_plots_handler)
+    # Applied at the end so that the whole docstring content can be replaced in one go
+    app.connect('autodoc-process-docstring', autodoc_process_inherited_members)
+    app.connect('autodoc-process-signature', autodoc_process_inherited_signature)
+    app.connect('autodoc-process-bases', autodoc_process_bases_handler)
+
+    app.connect('autodoc-skip-member', _autodoc_skip_member_handler)
+    app.connect('lisa-exec-state', lambda app: ExecState(plots=plots))
+
 # vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab:
diff --git a/doc/contributors_guide.rst b/doc/contributors_guide.rst
index df3fc7a811c8fdb77aa5191ab79f54261b7f9ff0..9bd472221ca342d0f63df091a20e396e2961f8b2 100644
--- a/doc/contributors_guide.rst
+++ b/doc/contributors_guide.rst
@@ -30,6 +30,11 @@ fashion to the typical GitHub workflow:
    "Change branches" link next to "From XXX into main". Click this link and
    select "tooling/lisa" in the "Target branch" project drop down. ⚠️
 
+Merge requests that consist primarily of style reformatting will be closed
+without comment unless the matter was previously discussed with the
+maintainer. Note that such a discussion is expected to be backed by technical
+arguments: merely stating opinions or appealing to authority will bring the
+discussion to an end.
 
 How to reach us
 ===============
@@ -52,6 +57,9 @@ code.
 Documentation
 =============
 
+Docstring style
++++++++++++++++
+
 Docstring documentation should follow the ReST/Sphinx style.
 Classes, class attributes and public methods must be documented. If deemed
 necessary, private methods can be documented as well.
@@ -93,6 +101,40 @@ introduced by ``**Example**::`` and located:
 
 * In the class docstring if they involve multiple methods of the class.
 * In the method/function otherwise.
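+
+For instance, a function docstring following these conventions could look
+like this (an illustrative sketch, the function itself is made up):
+
+.. code-block:: python
+
+    # Hypothetical helper, only here to illustrate the docstring style.
+    def df_window(df, window):
+        """
+        Return the subset of ``df`` inside the given time ``window``.
+
+        :param df: Dataframe to slice.
+        :type df: pandas.DataFrame
+
+        :param window: ``(start, end)`` tuple of timestamps.
+        :type window: tuple(float, float)
+
+        **Example**::
+
+            sliced = df_window(df, (1.5, 4.2))
+        """
+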
+How to build
+++++++++++++
+
+- Install ``doc`` optional dependencies of ``lisa`` package (``lisa-install``
+  does that by default)
+- Run:
+
+  .. code:: shell
+
+      lisa-doc-build
+
+- Find the HTML in ``doc/_build/html``
 
 Commits
 =======
@@ -184,18 +204,9 @@ self-tests. It's a nice way to showcase that your code works, and also how it
 works. On top of that, it makes sure that later changes won't break it.
 
 It's possible to write tests that require a live target - see
-:meth:`~tests.utils.create_local_target`. However, as these tests are meant to
-be run by Github Actions as part of our pull-request validation, they have to
-be designed to work on a target with limited privilege.
-
-API
-+++
-
-Utilities
----------
-
-.. automodule:: tests.utils
-   :members:
+``create_local_target()``. However, as these tests are meant to be run by the
+CI as part of our pull-request validation, they have to be designed to work on
+a target with limited privileges.
 
 
 Updating binary tools
@@ -204,5 +215,5 @@ Updating binary tools
 LISA comes with a number of prebuilt static binaries in
 ``lisa/_assets/binaries``. They are all built according to recipes in
 ``tools/recipes/``, and can be re-built and installed using e.g.:
-``lisa-build-asset trace-cmd --native-build``. See ``lisa-build-asset --help``
-for more options.
+``lisa-build-asset trace-cmd``. See ``lisa-build-asset --help`` for more
+options.
diff --git a/doc/energy_analysis.rst b/doc/energy_analysis.rst
index c1fbe1368f0344e85a6dc6506883b27835d35578..eb3e5e97af3d07d779c5439895582c8f8413992b 100644
--- a/doc/energy_analysis.rst
+++ b/doc/energy_analysis.rst
@@ -203,14 +203,5 @@ commands:
 API
 ===
 
-Energy model
-++++++++++++
-
-.. automodule:: lisa.energy_model
-   :members:
-
-Energy meters
-+++++++++++++
-
-.. automodule:: lisa.energy_meter
-   :members:
+* Energy model: :mod:`lisa.energy_model`
+* Energy meters: :mod:`lisa.energy_meter`
diff --git a/doc/exekall b/doc/exekall
index aa5ea274a902b52bf7d8ece77cdf7bc28d34d75b..aa7c8b497f8b8e6208f27adb097d5bd2df0d0647 120000
--- a/doc/exekall
+++ b/doc/exekall
@@ -1 +1 @@
-../tools/exekall/doc/
\ No newline at end of file
+../tools/exekall/doc
\ No newline at end of file
diff --git a/doc/getting_started.rst b/doc/getting_started.rst
new file mode 100644
index 0000000000000000000000000000000000000000..daf5e1d44f4086fd854277c4be83c81b631d7cf4
--- /dev/null
+++ b/doc/getting_started.rst
@@ -0,0 +1,11 @@
+.. _getting-started-page:
+
+Getting Started
+===============
+
+.. toctree::
+    :maxdepth: 2
+
+    ../../setup
+
+
diff --git a/doc/images/overview.png b/doc/images/overview.png
deleted file mode 100644
index 5ab1351488149d9524afdadf09d0ea83891120b2..0000000000000000000000000000000000000000
Binary files a/doc/images/overview.png and /dev/null differ
diff --git a/doc/index.rst b/doc/index.rst
index 32b9a9dd0c3ef18a789dd6831692c05f8cff54b5..68dd94ae88b9bdae8abe569be08ec185c09dbf5f 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -3,76 +3,48 @@
    You can adapt this file completely to your liking, but it should at least
    contain the root `toctree` directive.
 
-.. module:: lisa
-
-LISA Documentation
-==================
-
-LISA - "Linux Integrated System Analysis" is a toolkit for interactive analysis
-and automated regression testing of Linux kernel behaviour.
-
-- See the README on the project's `GitLab home page`__ for an overview.
-- Once you have LISA running, take a look at the tutorial and example notebooks
-  included with the installation.
-
-__ https://gitlab.arm.com/tooling/lisa
-
-Contributions to LISA and its documentation are very welcome, and handled
-via GitLab merge requests.
-
-.. _Readme:
-
-Contents:
-
 .. toctree::
-   :maxdepth: 2
-
-   ────── Getting started ──────
+   :maxdepth: 2
+   :hidden:
 
-   overview
-   setup
-   workflows/index
+   sections/guides/index
+   sections/tools/index
+   sections/api/index
+   sections/changes/index
 
-   ──── Tools documentation ────
-
-   lisa_shell/man/man
-   bisector/index
-   exekall/index
-
-   ───── API documentation ─────
-
-   target
-   workloads
-   kernel_tests
-   trace_analysis
-   energy_analysis
-   misc_utilities
-   stat_comparison
-
-   ──────── Changes ────────
-
-   changes
-   contributors_guide
-
-
-Indices and tables
+LISA Documentation
 ==================
 
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
-
-
-Building this documentation
-==============================
+Welcome to the LISA documentation. LISA - "Linux Integrated System Analysis" is
+a toolkit for interactive analysis and automated regression testing of Linux
+kernel behaviour. LISA's goal is to help Linux kernel developers measure the
+impact of modifications in core parts of the kernel. The focus is on the
+scheduler (e.g. EAS), power management and thermal frameworks. However, LISA
+is generic and can be used for other purposes.
 
-- Install ``doc`` optional dependencies of ``lisa`` package (``lisa-install``
-  does that by default)
-- Run:
+LISA has a "host"/"target" model. LISA itself runs on a host machine, and uses
+the :mod:`devlib` package to interact with the target via SSH or ADB. LISA is
+flexible with regard to the target OS; its only expectation is a Linux
+kernel-based system. Android, GNU/Linux and busybox style systems have all been
+used.
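+
+For instance, connecting to a target could look like this (an illustrative
+sketch; the configuration file name is made up, see :mod:`lisa.target` for
+the reference documentation):
+
+.. code-block:: python
+
+    from lisa.target import Target, TargetConf
+
+    # Load the connection settings (platform kind, host, credentials, ...)
+    # from a YAML file, then connect to the target.
+    target = Target.from_conf(TargetConf.from_yaml_map('target_conf.yml'))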
 
-  .. code:: shell
+LISA provides features to describe workloads (notably using rt-app) and run
+them on targets. It can collect trace files from the target OS (e.g. ftrace
+traces). These traces can then be parsed and analysed in order to examine
+detailed target behaviour during the workload's execution.
 
-      source init_env
-      lisa-doc-build
+See :ref:`getting-started-page` for setup instructions.
 
-- Find the HTML in ``doc/_build/html``
+See https://gitlab.arm.com/tooling/lisa for the source repository.
diff --git a/doc/kernel_tests.rst b/doc/kernel_tests.rst
index f49dc33716db54439e01cf449eb32fcd2d473d66..c1062abbe9e4e075106455384773ae8f5a571c55 100644
--- a/doc/kernel_tests.rst
+++ b/doc/kernel_tests.rst
@@ -184,8 +184,4 @@ It can be executed using:
 API
 ===
 
-Base classes
-++++++++++++
-
-.. automodule:: lisa.tests.base
-   :members:
+See :mod:`lisa.tests.base`.
diff --git a/doc/man1/bisector.1 b/doc/man1/bisector.1
index fb07430984fe3eea817bdd25de75bfe52385548e..ae8b2cf55ad6557a6f58f6ac47ba86eb82edeeea 100644
--- a/doc/man1/bisector.1
+++ b/doc/man1/bisector.1
@@ -68,8 +68,7 @@ reporting, and additional run behaviors.
 .INDENT 0.0
 .INDENT 3.5
 .sp
-.nf
-.ft C
+.EX
 usage: bisector [\-h] [\-\-cli\-options CLI_OPTIONS]
                 {step\-help,run,report,edit,monitor\-server,monitor} ...
 
@@ -101,16 +100,14 @@ optional arguments:
 
 subcommands:
   {step\-help,run,report,edit,monitor\-server,monitor}
-.ft P
-.fi
+.EE
 .UNINDENT
 .UNINDENT
 .SS bisector run
 .INDENT 0.0
 .INDENT 3.5
 .sp
-.nf
-.ft C
+.EX
 usage: bisector run [\-h] [\-\-cli\-options CLI_OPTIONS] [\-o OPTION] [\-\-debug]
                     [\-\-steps STEPS] [\-\-allowed\-bad ALLOWED_BAD] [\-\-skip SKIP]
                     [\-\-only ONLY] [\-\-git\-clean] [\-\-inline CLASS NAME]
@@ -199,16 +196,14 @@ optional arguments:
                         installed.
   \-\-no\-dbus            Disable DBus even when pydbus module is available.
-.ft P
-.fi
+.EE
 .UNINDENT
 .UNINDENT
 .SS bisector report
 .INDENT 0.0
 .INDENT 3.5
 .sp
-.nf
-.ft C
+.EX
 usage: bisector report [\-h] [\-\-cli\-options CLI_OPTIONS] [\-o OPTION] [\-\-debug]
                        [\-\-steps STEPS] [\-\-allowed\-bad ALLOWED_BAD]
                        [\-\-skip SKIP] [\-\-only ONLY] [\-\-export EXPORT] [\-\-cache]
@@ -259,8 +254,7 @@ optional arguments:
                         file is modified. This is mostly useful when working
                         with big YAML files that are long to load.
-.ft P
-.fi
+.EE
 .UNINDENT
 .UNINDENT
 .SS bisector step\-help
 .sp
@@ -269,8 +263,7 @@ Steps\-specific options to be used with \fBbisector run \-o\fP and \fBbisector r
 .INDENT 0.0
 .INDENT 3.5
 .sp
-.nf
-.ft C
+.EX
 LISA\-test (test)
 \-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-
 
@@ -788,16 +781,14 @@ yield (yield)
 
 
-.ft P
-.fi
+.EE
 .UNINDENT
 .UNINDENT
 .SS bisector monitor
 .INDENT 0.0
 .INDENT 3.5
 .sp
-.nf
-.ft C
+.EX
 usage: bisector monitor [\-h] [\-\-cli\-options CLI_OPTIONS] [\-\-status]
                         [\-\-prop PROP] [\-\-list]
                         [\-\-pause | \-\-stop | \-\-continue | \-\-kill | \-\-log | \-\-report ... | \-\-notif enable/disable PROPERTY]
@@ -834,16 +825,14 @@ optional arguments:
                         given property changes.
\(aqall\(aq will select all properties. -.ft P -.fi +.EE .UNINDENT .UNINDENT .SS bisector monitor\-server .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX usage: bisector monitor\-server [\-h] [\-\-cli\-options CLI_OPTIONS] [\-\-notif enable/disable PROPERTY] @@ -865,16 +854,14 @@ optional arguments: given property changes. \(aqall\(aq will select all properties. -.ft P -.fi +.EE .UNINDENT .UNINDENT .SS bisector edit .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX usage: bisector edit [\-h] [\-o OPTION] [\-\-debug] [\-\-steps STEPS] report Modify the properties of the steps in an existing report. @@ -898,8 +885,7 @@ optional arguments: classes if necessary. It is otherwise ignored and \-\-option must be used to edit the report. -.ft P -.fi +.EE .UNINDENT .UNINDENT .SH CONFIGURATION @@ -913,8 +899,7 @@ The YAML file is structured as following: .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX # Top\-level \(dqsteps\(dq key is important as the same file can be used to host other # information. steps: @@ -952,8 +937,7 @@ steps: use\-systemd\-run: true timeout: 3600 cmd: lisa\-test \(aqOneSmallTask*\(aq -.ft P -.fi +.EE .UNINDENT .UNINDENT .sp @@ -983,8 +967,7 @@ A typical flow of \fBbisector\fP looks like that: .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX # Run the steps and generate a report. # systemd\-run will be used for all steps by using \(dq\-o\(dq without specifying a step name or category. @@ -1037,8 +1020,7 @@ bisector report bisector.report.yml.gz \-oexport\-logs=logs # \-o .XXXX=YYYY # This command will only show iteration #2 for the eas_behaviour step bisector report bisector.report.yml.gz \-oeas_behaviour.iterations=2 -.ft P -.fi +.EE .UNINDENT .UNINDENT .SH REFERENCES diff --git a/doc/man1/exekall.1 b/doc/man1/exekall.1 index f20b9cb4ec6a2a37a1a21b5cf5abbcf245ae69b0..dbfb2139fa3936834b70d3ad4986ea3f260b32f5 100644 --- a/doc/man1/exekall.1 +++ b/doc/man1/exekall.1 @@ -39,8 +39,7 @@ discovered from Python PEP 484 parameter and return value annotations. .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX usage: exekall [\-h] [\-\-debug] {run,merge,compare,show} ... Test runner @@ -57,16 +56,14 @@ optional arguments: subcommands: {run,merge,compare,show} -.ft P -.fi +.EE .UNINDENT .UNINDENT .SS exekall run .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX usage: exekall run [\-h] [\-\-dependency DEPENDENCY] [\-s ID_PATTERN] [\-\-list] [\-n N] [\-\-load\-db LOAD_DB] [\-\-load\-type TYPE_PATTERN] [\-\-replay REPLAY | \-\-load\-uuid LOAD_UUID] @@ -180,16 +177,14 @@ advanced arguments: \-\-adaptor ADAPTOR Adaptor to use from the customization module, if there is more than one to choose from. -.ft P -.fi +.EE .UNINDENT .UNINDENT .SS exekall compare .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX usage: exekall compare [\-h] db db Compare two DBs produced by exekall run. @@ -208,16 +203,14 @@ positional arguments: optional arguments: \-h, \-\-help show this help message and exit -.ft P -.fi +.EE .UNINDENT .UNINDENT .SS exekall show .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX usage: exekall show [\-h] db Show the content of a ValueDB created by exekall \(ga\(garun\(ga\(ga @@ -236,16 +229,14 @@ positional arguments: optional arguments: \-h, \-\-help show this help message and exit -.ft P -.fi +.EE .UNINDENT .UNINDENT .SS exekall merge .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX usage: exekall merge [\-h] \-o OUTPUT [\-\-copy] artifact_dirs [artifact_dirs ...] Merge artifact directories of \(dqexekall run\(dq executions. @@ -269,8 +260,7 @@ optional arguments: contains subexpression\(aqs values. 
\-\-copy Force copying files, instead of using hardlinks. -.ft P -.fi +.EE .UNINDENT .UNINDENT .SH EXECUTING EXPRESSIONS @@ -352,13 +342,11 @@ same object within any given expression. Executing the IDs \fBg:f(p2=g)\fP and .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX x = g() res1 = f(x, p2=x) res2 = h(x) -.ft P -.fi +.EE .UNINDENT .UNINDENT .sp @@ -454,8 +442,7 @@ would be: .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX import abc class BaseConf(abc.ABC): @abc.abstractmethod @@ -493,8 +480,7 @@ def process1(x:Stage1) \-> Stage2: def process2(x:Stage1, conf:BaseConf, has_default_val=33) \-> Stage2: return Stage2(x.conf.x == 0) -.ft P -.fi +.EE .UNINDENT .UNINDENT .sp @@ -510,8 +496,7 @@ Haskell\(aqs associated type families is supported: .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX import typing class Base: @@ -529,8 +514,7 @@ class Derived1(Base): class Derived2(Base): X = \(aqhello\(aq ASSOCIATED_CLS = type(X) -.ft P -.fi +.EE .UNINDENT .UNINDENT .sp diff --git a/doc/man1/lisa.1 b/doc/man1/lisa.1 index 335746a7181214b383632068540211f7fa064ea8..f5fd59c226fe5feb43d8ff62404dd54cf38a632b 100644 --- a/doc/man1/lisa.1 +++ b/doc/man1/lisa.1 @@ -1,3 +1,4 @@ +'\" t .\" Man page generated from reStructuredText. . . @@ -44,11 +45,9 @@ In order to use the shell, source the script: .INDENT 0.0 .INDENT 3.5 .sp -.nf -.ft C +.EX source init_env -.ft P -.fi +.EE .UNINDENT .UNINDENT .sp @@ -104,9 +103,8 @@ subtree. All other options are passed to \fIgit log\fP\&. .INDENT 3.5 Usage: \fBlisa\-jupyter CMD [NETIF [PORT]]\fP .TS -center; -|l|l|. -_ +box center; +l|l. T{ \fICMD\fP T} T{ @@ -125,7 +123,6 @@ T{ T} T{ the tcp port for the server (default: 8888) T} -_ .TE .UNINDENT .UNINDENT @@ -184,9 +181,8 @@ interpolated. .sp The following environment variables are available: .TS -center; -|l|l|l|. -_ +box center; +l|l|l. T{ EXEKALL_ARTIFACT_ROOT T} T{ @@ -286,7 +282,6 @@ Path to venv to be used by lisa\-install T} T{ \&.lisa\-venv\- T} -_ .TE .sp If an environment variable is defined prior to sourcing \fBinit_env\fP, it will diff --git a/doc/misc_utilities.rst b/doc/misc_utilities.rst deleted file mode 100644 index 18ae53ecb64b51b1ae75f8549c4acb329f92f4c7..0000000000000000000000000000000000000000 --- a/doc/misc_utilities.rst +++ /dev/null @@ -1,87 +0,0 @@ -*********************** -Miscellaneous utilities -*********************** - -Configuration management -======================== - -Configuration files are managed by subclasses of -:class:`lisa.conf.MultiSrcConf`. It allows loading from a YAML file (not to be -confused with serializing the instance). - - -.. automodule:: lisa.conf - :members: - -Regression testing -================== - -.. automodule:: lisa.regression - :members: - -Utilities -========= - -.. automodule:: lisa.utils - :members: - -Kernel modules -============== - -.. automodule:: lisa._kmod - :members: - -Generic types -============= - -.. automodule:: lisa._generic - :members: - -Typeclasses -=========== - -.. automodule:: lisa._typeclass - :members: - -Monads -====== - -.. automodule:: lisa.monad - :members: - -Fuzzing -======= - -.. automodule:: lisa.fuzz - :members: - -Dataframe and Series handling utilities -======================================= - -.. automodule:: lisa.datautils - :members: - -Interactive notebooks utilities -=============================== - -.. automodule:: lisa.notebook - :members: - -PELT signals simulations -======================== - -.. automodule:: lisa.pelt - :members: - -Sphinx documentation -==================== - -.. 
automodule:: lisa._doc.helpers
-   :members:
-
-
-Version
-=======
-
-.. automodule:: lisa.version
-   :members:
diff --git a/doc/overview.rst b/doc/overview.rst
deleted file mode 100644
index bf07c4d5eca2b3528aff0c43f97e0037522fd50b..0000000000000000000000000000000000000000
--- a/doc/overview.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-********
-Overview
-********
-
-The LISA project provides a toolkit that supports regression testing and
-interactive analysis of Linux kernel behavior. LISA stands for Linux
-Integrated/Interactive System Analysis. LISA's goal is to help Linux kernel
-developers measure the impact of modifications in core parts of the
-kernel. The focus is on the scheduler (e.g. EAS), power management and thermal
-frameworks. However LISA is generic and can be used for other purposes.
-
-LISA has a "host"/"target" model. LISA itself runs on a host machine, and uses
-the devlib toolkit to interact with the target via SSH, ADB or telnet. LISA is
-flexible with regard to the target OS; its only expectation is a Linux
-kernel-based system. Android, GNU/Linux and busybox style systems have all been
-used.
-
-LISA provides features to describe workloads (notably using rt-app) and run
-them on targets. It can collect trace files from the target OS (e.g. systrace
-and ftrace traces) and parse them. These traces can then be parsed and analysed
-in order to examine detailed target behaviour during the workload's execution.
-
-.. image:: images/overview.png
diff --git a/doc/sections/api/index.rst b/doc/sections/api/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..ac623c4265df0b1597b8f2f4c45d98f05c0f13c9
--- /dev/null
+++ b/doc/sections/api/index.rst
@@ -0,0 +1,9 @@
+*************
+API Reference
+*************
+
+This is the reference documentation of all the APIs shipped in the ``lisa``
+repository. The main package is :mod:`lisa`.
+
+.. toctree::
+    lisa
diff --git a/doc/sections/api/lisa.rst b/doc/sections/api/lisa.rst
new file mode 100644
index 0000000000000000000000000000000000000000..bf29bb84fc24047df596cc141ff6f79ac59acaf2
--- /dev/null
+++ b/doc/sections/api/lisa.rst
@@ -0,0 +1,5 @@
+****
+lisa
+****
+
+.. module-listing:: lisa
diff --git a/doc/sections/changes/api_stability.rst b/doc/sections/changes/api_stability.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c5a109cc5d877224e717757a704e70ee9879661d
--- /dev/null
+++ b/doc/sections/changes/api_stability.rst
@@ -0,0 +1,48 @@
+.. _api-stability-page:
+
+*************
+API stability
+*************
+
+APIs inside LISA are split between private and public ones:
+
+* Public APIs can be expected to stay stable, or undergo a deprecation cycle
+  where they will trigger a :exc:`DeprecationWarning` and be documented as
+  such before being removed. Exceptions to that rule are documented explicitly
+  as such.
+
+* Private APIs can be changed at any point.
+
+Public APIs consist of classes and functions with names not starting with an
+underscore, defined in modules with a name not starting with an underscore (or
+any of its parent modules or containing class).
+
+Everything else is private.
+
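+As an illustration, the convention boils down to this kind of check (a
+simplified sketch, not the exact logic used by LISA):
+
+.. code-block:: python
+
+    def is_public(qualname):
+        # A dotted name is public if none of its components start with an
+        # underscore, e.g. "lisa.trace.Trace" is public but "lisa._kmod" is
+        # not.
+        return not any(part.startswith('_') for part in qualname.split('.'))
+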
+.. note:: User subclassing is usually more at risk of breakage than other uses
+   of the APIs. Behaviors are usually not restricted to a single method, which
+   means the subclass would have to override multiple of them to preserve
+   important API laws. This is unfortunately not future-proof, as new versions
+   can add new methods that would also require being overridden and kept in
+   sync. If for some reason subclassing is required, please get in touch in the
+   `GitLab issue tracker `_
+   before relying on that for production.
+
+.. note:: Instance attributes are considered public following the same
+   convention as functions and classes. Only reading from them is expected in
+   user code though, any attempt to modify or delete them is outside of the
+   bounds of what the public API exposes (unless stated explicitly otherwise).
+   This means that a minor version change could swap an instance attribute for
+   a read-only property. It also means that any problem following the
+   modification of an attribute by a user will not be considered a bug.
diff --git a/doc/sections/changes/breaking_changes.rst b/doc/sections/changes/breaking_changes.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3a8876c25092299ca965bbe98014b8202e4d503a
--- /dev/null
+++ b/doc/sections/changes/breaking_changes.rst
@@ -0,0 +1,25 @@
+****************
+Breaking changes
+****************
+
+Here is a list of commits introducing breaking changes in LISA:
+
+.. exec::
+
+    from lisa.utils import LISA_HOME
+    from lisa._git import find_commits, log
+
+    pattern = 'BREAK'
+
+    repo = LISA_HOME
+    commits = find_commits(repo, grep=pattern)
+    ignored_sha1s = {
+        '30d75656c7ff8a159dd52164269e69eed6dfccad',
+    }
+    for sha1 in commits:
+        if sha1 in ignored_sha1s:
+            continue
+        commit_log = log(repo, ref=sha1, format='%cd%n%H%n%B')
+        entry = '.. code-block:: text\n\n    {}\n'.format(commit_log.replace('\n', '\n    '))
+        print(entry)
+
diff --git a/doc/sections/changes/changelog.rst b/doc/sections/changes/changelog.rst
new file mode 100644
index 0000000000000000000000000000000000000000..27fedc11c0a0e68e9434026315cf00b9a1e0dadc
--- /dev/null
+++ b/doc/sections/changes/changelog.rst
@@ -0,0 +1,14 @@
+*********
+Changelog
+*********
+
+.. exec::
+
+    from lisa.utils import LISA_HOME
+    from lisa._doc.helpers import make_changelog
+
+    repo = LISA_HOME
+    changelog = make_changelog(
+        repo=LISA_HOME,
+    )
+    print(changelog)
diff --git a/doc/sections/changes/deprecated_api.rst b/doc/sections/changes/deprecated_api.rst
new file mode 100644
index 0000000000000000000000000000000000000000..cedbc07a091b958aa7e4e86ac2153875fd33a6c5
--- /dev/null
+++ b/doc/sections/changes/deprecated_api.rst
@@ -0,0 +1,23 @@
+***************
+Deprecated APIs
+***************
+
+Here is a list of deprecated APIs in LISA, sorted by version in which they will
+be removed:
+
+.. exec::
+
+    from lisa._doc.helpers import get_deprecated_table
+    print(get_deprecated_table())
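+
+Deprecated APIs emit a :exc:`DeprecationWarning` when used. To catch such uses
+early in your own code, the warning can be promoted to an error (this is plain
+standard-library Python, nothing LISA-specific):
+
+.. code-block:: python
+
+    import warnings
+
+    # Turn every DeprecationWarning into an exception. Note that this also
+    # affects deprecations coming from packages other than lisa.
+    warnings.simplefilter('error', DeprecationWarning)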
diff --git a/doc/sections/changes/index.rst b/doc/sections/changes/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0799a6fc64af1ba67473dbf1399401e008c83d61
--- /dev/null
+++ b/doc/sections/changes/index.rst
@@ -0,0 +1,13 @@
+Changelog
+=========
+
+.. toctree::
+    :maxdepth: 1
+
+    changelog
+    api_stability
+    deprecated_api
+    breaking_changes
+    versioning
+    release_process
+    transitioning_from_lisa_legacy
diff --git a/doc/sections/changes/release_process.rst b/doc/sections/changes/release_process.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9a4a3740f473fd88fcb806309815b233d04927d9
--- /dev/null
+++ b/doc/sections/changes/release_process.rst
@@ -0,0 +1,38 @@
+***************
+Release Process
+***************
+
+Making a new release involves the following steps:
+
+1. Update ``version_tuple`` in :mod:`lisa.version`.
+
+2. Ensure LISA as a whole refers to relevant versions of:
+
+   * Alpine Linux in :mod:`lisa._kmod`
+   * Ubuntu in ``Vagrantfile``
+   * Binary dependencies in :mod:`lisa._assets`
+   * Android SDK installed by ``install_base.sh``
+   * Java version used by Android SDK in ``install_base.sh``
+
+3. Ensure LISA can work with the currently published version of devlib.
+
+4. Create a ``vX.Y.Z`` tag.
+
+5. Make the Python wheel. See ``tools/make-release.sh`` for some
+   indications on that part.
+
+6. Install that wheel in a *fresh* :ref:`Vagrant VM`. Ensure
+   that the VM is reinstalled from scratch and that the vagrant box in use is
+   up to date.
+
+7. Run ``tools/tests.sh`` in the VM and ensure no deprecated item scheduled
+   for removal in the new version is still present in the sources (should
+   result in import-time exceptions).
+
+8. Ensure all CIs in use are happy.
+
+9. Push the ``vX.Y.Z`` tag in the main repo.
+
+10. Update the ``release`` branch to be at the same commit as the ``vX.Y.Z`` tag.
+
+11. Upload the wheel on PyPI.
diff --git a/doc/changes.rst b/doc/sections/changes/transitioning_from_lisa_legacy.rst
similarity index 63%
rename from doc/changes.rst
rename to doc/sections/changes/transitioning_from_lisa_legacy.rst
index 8fc678ed69b304e19e8f0936f7f24b21f1a21644..8e8040f9727acb7b6833e84140b0448124f56fad 100644
--- a/doc/changes.rst
+++ b/doc/sections/changes/transitioning_from_lisa_legacy.rst
@@ -1,153 +1,3 @@
-**************
-APIs stability
-**************
-.. _api-stability:
-
-APIs inside LISA are split between private and public ones:
-
- * Public APIs can be expected to stay stable, or undergo a deprecation cycle
-   where they will trigger an :exc:`DeprecationWarning` and be documented as
-   such before being removed. Exceptions to that rule are documented explicitly
-   as such.
-
- * Private APIs can be changed at all points.
-
-Public APIs consist of classes and functions with names not starting with an
-underscore, defined in modules with a name not starting with an underscore (or
-any of its parent modules or containing class).
-
-Everything else is private.
-
-.. note:: User subclassing is usually more at risk of breakage than other uses
-   of the APIs. Behaviors are usually not restricted to a single method, which
-   means the subclass would have to override multiple of them to preserve
-   important API laws. This is unfortunately not future-proof, as new versions
-   can add new methods that would also require being overridden and kept in
-   sync. If for some reason subclassing is required, please get in touch in the
-   `GitLab issue tracker `_
-   before relying on that for production.
-
-.. note:: Instance attributes are considered public following the same
-   convention as functions and classes. Only reading from them is expected in
-   user code though, any attempt to modify or delete them is outside of the
-   bounds of what the public API exposes (unless stated explicitly otherwise).
-   This means that a minor version change could swap an instance attribute for
-   a read-only property. It also means that any problem following the
-   modification of an attribute by a user will not be considered as a bug.
-
-**********
-Versioning
-**********
-
-LISA releases on :ref:`PyPI` are done following semantic versioning
-as defined in https://semver.org/.
As pointed by `api-stability`_, classes are -split on the following axes for the purpose of semver tracking: - - * A set of methods and attributes in general: Adding a method entails a minor - version bump, even though it can technically cause a breaking change in a - user subclass that happened to use the same name. - - * Inheritance tree: the MRO of a class is not considered as part of the stable - public API and can therefore change at any point. Classes named ``*Base`` - can usually be relied on for ``issubclass()`` and ``isinstance()`` but that - is not a hard rule. The reason behind that is that even adding a class to - the hierarchy can break existing uses of ``isinstance()`` so there is - essentially no way of making any change to the inheritance tree that is not - a breaking change. - -********* -Changelog -********* - -.. exec:: - - from lisa.utils import LISA_HOME - from lisa._doc.helpers import make_changelog - - repo = LISA_HOME - changelog = make_changelog( - repo=LISA_HOME, - ) - print(changelog) - -**************** -Breaking changes -**************** - -Here is a list of commits introducing breaking changes in LISA: - -.. exec:: - - from lisa.utils import LISA_HOME - from lisa._git import find_commits, log - - pattern = 'BREAK' - - repo = LISA_HOME - commits = find_commits(repo, grep=pattern) - ignored_sha1s = { - '30d75656c7ff8a159dd52164269e69eed6dfccad', - } - for sha1 in commits: - if sha1 in ignored_sha1s: - continue - commit_log = log(repo, ref=sha1, format='%cd%n%H%n%B') - entry = '.. code-block:: text\n\n {}\n'.format(commit_log.replace('\n', '\n ')) - print(entry) - -*************** -Deprecated APIs -*************** - -Here is a list of deprecated APIs in LISA, sorted by version in which they will -be removed: - -.. exec:: - - from lisa._doc.helpers import get_deprecated_table - print(get_deprecated_table()) - - -*************** -Release Process -*************** - -Making a new release involves the following steps: - - 1. Update ``version_tuple`` in :mod:`lisa.version`. - - 2. Ensure LISA as a whole refers to relevant versions of: - - * Alpine Linux in :mod:`lisa._kmod` - * Ubuntu in ``Vagrantfile`` - * Binary dependencies in :mod:`lisa._assets` - * Android SDK installed by ``install_base.sh`` - * Java version used by Android SDK in ``install_base.sh`` - - 3. Ensure LISA can work with currently published version of devlib. - - 4. Create a ``vX.Y.Z`` tag. - - 5. Make the Python wheel. See ``tools/make-release.sh`` for some - indications on that part. - - 6. Install that wheel in a _fresh_ :ref:`Vagrant VM`. Ensure - that the VM is reinstalled from scratch and that the vagrant box in use is - up to date. - - 7. Run ``tools/tests.sh`` in the VM and ensure no deprecated item scheduled - for removal in the new version is still present in the sources (should - result in import-time exceptions). - - 8. Ensure all CIs in use are happy. - - 9. Push the ``vX.Y.Z`` tag in the main repo - - 10. Update the ``release`` branch to be at the same commit as the ``vX.Y.Z`` tag. - - 11. Upload the wheel on PyPI. - - ****************************** Transitioning from LISA legacy ****************************** @@ -201,16 +51,15 @@ We now mandate the use of absolute imports, which look like this:: from lisa.trace import Trace .. 
tip::
-
-    This can help you figure out what you are really importing:
+    This can help you figure out what you are really importing::
 
     >>> import trace
     >>> print(trace.__path__)
     /usr/lib/python3.5/trace.py
 
-    if that doesn't work you can try
+    If that doesn't work you can try::
 
-    >>> print(xxx.__file__)
+        >>> print(xxx.__file__)
 
 .. warning::
diff --git a/doc/sections/changes/versioning.rst b/doc/sections/changes/versioning.rst
new file mode 100644
index 0000000000000000000000000000000000000000..e8cbd17c8a93678e13a9ce748a1a5abf2d2b0ab8
--- /dev/null
+++ b/doc/sections/changes/versioning.rst
@@ -0,0 +1,19 @@
+**********
+Versioning
+**********
+
+LISA releases on :ref:`PyPI` are done following semantic versioning
+as defined in https://semver.org/. As pointed out in :ref:`api-stability-page`,
+classes are split on the following axes for the purpose of semver tracking:
+
+* A set of methods and attributes in general: Adding a method entails a minor
+  version bump, even though it can technically cause a breaking change in a
+  user subclass that happened to use the same name.
+
+* Inheritance tree: the MRO of a class is not considered as part of the stable
+  public API and can therefore change at any point. Classes named ``*Base``
+  can usually be relied on for ``issubclass()`` and ``isinstance()`` but that
+  is not a hard rule. The reason behind that is that even adding a class to
+  the hierarchy can break existing uses of ``isinstance()`` so there is
+  essentially no way of making any change to the inheritance tree that is not
+  a breaking change.
diff --git a/doc/sections/guides/index.rst b/doc/sections/guides/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..d60fe79c35ddc82eeb147e3787e2994f40340373
--- /dev/null
+++ b/doc/sections/guides/index.rst
@@ -0,0 +1,20 @@
+.. _guides-page:
+
+Guides
+======
+
+.. toctree::
+    :maxdepth: 2
+
+    ../../getting_started
+    ../../workflows/notebook
+    ../../workflows/automated_testing
+    ../../workflows/workload_automation
+
+    ../../target
+    ../../trace_analysis
+    ../../workloads
+
+    ../../energy_analysis
+    ../../kernel_tests
+    ../../contributors_guide
diff --git a/doc/sections/tools/index.rst b/doc/sections/tools/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0e53eb2ef6a29e7a00cc61d12a5522a794166d31
--- /dev/null
+++ b/doc/sections/tools/index.rst
@@ -0,0 +1,11 @@
+Tools
+=====
+
+.. toctree::
+    :maxdepth: 1
+
+    ../../lisa_shell/man/man
+    ../../bisector/index
+    ../../exekall/index
+
+
diff --git a/doc/setup.rst b/doc/setup.rst
index fd726063e86f49007ff29609c005435e26dd31b5..8e762deaba7ba01b47690b27e36a77f957272bf5 100644
--- a/doc/setup.rst
+++ b/doc/setup.rst
@@ -448,4 +448,4 @@ What next ?
 ===========
 
 The next step depends on the intended use case, further information at
-:ref:`workflows-page`
+:ref:`guides-page`.
diff --git a/doc/stat_comparison.rst b/doc/stat_comparison.rst
deleted file mode 100644
index 5f3a7436f25a4851b086152b6d1ac30b335ceb29..0000000000000000000000000000000000000000
--- a/doc/stat_comparison.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-***********************
-Statistical comparison
-***********************
-
-
-Statistical comparisons
-=======================
-
-.. automodule:: lisa.stats
-   :members:
-
-Workload Automation
-===================
-.. 
automodule:: lisa.wa - :members: diff --git a/doc/target.rst b/doc/target.rst index 76384522d3095c997feb49d6e4b4f76e04e735c5..84907f41d6f67a30bbdaeea0b347ab021f8685ae 100644 --- a/doc/target.rst +++ b/doc/target.rst @@ -59,14 +59,5 @@ platform frequencies. API === -Target -++++++ - -.. automodule:: lisa.target - :members: - -Platform Info -+++++++++++++ - -.. automodule:: lisa.platforms.platinfo - :members: +* Target: :mod:`lisa.target` +* Platform info: :mod:`lisa.platforms.platinfo` diff --git a/doc/trace_analysis.rst b/doc/trace_analysis.rst index 8870a8163f16956885b93e5bc29e8778ddc66152..f0df7bc8ad8fe82cc32de5103ff8078c3e4b6e24 100644 --- a/doc/trace_analysis.rst +++ b/doc/trace_analysis.rst @@ -46,7 +46,13 @@ Or from an existing :class:`~lisa.trace.Trace` object:: Then all the dataframe APIs will return :class:`polars.LazyFrame` instances instead of :class:`pandas.DataFrame`. -.. seealso:: See the :class:`~lisa.trace.Trace` documentation for more details. + +Here are the main entry points in the trace analysis APIs: + +* Trace manipulation class: :class:`lisa.trace.Trace` +* Trace analysis package: :mod:`lisa.analysis` +* Trace analysis base classes: :mod:`lisa.analysis.base` + Available analysis ================== @@ -62,165 +68,47 @@ to study specific trace windows. from lisa._doc.helpers import get_analysis_list print(get_analysis_list("df")) -Plots -+++++ - -When run in a notebook, these plots will be displayed automatically. By default, -they are also saved in the same directory as your ``trace.dat`` - -.. exec:: - from lisa._doc.helpers import get_analysis_list - print(get_analysis_list("plot")) - -API -=== - -Trace -+++++ - -.. autoclass:: lisa.trace.Trace - :members: - :inherited-members: - -.. automodule:: lisa.trace - :members: - :exclude-members: Trace, TraceParserBase, EventParserBase, TxtTraceParserBase, MetaTxtTraceParser, TxtTraceParser, SimpleTxtTraceParser, HRTxtTraceParser, SysTraceParser, TxtEventParser, CustomFieldsTxtEventParser, PrintTxtEventParser, TraceDumpTraceParser - -Analysis proxy -++++++++++++++ - -.. automodule:: lisa.analysis._proxy - :members: - -Analysis base class -+++++++++++++++++++ - -.. automodule:: lisa.analysis.base - :members: - -Load tracking -+++++++++++++ - -.. automodule:: lisa.analysis.load_tracking - :members: - -CPUs -++++ - -.. automodule:: lisa.analysis.cpus - :members: - -Frequency -+++++++++ - -.. automodule:: lisa.analysis.frequency - :members: - -Tasks -+++++ - -.. These two autoclasses should not be necessary, but sphinx doesn't seem - to like Enums and refuses to do anything with TaskState unless explicetely - told to. -.. autoclass:: lisa.analysis.tasks.StateInt - :members: - -.. autoclass:: lisa.analysis.tasks.TaskState - :members: - -.. automodule:: lisa.analysis.tasks - :members: - :exclude-members: StateInt, TaskState - -rt-app -++++++ - -.. automodule:: lisa.analysis.rta - :members: - -Idle -++++ - -.. automodule:: lisa.analysis.idle - :members: - -Latency -+++++++ - -.. automodule:: lisa.analysis.latency - :members: - -Status -++++++ - -.. automodule:: lisa.analysis.status - :members: - -Thermal +Gallery +++++++ -.. automodule:: lisa.analysis.thermal - :members: - -Pixel 6 -+++++++ - -.. automodule:: lisa.analysis.pixel6 - :members: - -Function profiling -++++++++++++++++++ - -.. automodule:: lisa.analysis.functions - :members: - -Interactive notebook helper -+++++++++++++++++++++++++++ - -.. automodule:: lisa.analysis.notebook - :members: - - -Trace parsers -+++++++++++++ - -.. 
note:: :class:`lisa.trace.Trace` is the class to use to manipulate a trace - file, trace parsers are backend objects that are usually not - manipulated by the user. - -.. autoclass:: lisa.trace.TraceParserBase - :members: +.. exec:: + # Get the state exposed by lisa-exec-state sphinx hook + plots = state.plots -.. autoclass:: lisa.trace.EventParserBase - :members: + if plots: + from itertools import starmap -.. autoclass:: lisa.trace.TxtTraceParserBase - :members: + from lisa.analysis.base import TraceAnalysisBase + from lisa.utils import get_obj_name, groupby, get_parent_namespace -.. autoclass:: lisa.trace.TxtTraceParser - :members: + from lisa._doc.helpers import ana_invocation -.. autoclass:: lisa.trace.MetaTxtTraceParser - :members: -.. autoclass:: lisa.trace.SimpleTxtTraceParser - :members: + def make_entry(f, rst_fig): + name = get_obj_name(f, style='rst', abbrev=True) + rst_fig = rst_fig or 'No plot available' + invocation = ana_invocation(f) + return f'\n\n{name}\n{"." * len(name)}\n\n{invocation}\n\n{rst_fig}' -.. autoclass:: lisa.trace.HRTxtTraceParser - :members: + def make_sections(section, entries): + entries = sorted(starmap(make_entry, entries)) + entries = '\n\n'.join(entries) + return f'{section}\n{"-" * len(section)}\n\n{entries}' -.. autoclass:: lisa.trace.SysTraceParser - :members: + def key(item): + f, fig = item + ns = get_parent_namespace(f) + assert isinstance(ns, type) + assert issubclass(ns, TraceAnalysisBase) + return ns.name -.. autoclass:: lisa.trace.TxtEventParser - :members: + sections = groupby(plots.items(), key=key) + sections = sorted(starmap(make_sections, sections)) + sections = '\n\n'.join(sections) -.. autoclass:: lisa.trace.CustomFieldsTxtEventParser - :members: + print(sections) -.. autoclass:: lisa.trace.PrintTxtEventParser - :members: + else: + print('No plots available') -.. autoclass:: lisa.trace.TraceDumpTraceParser - :members: diff --git a/doc/workflows/index.rst b/doc/workflows/index.rst deleted file mode 100644 index 994afa61d7e6f0ec2b5c7b923e7c65b84ba5bed8..0000000000000000000000000000000000000000 --- a/doc/workflows/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. _workflows-page: - -********* -Workflows -********* - -Workflow pages are intended as use-case oriented high level documentation. Since -``lisa`` (as a Python package) can be used in a number of different ways and -with various tools, there is a specialized page for each. - -.. 
toctree::
-    :maxdepth: 2
-
-    notebook
-    automated_testing
-    workload_automation
diff --git a/doc/workflows/ipynb/examples/analysis_plots.ipynb b/doc/workflows/ipynb/examples/analysis_plots.ipynb
deleted file mode 120000
index ffe68dd1fc3cc0130b13ba7f1b5c6690589732c4..0000000000000000000000000000000000000000
--- a/doc/workflows/ipynb/examples/analysis_plots.ipynb
+++ /dev/null
@@ -1 +0,0 @@
-../../../../ipynb/examples/analysis_plots.ipynb
\ No newline at end of file
diff --git a/doc/workflows/notebook.rst b/doc/workflows/notebook.rst
index 71f306ce086b5d47242c8bfd2a4844dfb80b6de8..edf99b68fa3210837c389b161be0dd1629cc4a05 100644
--- a/doc/workflows/notebook.rst
+++ b/doc/workflows/notebook.rst
@@ -1,6 +1,6 @@
-*********
-Notebooks
-*********
+********************
+JupyterLab Notebooks
+********************
 
 Starting the server
 ===================
diff --git a/doc/workloads.rst b/doc/workloads.rst
index 530604d7cc72ce285ad9008cd1bb7cd9715102db..41e793cec3747dbb826e7a102796ce1faf332307 100644
--- a/doc/workloads.rst
+++ b/doc/workloads.rst
@@ -18,22 +18,8 @@ Available workloads
 ===================
 
 Most of these workloads are based on the :class:`~lisa.wlgen.workload.Workload`
-class, see the documentation for common functionalities.
+class, see the documentation for common functionalities:
 
-Base class
-++++++++++
-
-.. automodule:: lisa.wlgen.workload
-   :members:
-
-rt-app
-++++++
-
-.. automodule:: lisa.wlgen.rta
-   :members:
-
-Sysbench
-++++++++
-
-.. automodule:: lisa.wlgen.sysbench
-   :members:
+ * Base infrastructure: :mod:`lisa.wlgen.workload`
+ * ``rt-app``: :mod:`lisa.wlgen.rta`
+ * ``sysbench``: :mod:`lisa.wlgen.sysbench`
diff --git a/ipynb/examples/analysis_plots.ipynb b/ipynb/examples/analysis_plots.ipynb
index 73a3491f3acc64dfaa1f487504d48e1d53abb8bd..d7edeec65adc45670c20a13058390228a05649d2 100644
--- a/ipynb/examples/analysis_plots.ipynb
+++ b/ipynb/examples/analysis_plots.ipynb
@@ -1,1161 +1,19 @@
 {
  "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "94324427-d939-4415-bc59-44bc270f7003",
+   "metadata": {},
+   "source": [
+    "## Analysis plots"
+   ]
+  },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
    "id": "94324427-d939-4415-bc59-44bc270f7002",
    "metadata": {},
-   "outputs": [
[... the rest of the hunk deletes the notebook's ~1,100 remaining lines of
saved cell outputs: the Bokeh/HoloViews JavaScript loader boilerplate stored
under the "application/javascript" and "application/vnd.holoviews_load.v0+json"
output data ...]
window._bokeh_on_load = on_load\n\n function on_error() {\n console.error(\"failed to load \" + url);\n }\n\n var skip = [];\n if (window.requirejs) {\n window.requirejs.config({'packages': {}, 'paths': {'tabulator': 'https://cdn.jsdelivr.net/npm/tabulator-tables@5.5.0/dist/js/tabulator.min', 'moment': 'https://cdn.jsdelivr.net/npm/luxon/build/global/luxon.min'}, 'shim': {}});\n require([\"tabulator\"], function(Tabulator) {\n\twindow.Tabulator = Tabulator\n\ton_load()\n })\n require([\"moment\"], function(moment) {\n\twindow.moment = moment\n\ton_load()\n })\n root._bokeh_is_loading = css_urls.length + 2;\n } else {\n root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n }\n\n var existing_stylesheets = []\n var links = document.getElementsByTagName('link')\n for (var i = 0; i < links.length; i++) {\n var link = links[i]\n if (link.href != null) {\n\texisting_stylesheets.push(link.href)\n }\n }\n for (var i = 0; i < css_urls.length; i++) {\n var url = css_urls[i];\n if (existing_stylesheets.indexOf(url) !== -1) {\n\ton_load()\n\tcontinue;\n }\n const element = document.createElement(\"link\");\n element.onload = on_load;\n element.onerror = on_error;\n element.rel = \"stylesheet\";\n element.type = \"text/css\";\n element.href = url;\n console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n document.body.appendChild(element);\n } if (((window.Tabulator !== undefined) && (!(window.Tabulator instanceof HTMLElement))) || window.requirejs) {\n var urls = ['https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/js/tabulator.min.js'];\n for (var i = 0; i < urls.length; i++) {\n skip.push(urls[i])\n }\n } if (((window.moment !== undefined) && (!(window.moment instanceof HTMLElement))) || window.requirejs) {\n var urls = ['https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/luxon/build/global/luxon.min.js'];\n for (var i = 0; i < urls.length; i++) {\n skip.push(urls[i])\n }\n } var existing_scripts = []\n var scripts = document.getElementsByTagName('script')\n for (var i = 0; i < scripts.length; i++) {\n var script = scripts[i]\n if (script.src != null) {\n\texisting_scripts.push(script.src)\n }\n }\n for (var i = 0; i < js_urls.length; i++) {\n var url = js_urls[i];\n if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (var i = 0; i < js_modules.length; i++) {\n var url = js_modules[i];\n if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n element.type = \"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (const name in js_exports) {\n var url = js_exports[name];\n if (skip.indexOf(url) >= 0 || root[name] != null) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onerror = on_error;\n element.async = false;\n 
element.type = \"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n element.textContent = `\n import ${name} from \"${url}\"\n window.${name} = ${name}\n window._bokeh_on_load()\n `\n document.head.appendChild(element);\n }\n if (!js_urls.length && !js_modules.length) {\n on_load()\n }\n };\n\n function inject_raw_css(css) {\n const element = document.createElement(\"style\");\n element.appendChild(document.createTextNode(css));\n document.body.appendChild(element);\n }\n\n var js_urls = [\"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/js/tabulator.min.js\", \"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/luxon/build/global/luxon.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-3.4.2.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-3.4.2.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-3.4.2.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-3.4.2.min.js\", \"https://cdn.holoviz.org/panel/1.4.4/dist/panel.min.js\"];\n var js_modules = [];\n var js_exports = {};\n var css_urls = [\"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/css/tabulator_simple.min.css?v=1.4.4\"];\n var inline_js = [ function(Bokeh) {\n Bokeh.set_log_level(\"info\");\n },\nfunction(Bokeh) {} // ensure no trailing comma for IE\n ];\n\n function run_inline_js() {\n if ((root.Bokeh !== undefined) || (force === true)) {\n for (var i = 0; i < inline_js.length; i++) {\n\ttry {\n inline_js[i].call(root, root.Bokeh);\n\t} catch(e) {\n\t if (!reloading) {\n\t throw e;\n\t }\n\t}\n }\n // Cache old bokeh versions\n if (Bokeh != undefined && !reloading) {\n\tvar NewBokeh = root.Bokeh;\n\tif (Bokeh.versions === undefined) {\n\t Bokeh.versions = new Map();\n\t}\n\tif (NewBokeh.version !== Bokeh.version) {\n\t Bokeh.versions.set(NewBokeh.version, NewBokeh)\n\t}\n\troot.Bokeh = Bokeh;\n }} else if (Date.now() < root._bokeh_timeout) {\n setTimeout(run_inline_js, 100);\n } else if (!root._bokeh_failed_load) {\n console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n root._bokeh_failed_load = true;\n }\n root._bokeh_is_initializing = false\n }\n\n function load_or_wait() {\n // Implement a backoff loop that tries to ensure we do not load multiple\n // versions of Bokeh and its dependencies at the same time.\n // In recent versions we use the root._bokeh_is_initializing flag\n // to determine whether there is an ongoing attempt to initialize\n // bokeh, however for backward compatibility we also try to ensure\n // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n // before older versions are fully initialized.\n if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n root._bokeh_is_initializing = false;\n root._bokeh_onload_callbacks = undefined;\n console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n load_or_wait();\n } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n setTimeout(load_or_wait, 100);\n } else {\n root._bokeh_is_initializing = true\n root._bokeh_onload_callbacks = []\n var bokeh_loaded = Bokeh != null && (Bokeh.version === py_version || (Bokeh.versions !== undefined && Bokeh.versions.has(py_version)));\n if (!reloading && !bokeh_loaded) {\n\troot.Bokeh = undefined;\n }\n load_libs(css_urls, js_urls, js_modules, js_exports, function() 
{\n\tconsole.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n\trun_inline_js();\n });\n }\n }\n // Give older versions of the autoload script a head-start to ensure\n // they initialize before we start loading newer version.\n setTimeout(load_or_wait, 100)\n}(window));" - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/javascript": [ - "\n", - "if ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n", - " window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n", - "}\n", - "\n", - "\n", - " function JupyterCommManager() {\n", - " }\n", - "\n", - " JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n", - " if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n", - " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n", - " comm_manager.register_target(comm_id, function(comm) {\n", - " comm.on_msg(msg_handler);\n", - " });\n", - " } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n", - " window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n", - " comm.onMsg = msg_handler;\n", - " });\n", - " } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n", - " google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n", - " var messages = comm.messages[Symbol.asyncIterator]();\n", - " function processIteratorResult(result) {\n", - " var message = result.value;\n", - " console.log(message)\n", - " var content = {data: message.data, comm_id};\n", - " var buffers = []\n", - " for (var buffer of message.buffers || []) {\n", - " buffers.push(new DataView(buffer))\n", - " }\n", - " var metadata = message.metadata || {};\n", - " var msg = {content, buffers, metadata}\n", - " msg_handler(msg);\n", - " return messages.next().then(processIteratorResult);\n", - " }\n", - " return messages.next().then(processIteratorResult);\n", - " })\n", - " }\n", - " }\n", - "\n", - " JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n", - " if (comm_id in window.PyViz.comms) {\n", - " return window.PyViz.comms[comm_id];\n", - " } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n", - " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n", - " var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n", - " if (msg_handler) {\n", - " comm.on_msg(msg_handler);\n", - " }\n", - " } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n", - " var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n", - " comm.open();\n", - " if (msg_handler) {\n", - " comm.onMsg = msg_handler;\n", - " }\n", - " } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n", - " var comm_promise = google.colab.kernel.comms.open(comm_id)\n", - " comm_promise.then((comm) => {\n", - " window.PyViz.comms[comm_id] = comm;\n", - " if (msg_handler) {\n", - " var messages = comm.messages[Symbol.asyncIterator]();\n", - " function processIteratorResult(result) {\n", - " var message = result.value;\n", - " var content = {data: message.data};\n", - " var metadata = message.metadata || {comm_id};\n", - " var msg = {content, metadata}\n", - " msg_handler(msg);\n", - " return messages.next().then(processIteratorResult);\n", - " }\n", - " return 
messages.next().then(processIteratorResult);\n", - " }\n", - " }) \n", - " var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n", - " return comm_promise.then((comm) => {\n", - " comm.send(data, metadata, buffers, disposeOnDone);\n", - " });\n", - " };\n", - " var comm = {\n", - " send: sendClosure\n", - " };\n", - " }\n", - " window.PyViz.comms[comm_id] = comm;\n", - " return comm;\n", - " }\n", - " window.PyViz.comm_manager = new JupyterCommManager();\n", - " \n", - "\n", - "\n", - "var JS_MIME_TYPE = 'application/javascript';\n", - "var HTML_MIME_TYPE = 'text/html';\n", - "var EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\n", - "var CLASS_NAME = 'output';\n", - "\n", - "/**\n", - " * Render data to the DOM node\n", - " */\n", - "function render(props, node) {\n", - " var div = document.createElement(\"div\");\n", - " var script = document.createElement(\"script\");\n", - " node.appendChild(div);\n", - " node.appendChild(script);\n", - "}\n", - "\n", - "/**\n", - " * Handle when a new output is added\n", - " */\n", - "function handle_add_output(event, handle) {\n", - " var output_area = handle.output_area;\n", - " var output = handle.output;\n", - " if ((output.data == undefined) || (!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n", - " return\n", - " }\n", - " var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n", - " var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n", - " if (id !== undefined) {\n", - " var nchildren = toinsert.length;\n", - " var html_node = toinsert[nchildren-1].children[0];\n", - " html_node.innerHTML = output.data[HTML_MIME_TYPE];\n", - " var scripts = [];\n", - " var nodelist = html_node.querySelectorAll(\"script\");\n", - " for (var i in nodelist) {\n", - " if (nodelist.hasOwnProperty(i)) {\n", - " scripts.push(nodelist[i])\n", - " }\n", - " }\n", - "\n", - " scripts.forEach( function (oldScript) {\n", - " var newScript = document.createElement(\"script\");\n", - " var attrs = [];\n", - " var nodemap = oldScript.attributes;\n", - " for (var j in nodemap) {\n", - " if (nodemap.hasOwnProperty(j)) {\n", - " attrs.push(nodemap[j])\n", - " }\n", - " }\n", - " attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n", - " newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n", - " oldScript.parentNode.replaceChild(newScript, oldScript);\n", - " });\n", - " if (JS_MIME_TYPE in output.data) {\n", - " toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n", - " }\n", - " output_area._hv_plot_id = id;\n", - " if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n", - " window.PyViz.plot_index[id] = Bokeh.index[id];\n", - " } else {\n", - " window.PyViz.plot_index[id] = null;\n", - " }\n", - " } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n", - " var bk_div = document.createElement(\"div\");\n", - " bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n", - " var script_attrs = bk_div.children[0].attributes;\n", - " for (var i = 0; i < script_attrs.length; i++) {\n", - " toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n", - " }\n", - " // store reference to server id on output_area\n", - " output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n", - " }\n", - "}\n", - "\n", - "/**\n", - " * Handle when an output is cleared or removed\n", - " */\n", - "function handle_clear_output(event, handle) {\n", - " var id = 
handle.cell.output_area._hv_plot_id;\n", - " var server_id = handle.cell.output_area._bokeh_server_id;\n", - " if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n", - " var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n", - " if (server_id !== null) {\n", - " comm.send({event_type: 'server_delete', 'id': server_id});\n", - " return;\n", - " } else if (comm !== null) {\n", - " comm.send({event_type: 'delete', 'id': id});\n", - " }\n", - " delete PyViz.plot_index[id];\n", - " if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n", - " var doc = window.Bokeh.index[id].model.document\n", - " doc.clear();\n", - " const i = window.Bokeh.documents.indexOf(doc);\n", - " if (i > -1) {\n", - " window.Bokeh.documents.splice(i, 1);\n", - " }\n", - " }\n", - "}\n", - "\n", - "/**\n", - " * Handle kernel restart event\n", - " */\n", - "function handle_kernel_cleanup(event, handle) {\n", - " delete PyViz.comms[\"hv-extension-comm\"];\n", - " window.PyViz.plot_index = {}\n", - "}\n", - "\n", - "/**\n", - " * Handle update_display_data messages\n", - " */\n", - "function handle_update_output(event, handle) {\n", - " handle_clear_output(event, {cell: {output_area: handle.output_area}})\n", - " handle_add_output(event, handle)\n", - "}\n", - "\n", - "function register_renderer(events, OutputArea) {\n", - " function append_mime(data, metadata, element) {\n", - " // create a DOM node to render to\n", - " var toinsert = this.create_output_subarea(\n", - " metadata,\n", - " CLASS_NAME,\n", - " EXEC_MIME_TYPE\n", - " );\n", - " this.keyboard_manager.register_events(toinsert);\n", - " // Render to node\n", - " var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n", - " render(props, toinsert[0]);\n", - " element.append(toinsert);\n", - " return toinsert\n", - " }\n", - "\n", - " events.on('output_added.OutputArea', handle_add_output);\n", - " events.on('output_updated.OutputArea', handle_update_output);\n", - " events.on('clear_output.CodeCell', handle_clear_output);\n", - " events.on('delete.Cell', handle_clear_output);\n", - " events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n", - "\n", - " OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n", - " safe: true,\n", - " index: 0\n", - " });\n", - "}\n", - "\n", - "if (window.Jupyter !== undefined) {\n", - " try {\n", - " var events = require('base/js/events');\n", - " var OutputArea = require('notebook/js/outputarea').OutputArea;\n", - " if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n", - " register_renderer(events, OutputArea);\n", - " }\n", - " } catch(err) {\n", - " }\n", - "}\n" - ], - "application/vnd.holoviews_load.v0+json": "\nif ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n}\n\n\n function JupyterCommManager() {\n }\n\n JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n comm_manager.register_target(comm_id, function(comm) {\n comm.on_msg(msg_handler);\n });\n } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n comm.onMsg = 
msg_handler;\n });\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n console.log(message)\n var content = {data: message.data, comm_id};\n var buffers = []\n for (var buffer of message.buffers || []) {\n buffers.push(new DataView(buffer))\n }\n var metadata = message.metadata || {};\n var msg = {content, buffers, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n })\n }\n }\n\n JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n if (comm_id in window.PyViz.comms) {\n return window.PyViz.comms[comm_id];\n } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n if (msg_handler) {\n comm.on_msg(msg_handler);\n }\n } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n comm.open();\n if (msg_handler) {\n comm.onMsg = msg_handler;\n }\n } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n var comm_promise = google.colab.kernel.comms.open(comm_id)\n comm_promise.then((comm) => {\n window.PyViz.comms[comm_id] = comm;\n if (msg_handler) {\n var messages = comm.messages[Symbol.asyncIterator]();\n function processIteratorResult(result) {\n var message = result.value;\n var content = {data: message.data};\n var metadata = message.metadata || {comm_id};\n var msg = {content, metadata}\n msg_handler(msg);\n return messages.next().then(processIteratorResult);\n }\n return messages.next().then(processIteratorResult);\n }\n }) \n var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n return comm_promise.then((comm) => {\n comm.send(data, metadata, buffers, disposeOnDone);\n });\n };\n var comm = {\n send: sendClosure\n };\n }\n window.PyViz.comms[comm_id] = comm;\n return comm;\n }\n window.PyViz.comm_manager = new JupyterCommManager();\n \n\n\nvar JS_MIME_TYPE = 'application/javascript';\nvar HTML_MIME_TYPE = 'text/html';\nvar EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\nvar CLASS_NAME = 'output';\n\n/**\n * Render data to the DOM node\n */\nfunction render(props, node) {\n var div = document.createElement(\"div\");\n var script = document.createElement(\"script\");\n node.appendChild(div);\n node.appendChild(script);\n}\n\n/**\n * Handle when a new output is added\n */\nfunction handle_add_output(event, handle) {\n var output_area = handle.output_area;\n var output = handle.output;\n if ((output.data == undefined) || (!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n return\n }\n var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n if (id !== undefined) {\n var nchildren = toinsert.length;\n var html_node = toinsert[nchildren-1].children[0];\n html_node.innerHTML = output.data[HTML_MIME_TYPE];\n var scripts = [];\n var nodelist = html_node.querySelectorAll(\"script\");\n for (var i in nodelist) {\n if (nodelist.hasOwnProperty(i)) {\n scripts.push(nodelist[i])\n }\n }\n\n scripts.forEach( function (oldScript) {\n var newScript = 
document.createElement(\"script\");\n var attrs = [];\n var nodemap = oldScript.attributes;\n for (var j in nodemap) {\n if (nodemap.hasOwnProperty(j)) {\n attrs.push(nodemap[j])\n }\n }\n attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n oldScript.parentNode.replaceChild(newScript, oldScript);\n });\n if (JS_MIME_TYPE in output.data) {\n toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n }\n output_area._hv_plot_id = id;\n if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n window.PyViz.plot_index[id] = Bokeh.index[id];\n } else {\n window.PyViz.plot_index[id] = null;\n }\n } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n var bk_div = document.createElement(\"div\");\n bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n var script_attrs = bk_div.children[0].attributes;\n for (var i = 0; i < script_attrs.length; i++) {\n toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n }\n // store reference to server id on output_area\n output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n }\n}\n\n/**\n * Handle when an output is cleared or removed\n */\nfunction handle_clear_output(event, handle) {\n var id = handle.cell.output_area._hv_plot_id;\n var server_id = handle.cell.output_area._bokeh_server_id;\n if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n if (server_id !== null) {\n comm.send({event_type: 'server_delete', 'id': server_id});\n return;\n } else if (comm !== null) {\n comm.send({event_type: 'delete', 'id': id});\n }\n delete PyViz.plot_index[id];\n if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n var doc = window.Bokeh.index[id].model.document\n doc.clear();\n const i = window.Bokeh.documents.indexOf(doc);\n if (i > -1) {\n window.Bokeh.documents.splice(i, 1);\n }\n }\n}\n\n/**\n * Handle kernel restart event\n */\nfunction handle_kernel_cleanup(event, handle) {\n delete PyViz.comms[\"hv-extension-comm\"];\n window.PyViz.plot_index = {}\n}\n\n/**\n * Handle update_display_data messages\n */\nfunction handle_update_output(event, handle) {\n handle_clear_output(event, {cell: {output_area: handle.output_area}})\n handle_add_output(event, handle)\n}\n\nfunction register_renderer(events, OutputArea) {\n function append_mime(data, metadata, element) {\n // create a DOM node to render to\n var toinsert = this.create_output_subarea(\n metadata,\n CLASS_NAME,\n EXEC_MIME_TYPE\n );\n this.keyboard_manager.register_events(toinsert);\n // Render to node\n var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n render(props, toinsert[0]);\n element.append(toinsert);\n return toinsert\n }\n\n events.on('output_added.OutputArea', handle_add_output);\n events.on('output_updated.OutputArea', handle_update_output);\n events.on('clear_output.CodeCell', handle_clear_output);\n events.on('delete.Cell', handle_clear_output);\n events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n\n OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n safe: true,\n index: 0\n });\n}\n\nif (window.Jupyter !== undefined) {\n try {\n var events = require('base/js/events');\n var OutputArea = require('notebook/js/outputarea').OutputArea;\n if 
(OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n register_renderer(events, OutputArea);\n }\n } catch(err) {\n }\n}\n" - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/javascript": [ - "(function(root) {\n", - " function now() {\n", - " return new Date();\n", - " }\n", - "\n", - " var force = true;\n", - " var py_version = '3.4.2'.replace('rc', '-rc.').replace('.dev', '-dev.');\n", - " var reloading = true;\n", - " var Bokeh = root.Bokeh;\n", - "\n", - " if (typeof (root._bokeh_timeout) === \"undefined\" || force) {\n", - " root._bokeh_timeout = Date.now() + 5000;\n", - " root._bokeh_failed_load = false;\n", - " }\n", - "\n", - " function run_callbacks() {\n", - " try {\n", - " root._bokeh_onload_callbacks.forEach(function(callback) {\n", - " if (callback != null)\n", - " callback();\n", - " });\n", - " } finally {\n", - " delete root._bokeh_onload_callbacks;\n", - " }\n", - " console.debug(\"Bokeh: all callbacks have finished\");\n", - " }\n", - "\n", - " function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n", - " if (css_urls == null) css_urls = [];\n", - " if (js_urls == null) js_urls = [];\n", - " if (js_modules == null) js_modules = [];\n", - " if (js_exports == null) js_exports = {};\n", - "\n", - " root._bokeh_onload_callbacks.push(callback);\n", - "\n", - " if (root._bokeh_is_loading > 0) {\n", - " console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n", - " return null;\n", - " }\n", - " if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n", - " run_callbacks();\n", - " return null;\n", - " }\n", - " if (!reloading) {\n", - " console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n", - " }\n", - "\n", - " function on_load() {\n", - " root._bokeh_is_loading--;\n", - " if (root._bokeh_is_loading === 0) {\n", - " console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n", - " run_callbacks()\n", - " }\n", - " }\n", - " window._bokeh_on_load = on_load\n", - "\n", - " function on_error() {\n", - " console.error(\"failed to load \" + url);\n", - " }\n", - "\n", - " var skip = [];\n", - " if (window.requirejs) {\n", - " window.requirejs.config({'packages': {}, 'paths': {'tabulator': 'https://cdn.jsdelivr.net/npm/tabulator-tables@5.5.0/dist/js/tabulator.min', 'moment': 'https://cdn.jsdelivr.net/npm/luxon/build/global/luxon.min'}, 'shim': {}});\n", - " require([\"tabulator\"], function(Tabulator) {\n", - "\twindow.Tabulator = Tabulator\n", - "\ton_load()\n", - " })\n", - " require([\"moment\"], function(moment) {\n", - "\twindow.moment = moment\n", - "\ton_load()\n", - " })\n", - " root._bokeh_is_loading = css_urls.length + 2;\n", - " } else {\n", - " root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n", - " }\n", - "\n", - " var existing_stylesheets = []\n", - " var links = document.getElementsByTagName('link')\n", - " for (var i = 0; i < links.length; i++) {\n", - " var link = links[i]\n", - " if (link.href != null) {\n", - "\texisting_stylesheets.push(link.href)\n", - " }\n", - " }\n", - " for (var i = 0; i < css_urls.length; i++) {\n", - " var url = css_urls[i];\n", - " if (existing_stylesheets.indexOf(url) !== -1) {\n", - "\ton_load()\n", - "\tcontinue;\n", - " }\n", - " const element = document.createElement(\"link\");\n", - 
" element.onload = on_load;\n", - " element.onerror = on_error;\n", - " element.rel = \"stylesheet\";\n", - " element.type = \"text/css\";\n", - " element.href = url;\n", - " console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n", - " document.body.appendChild(element);\n", - " } if (((window.Tabulator !== undefined) && (!(window.Tabulator instanceof HTMLElement))) || window.requirejs) {\n", - " var urls = ['https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/js/tabulator.min.js'];\n", - " for (var i = 0; i < urls.length; i++) {\n", - " skip.push(urls[i])\n", - " }\n", - " } if (((window.moment !== undefined) && (!(window.moment instanceof HTMLElement))) || window.requirejs) {\n", - " var urls = ['https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/luxon/build/global/luxon.min.js'];\n", - " for (var i = 0; i < urls.length; i++) {\n", - " skip.push(urls[i])\n", - " }\n", - " } var existing_scripts = []\n", - " var scripts = document.getElementsByTagName('script')\n", - " for (var i = 0; i < scripts.length; i++) {\n", - " var script = scripts[i]\n", - " if (script.src != null) {\n", - "\texisting_scripts.push(script.src)\n", - " }\n", - " }\n", - " for (var i = 0; i < js_urls.length; i++) {\n", - " var url = js_urls[i];\n", - " if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n", - "\tif (!window.requirejs) {\n", - "\t on_load();\n", - "\t}\n", - "\tcontinue;\n", - " }\n", - " var element = document.createElement('script');\n", - " element.onload = on_load;\n", - " element.onerror = on_error;\n", - " element.async = false;\n", - " element.src = url;\n", - " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", - " document.head.appendChild(element);\n", - " }\n", - " for (var i = 0; i < js_modules.length; i++) {\n", - " var url = js_modules[i];\n", - " if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n", - "\tif (!window.requirejs) {\n", - "\t on_load();\n", - "\t}\n", - "\tcontinue;\n", - " }\n", - " var element = document.createElement('script');\n", - " element.onload = on_load;\n", - " element.onerror = on_error;\n", - " element.async = false;\n", - " element.src = url;\n", - " element.type = \"module\";\n", - " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", - " document.head.appendChild(element);\n", - " }\n", - " for (const name in js_exports) {\n", - " var url = js_exports[name];\n", - " if (skip.indexOf(url) >= 0 || root[name] != null) {\n", - "\tif (!window.requirejs) {\n", - "\t on_load();\n", - "\t}\n", - "\tcontinue;\n", - " }\n", - " var element = document.createElement('script');\n", - " element.onerror = on_error;\n", - " element.async = false;\n", - " element.type = \"module\";\n", - " console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n", - " element.textContent = `\n", - " import ${name} from \"${url}\"\n", - " window.${name} = ${name}\n", - " window._bokeh_on_load()\n", - " `\n", - " document.head.appendChild(element);\n", - " }\n", - " if (!js_urls.length && !js_modules.length) {\n", - " on_load()\n", - " }\n", - " };\n", - "\n", - " function inject_raw_css(css) {\n", - " const element = document.createElement(\"style\");\n", - " element.appendChild(document.createTextNode(css));\n", - " document.body.appendChild(element);\n", - " }\n", - "\n", - " var js_urls = 
[\"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/js/tabulator.min.js\", \"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/luxon/build/global/luxon.min.js\"];\n", - " var js_modules = [];\n", - " var js_exports = {};\n", - " var css_urls = [\"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/css/tabulator_simple.min.css?v=1.4.4\"];\n", - " var inline_js = [ function(Bokeh) {\n", - " Bokeh.set_log_level(\"info\");\n", - " },\n", - "function(Bokeh) {} // ensure no trailing comma for IE\n", - " ];\n", - "\n", - " function run_inline_js() {\n", - " if ((root.Bokeh !== undefined) || (force === true)) {\n", - " for (var i = 0; i < inline_js.length; i++) {\n", - "\ttry {\n", - " inline_js[i].call(root, root.Bokeh);\n", - "\t} catch(e) {\n", - "\t if (!reloading) {\n", - "\t throw e;\n", - "\t }\n", - "\t}\n", - " }\n", - " // Cache old bokeh versions\n", - " if (Bokeh != undefined && !reloading) {\n", - "\tvar NewBokeh = root.Bokeh;\n", - "\tif (Bokeh.versions === undefined) {\n", - "\t Bokeh.versions = new Map();\n", - "\t}\n", - "\tif (NewBokeh.version !== Bokeh.version) {\n", - "\t Bokeh.versions.set(NewBokeh.version, NewBokeh)\n", - "\t}\n", - "\troot.Bokeh = Bokeh;\n", - " }} else if (Date.now() < root._bokeh_timeout) {\n", - " setTimeout(run_inline_js, 100);\n", - " } else if (!root._bokeh_failed_load) {\n", - " console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n", - " root._bokeh_failed_load = true;\n", - " }\n", - " root._bokeh_is_initializing = false\n", - " }\n", - "\n", - " function load_or_wait() {\n", - " // Implement a backoff loop that tries to ensure we do not load multiple\n", - " // versions of Bokeh and its dependencies at the same time.\n", - " // In recent versions we use the root._bokeh_is_initializing flag\n", - " // to determine whether there is an ongoing attempt to initialize\n", - " // bokeh, however for backward compatibility we also try to ensure\n", - " // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n", - " // before older versions are fully initialized.\n", - " if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n", - " root._bokeh_is_initializing = false;\n", - " root._bokeh_onload_callbacks = undefined;\n", - " console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n", - " load_or_wait();\n", - " } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n", - " setTimeout(load_or_wait, 100);\n", - " } else {\n", - " root._bokeh_is_initializing = true\n", - " root._bokeh_onload_callbacks = []\n", - " var bokeh_loaded = Bokeh != null && (Bokeh.version === py_version || (Bokeh.versions !== undefined && Bokeh.versions.has(py_version)));\n", - " if (!reloading && !bokeh_loaded) {\n", - "\troot.Bokeh = undefined;\n", - " }\n", - " load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n", - "\tconsole.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n", - "\trun_inline_js();\n", - " });\n", - " }\n", - " }\n", - " // Give older versions of the autoload script a head-start to ensure\n", - " // they initialize before we start loading newer version.\n", - " setTimeout(load_or_wait, 100)\n", - "}(window));" - ], - "application/vnd.holoviews_load.v0+json": "(function(root) {\n function now() {\n return new Date();\n }\n\n var force = true;\n var py_version = 
'3.4.2'.replace('rc', '-rc.').replace('.dev', '-dev.');\n var reloading = true;\n var Bokeh = root.Bokeh;\n\n if (typeof (root._bokeh_timeout) === \"undefined\" || force) {\n root._bokeh_timeout = Date.now() + 5000;\n root._bokeh_failed_load = false;\n }\n\n function run_callbacks() {\n try {\n root._bokeh_onload_callbacks.forEach(function(callback) {\n if (callback != null)\n callback();\n });\n } finally {\n delete root._bokeh_onload_callbacks;\n }\n console.debug(\"Bokeh: all callbacks have finished\");\n }\n\n function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n if (css_urls == null) css_urls = [];\n if (js_urls == null) js_urls = [];\n if (js_modules == null) js_modules = [];\n if (js_exports == null) js_exports = {};\n\n root._bokeh_onload_callbacks.push(callback);\n\n if (root._bokeh_is_loading > 0) {\n console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n return null;\n }\n if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n run_callbacks();\n return null;\n }\n if (!reloading) {\n console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n }\n\n function on_load() {\n root._bokeh_is_loading--;\n if (root._bokeh_is_loading === 0) {\n console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n run_callbacks()\n }\n }\n window._bokeh_on_load = on_load\n\n function on_error() {\n console.error(\"failed to load \" + url);\n }\n\n var skip = [];\n if (window.requirejs) {\n window.requirejs.config({'packages': {}, 'paths': {'tabulator': 'https://cdn.jsdelivr.net/npm/tabulator-tables@5.5.0/dist/js/tabulator.min', 'moment': 'https://cdn.jsdelivr.net/npm/luxon/build/global/luxon.min'}, 'shim': {}});\n require([\"tabulator\"], function(Tabulator) {\n\twindow.Tabulator = Tabulator\n\ton_load()\n })\n require([\"moment\"], function(moment) {\n\twindow.moment = moment\n\ton_load()\n })\n root._bokeh_is_loading = css_urls.length + 2;\n } else {\n root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n }\n\n var existing_stylesheets = []\n var links = document.getElementsByTagName('link')\n for (var i = 0; i < links.length; i++) {\n var link = links[i]\n if (link.href != null) {\n\texisting_stylesheets.push(link.href)\n }\n }\n for (var i = 0; i < css_urls.length; i++) {\n var url = css_urls[i];\n if (existing_stylesheets.indexOf(url) !== -1) {\n\ton_load()\n\tcontinue;\n }\n const element = document.createElement(\"link\");\n element.onload = on_load;\n element.onerror = on_error;\n element.rel = \"stylesheet\";\n element.type = \"text/css\";\n element.href = url;\n console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n document.body.appendChild(element);\n } if (((window.Tabulator !== undefined) && (!(window.Tabulator instanceof HTMLElement))) || window.requirejs) {\n var urls = ['https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/js/tabulator.min.js'];\n for (var i = 0; i < urls.length; i++) {\n skip.push(urls[i])\n }\n } if (((window.moment !== undefined) && (!(window.moment instanceof HTMLElement))) || window.requirejs) {\n var urls = ['https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/luxon/build/global/luxon.min.js'];\n for (var i = 0; i < urls.length; i++) {\n skip.push(urls[i])\n }\n } var existing_scripts = []\n var scripts = document.getElementsByTagName('script')\n for (var i = 0; i < scripts.length; i++) {\n var script 
= scripts[i]\n if (script.src != null) {\n\texisting_scripts.push(script.src)\n }\n }\n for (var i = 0; i < js_urls.length; i++) {\n var url = js_urls[i];\n if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (var i = 0; i < js_modules.length; i++) {\n var url = js_modules[i];\n if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onload = on_load;\n element.onerror = on_error;\n element.async = false;\n element.src = url;\n element.type = \"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n document.head.appendChild(element);\n }\n for (const name in js_exports) {\n var url = js_exports[name];\n if (skip.indexOf(url) >= 0 || root[name] != null) {\n\tif (!window.requirejs) {\n\t on_load();\n\t}\n\tcontinue;\n }\n var element = document.createElement('script');\n element.onerror = on_error;\n element.async = false;\n element.type = \"module\";\n console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n element.textContent = `\n import ${name} from \"${url}\"\n window.${name} = ${name}\n window._bokeh_on_load()\n `\n document.head.appendChild(element);\n }\n if (!js_urls.length && !js_modules.length) {\n on_load()\n }\n };\n\n function inject_raw_css(css) {\n const element = document.createElement(\"style\");\n element.appendChild(document.createTextNode(css));\n document.body.appendChild(element);\n }\n\n var js_urls = [\"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/js/tabulator.min.js\", \"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/luxon/build/global/luxon.min.js\"];\n var js_modules = [];\n var js_exports = {};\n var css_urls = [\"https://cdn.holoviz.org/panel/1.4.4/dist/bundled/datatabulator/tabulator-tables@5.5.0/dist/css/tabulator_simple.min.css?v=1.4.4\"];\n var inline_js = [ function(Bokeh) {\n Bokeh.set_log_level(\"info\");\n },\nfunction(Bokeh) {} // ensure no trailing comma for IE\n ];\n\n function run_inline_js() {\n if ((root.Bokeh !== undefined) || (force === true)) {\n for (var i = 0; i < inline_js.length; i++) {\n\ttry {\n inline_js[i].call(root, root.Bokeh);\n\t} catch(e) {\n\t if (!reloading) {\n\t throw e;\n\t }\n\t}\n }\n // Cache old bokeh versions\n if (Bokeh != undefined && !reloading) {\n\tvar NewBokeh = root.Bokeh;\n\tif (Bokeh.versions === undefined) {\n\t Bokeh.versions = new Map();\n\t}\n\tif (NewBokeh.version !== Bokeh.version) {\n\t Bokeh.versions.set(NewBokeh.version, NewBokeh)\n\t}\n\troot.Bokeh = Bokeh;\n }} else if (Date.now() < root._bokeh_timeout) {\n setTimeout(run_inline_js, 100);\n } else if (!root._bokeh_failed_load) {\n console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n root._bokeh_failed_load = true;\n }\n root._bokeh_is_initializing = false\n }\n\n function load_or_wait() {\n // Implement a backoff loop that tries to ensure we do not load multiple\n // versions of Bokeh and its dependencies at the same time.\n // In recent versions we use the root._bokeh_is_initializing flag\n // to determine whether there 
is an ongoing attempt to initialize\n // bokeh, however for backward compatibility we also try to ensure\n // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n // before older versions are fully initialized.\n if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n root._bokeh_is_initializing = false;\n root._bokeh_onload_callbacks = undefined;\n console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n load_or_wait();\n } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n setTimeout(load_or_wait, 100);\n } else {\n root._bokeh_is_initializing = true\n root._bokeh_onload_callbacks = []\n var bokeh_loaded = Bokeh != null && (Bokeh.version === py_version || (Bokeh.versions !== undefined && Bokeh.versions.has(py_version)));\n if (!reloading && !bokeh_loaded) {\n\troot.Bokeh = undefined;\n }\n load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n\tconsole.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n\trun_inline_js();\n });\n }\n }\n // Give older versions of the autoload script a head-start to ensure\n // they initialize before we start loading newer version.\n setTimeout(load_or_wait, 100)\n}(window));" - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/javascript": [ - "\n", - "if ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n", - " window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n", - "}\n", - "\n", - "\n", - " function JupyterCommManager() {\n", - " }\n", - "\n", - " JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n", - " if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n", - " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n", - " comm_manager.register_target(comm_id, function(comm) {\n", - " comm.on_msg(msg_handler);\n", - " });\n", - " } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n", - " window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n", - " comm.onMsg = msg_handler;\n", - " });\n", - " } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n", - " google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n", - " var messages = comm.messages[Symbol.asyncIterator]();\n", - " function processIteratorResult(result) {\n", - " var message = result.value;\n", - " console.log(message)\n", - " var content = {data: message.data, comm_id};\n", - " var buffers = []\n", - " for (var buffer of message.buffers || []) {\n", - " buffers.push(new DataView(buffer))\n", - " }\n", - " var metadata = message.metadata || {};\n", - " var msg = {content, buffers, metadata}\n", - " msg_handler(msg);\n", - " return messages.next().then(processIteratorResult);\n", - " }\n", - " return messages.next().then(processIteratorResult);\n", - " })\n", - " }\n", - " }\n", - "\n", - " JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n", - " if (comm_id in window.PyViz.comms) {\n", - " return window.PyViz.comms[comm_id];\n", - " } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n", - " var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n", - " var comm = comm_manager.new_comm(comm_id, 
-      [... remainder of the PyViz/holoviews comm-manager and mime-renderer
-       JavaScript, its "application/vnd.holoviews_load.v0+json" duplicate, and
-       the empty <style>/holoviews placeholder <div> display_data outputs
-       (exec id "c65dbe59-4a9d-49d9-97b7-e0d4305a7b6e") elided ...]
-   ],
+   "outputs": [],
    "source": [
     "import os\n",
     "from pathlib import Path\n",
@@ -1169,717 +27,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": null,
    "id": "8c96cff3-0bb9-4822-97b9-0330f9e12333",
    "metadata": {},
-   "outputs": [
-    [... display_data outputs elided: the Bokeh 3.4.2 / Panel 1.4.4 CDN autoload
-     script, a second copy of the PyViz comm-manager script and holoviews
-     placeholder <div>s (exec id "b2aa672d-82eb-4c01-ae69-0f830f13a3ea"), each
-     duplicated under "application/vnd.holoviews_load.v0+json" ...]
-   ],
+   "outputs": [],
    "source": [
     "# Enable bokeh as default backend, for interactive plots.\n",
@@ -1900,7 +51,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": null,
    "id": "4b2e2dc8-fe24-4437-bafb-6f66557c75a0",
    "metadata": {},
    "outputs": [],
@@ -1942,113 +93,17 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": null,
    "id": "70b21582-0bab-4530-930a-9bc5930f9641",
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[TaskID(pid=0, comm='swapper/0'),\n",
-       [... ~80 further TaskID(pid=..., comm=...) entries elided: swapper/1-5,
-        init, kernel threads (kworker/*, ksoftirqd/0, rcu_preempt, migration/*,
-        kcompactd0), sshd/sh/shutils/busybox helpers, trace-cmd, and the rt-app
-        workers big_0-0, big_1-1, small_0-2, small_1-3, small_2-4 ...]
-       " TaskID(pid=5741, comm='trace-cmd')]"
-      ]
-     },
-     "execution_count": 4,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
    "source": [
     "trace.ana.tasks.task_ids"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": null,
    "id": "49e249f4-0dd7-4bf8-843c-d5ae419bc082",
    "metadata": {},
    "outputs": [],
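The notebook sources kept in the hunks above assume an already-parsed `trace`
object. As a rough sketch only of how one is typically obtained (the
'trace.dat' path is an illustrative placeholder, and `ana = trace.ana` is
inferred from the surrounding cells, not part of this patch):

    from lisa.trace import Trace

    # Parse a binary ftrace/trace-cmd trace; the path is hypothetical.
    trace = Trace('trace.dat')
    ana = trace.ana

    # List every TaskID(pid, comm) seen in the trace, as in the cell above.
    print(trace.ana.tasks.task_ids)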
\n", - "
\n", - "
\n", - "" - ], - "text/plain": [ - ":Overlay\n", - " .Curve.Util :Curve [Time] (util)\n", - " .Marker.Util :Scatter [Time] (util)\n", - " .VSpan.Overutilized.I :VSpan [x,y]\n", - " .VSpan.Overutilized.II :VSpan [x,y]\n", - " .VSpan.Overutilized.III :VSpan [x,y]\n", - " .VSpan.Overutilized.IV :VSpan [x,y]\n", - " .VSpan.Overutilized.V :VSpan [x,y]\n", - " .VSpan.Overutilized.VI :VSpan [x,y]\n", - " .VSpan.Overutilized.VII :VSpan [x,y]\n", - " .VSpan.Overutilized.VIII :VSpan [x,y]\n", - " .VSpan.Overutilized.IX :VSpan [x,y]\n", - " .VSpan.Overutilized.X :VSpan [x,y]\n", - " .VSpan.Overutilized.XI :VSpan [x,y]\n", - " .VSpan.Overutilized.XII :VSpan [x,y]" - ] - }, - "execution_count": 6, - "metadata": { - "application/vnd.holoviews_exec.v0+json": { - "id": "45326c21-37f0-4f2e-ba46-51262c3e6b4a" - } - }, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Call a plot method that will return a holoviews object.\n", "util_fig = ana.load_tracking.plot_task_signals(task, signals=['util'])\n", @@ -2210,117 +158,10 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "f2f83800-aabf-4577-a9f6-824d0f71925d", "metadata": {}, - "outputs": [ - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.holoviews_exec.v0+json": "", - "text/html": [ - "
\n", - "
\n", - "
\n", - "" - ], - "text/plain": [ - ":Overlay\n", - " .Curve.Util :Curve [Time] (util)\n", - " .Marker.Util :Scatter [Time] (util)\n", - " .VSpan.Overutilized.I :VSpan [x,y]\n", - " .VSpan.Overutilized.II :VSpan [x,y]\n", - " .VSpan.Overutilized.III :VSpan [x,y]\n", - " .VSpan.Overutilized.IV :VSpan [x,y]\n", - " .VSpan.Overutilized.V :VSpan [x,y]\n", - " .VSpan.Overutilized.VI :VSpan [x,y]\n", - " .VSpan.Overutilized.VII :VSpan [x,y]\n", - " .VSpan.Overutilized.VIII :VSpan [x,y]\n", - " .VSpan.Overutilized.IX :VSpan [x,y]\n", - " .VSpan.Overutilized.X :VSpan [x,y]\n", - " .VSpan.Overutilized.XI :VSpan [x,y]\n", - " .VSpan.Overutilized.XII :VSpan [x,y]" - ] - }, - "execution_count": 7, - "metadata": { - "application/vnd.holoviews_exec.v0+json": { - "id": "552eba5a-c7a9-48fb-855a-8d0b1ffa2b6f" - } - }, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "util_fig.options('Curve', color='red')" ] @@ -2335,18 +176,10 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "9bf1057d-7239-490e-b113-ec60a49b38ac", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "/work/projects/lisa/util.html\n" - ] - } - ], + "outputs": [], "source": [ "path = base_path / 'util.html'\n", "ana.load_tracking.plot_task_signals(task, signals=['util'], filepath=path)\n", @@ -2372,108 +205,10 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "7411c4c6-68d5-4153-8965-7bcaaf0c4f99", "metadata": {}, - "outputs": [ - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.holoviews_exec.v0+json": "", - "text/html": [ - "
\n", - "
\n", - "
\n", - "" - ], - "text/plain": [ - ".NewCls at 0x7fdf5ee72330>" - ] - }, - "execution_count": 9, - "metadata": { - "application/vnd.holoviews_exec.v0+json": { - "id": "289b884f-5d5d-4d4c-80cf-130218701c91" - } - }, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "activation_fig = ana.tasks.plot_tasks_activation(output='ui')\n", "activation_fig" @@ -2492,108 +227,10 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "id": "723e79b7-f188-4363-a185-19130ba18e95", "metadata": {}, - "outputs": [ - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.holoviews_exec.v0+json": "", - "text/html": [ - "
\n", - "
\n", - "
\n", - "" - ], - "text/plain": [ - ".NewCls at 0x7fdf1624e330>" - ] - }, - "execution_count": 10, - "metadata": { - "application/vnd.holoviews_exec.v0+json": { - "id": "503ff72d-7efd-4d9e-843b-9be9d2f8c47d" - } - }, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "layout = activation_fig + util_fig\n", "layout.cols(1)" @@ -2619,124 +256,10 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "24295073-3b30-45a2-8835-99354f3d1470", "metadata": {}, - "outputs": [ - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.holoviews_exec.v0+json": "", - "text/html": [ - "
\n", - "
\n", - "
\n", - "" - ], - "text/plain": [ - ":Overlay\n", - " .Curve.Required_capacity :Curve [Time] (required_capacity)\n", - " .Marker.Required_capacity :Scatter [Time] (required_capacity)\n", - " .Curve.Util :Curve [Time] (util)\n", - " .Marker.Util :Scatter [Time] (util)\n", - " .VSpan.Overutilized.I :VSpan [x,y]\n", - " .VSpan.Overutilized.II :VSpan [x,y]\n", - " .VSpan.Overutilized.III :VSpan [x,y]\n", - " .VSpan.Overutilized.IV :VSpan [x,y]\n", - " .VSpan.Overutilized.V :VSpan [x,y]\n", - " .VSpan.Overutilized.VI :VSpan [x,y]\n", - " .VSpan.Overutilized.VII :VSpan [x,y]\n", - " .VSpan.Overutilized.VIII :VSpan [x,y]\n", - " .VSpan.Overutilized.IX :VSpan [x,y]\n", - " .VSpan.Overutilized.X :VSpan [x,y]\n", - " .VSpan.Overutilized.XI :VSpan [x,y]\n", - " .VSpan.Overutilized.XII :VSpan [x,y]" - ] - }, - "execution_count": 11, - "metadata": { - "application/vnd.holoviews_exec.v0+json": { - "id": "66aef5ed-ac16-4cdb-a04f-f4c1f1ef6fb4" - } - }, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "capa_fig = ana.load_tracking.plot_task_required_capacity(task=task)\n", "capa_fig * util_fig" @@ -2754,108 +277,10 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "id": "202fd708-8a6f-4c2a-9cfb-e409e9c4b490", "metadata": {}, - "outputs": [ - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": {}, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.holoviews_exec.v0+json": "", - "text/html": [ - "
\n", - "
\n", - "
\n", - "" - ], - "text/plain": [ - ".NewCls at 0x7fdf401b9a90>" - ] - }, - "execution_count": 12, - "metadata": { - "application/vnd.holoviews_exec.v0+json": { - "id": "c4cb577b-ac09-4a3b-9bb8-57b4581c2583" - } - }, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "util_df = ana.load_tracking.df_task_signal(task=task, signal='util')\n", "events_df = ana.notebook.df_all_events()\n", diff --git a/lisa/_assets/binaries/__init__.py b/lisa/_assets/binaries/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_assets/binaries/arm64/__init__.py b/lisa/_assets/binaries/arm64/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_assets/binaries/armeabi/__init__.py b/lisa/_assets/binaries/armeabi/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_assets/binaries/x86_64/__init__.py b/lisa/_assets/binaries/x86_64/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_assets/kmodules/__init__.py b/lisa/_assets/kmodules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_assets/kmodules/lisa/__init__.py b/lisa/_assets/kmodules/lisa/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_assets/kmodules/lisa/introspect_header.py b/lisa/_assets/kmodules/lisa/introspect_header.py index 72b00a9c04c82308fb9fb5362744f61330cdfab2..168ba9765c43ea2b8b1f915c71e8c5f4d1e38235 100755 --- a/lisa/_assets/kmodules/lisa/introspect_header.py +++ b/lisa/_assets/kmodules/lisa/introspect_header.py @@ -22,7 +22,7 @@ import abc import sys import itertools import argparse -from collections import namedtuple, deque, defaultdict +from collections import deque, defaultdict import functools import json import re diff --git a/lisa/_assets/kmodules/lisa/lisa/__init__.py b/lisa/_assets/kmodules/lisa/lisa/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_doc/__init__.py b/lisa/_doc/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/_doc/helpers.py b/lisa/_doc/helpers.py index 096dfc03a2d71f12022c7a717add2c4567d61d81..88f393ea5827147bc3e0597ccd3d4b240a8d9552 100644 --- a/lisa/_doc/helpers.py +++ b/lisa/_doc/helpers.py @@ -14,7 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Sphinc documentation building helpers. 
+""" +import enum import io import contextlib import subprocess @@ -26,32 +30,45 @@ import types import abc import warnings from collections.abc import Mapping +from collections import ChainMap from urllib.request import urlopen, Request from urllib.error import HTTPError, URLError -from operator import itemgetter +from operator import attrgetter, itemgetter import collections from concurrent.futures import ThreadPoolExecutor from textwrap import dedent - -from docutils.parsers.rst import Directive, directives -from docutils.parsers.rst.directives import flag +import importlib +from pathlib import Path +import builtins +import copy +import typing + +from sphinx.util.docutils import SphinxDirective +from docutils.parsers.rst import directives from docutils import nodes from docutils.statemachine import ViewList from sphinx.util.nodes import nested_parse_with_titles from sphinx.ext.autodoc import exclude_members_option +from sphinx.pycode import ModuleAnalyzer +from sphinx.errors import PycodeError import lisa import lisa.analysis from lisa.analysis.base import AnalysisHelpers, TraceAnalysisBase, measure_time -from lisa.utils import get_subclasses, import_all_submodules, DEPRECATED_MAP, get_sphinx_name, groupby, get_short_doc, order_as, is_link_dead +from lisa.utils import get_subclasses, import_all_submodules, _DEPRECATED_MAP, get_obj_name, groupby, get_short_doc, order_as, is_link_dead, resolve_dotted_name, get_sphinx_role, _get_parent_namespace, _get_parent_namespaces, get_parent_namespace, memoized, _DelegatedBase, ffill, fixedpoint, deduplicate, fold from lisa.trace import TraceEventCheckerBase from lisa.conf import KeyDesc, SimpleMultiSrcConf, TopLevelKeyDesc from lisa.version import format_version import lisa._git -class RecursiveDirective(Directive): +def _sphinx_recursive_parse(directive, viewlist): + node = nodes.Element() + nested_parse_with_titles(directive.state, viewlist, node) + return node.children + +class RecursiveDirective(SphinxDirective): """ Base class helping nested parsing. @@ -61,7 +78,7 @@ class RecursiveDirective(Directive): will be interpreted as reStructuredText. """ option_spec = { - 'literal': flag, + 'literal': directives.flag, } def parse_nested(self, txt, source=None): @@ -76,10 +93,48 @@ class RecursiveDirective(Directive): node['language'] = 'text' return [node] else: - txt = ViewList(txt.splitlines(), source) - node = nodes.Element() - nested_parse_with_titles(self.state, txt, node) - return node.children + viewlist = ViewList(txt.splitlines(), source) + return _sphinx_recursive_parse(self, viewlist) + + +class WithRefCtxDirective(SphinxDirective): + """ + Allow temporarily switching to a different current class and module for + reference resolution purpose. 
+ """ + has_content = True + + option_spec = { + 'module': directives.unchanged, + 'class': directives.unchanged, + } + + def run(self): + ctx = { + 'py:module': self.options.get('module'), + 'py:class': self.options.get('class'), + } + ctx = { + k: v + for k, v in ctx.items() + if v + } + notset = object() + old = { + k: self.env.ref_context.get(k, notset) + for k in ctx.keys() + } + + try: + self.env.ref_context.update(ctx) + nodes = _sphinx_recursive_parse(self, self.content) + finally: + for k, v in old.items(): + if v is notset: + del self.env.ref_context[k] + else: + self.env.ref_context[k] = v + return nodes class ExecDirective(RecursiveDirective): @@ -92,6 +147,10 @@ class ExecDirective(RecursiveDirective): import sys print(sys.version) + This directive will also register a ``lisa-exec-state`` hook that will be + called with no extra parameters. The return value will be made available as + the ``state`` global variable injected in the block of code. + Options: * ``literal``: If set, a literal block will be used, otherwise the text @@ -103,15 +162,14 @@ class ExecDirective(RecursiveDirective): stdout = io.StringIO() code = '\n'.join(self.content) code = dedent(code) + + state = self.env.app.emit_firstresult('lisa-exec-state') with contextlib.redirect_stdout(stdout): - exec(code, {}) + exec(code, {'state': state}) out = stdout.getvalue() return self.parse_nested(out) -directives.register_directive('exec', ExecDirective) - - class RunCommandDirective(RecursiveDirective): """ reStructuredText directive to execute the specified command and insert @@ -135,9 +193,9 @@ class RunCommandDirective(RecursiveDirective): """ has_content = True option_spec = { - 'ignore-error': flag, - 'capture-stderr': flag, - 'literal': flag, + 'ignore-error': directives.flag, + 'capture-stderr': directives.flag, + 'literal': directives.flag, } def run(self): @@ -159,7 +217,1369 @@ class RunCommandDirective(RecursiveDirective): return self.parse_nested(out, cmd) -directives.register_directive('run-command', RunCommandDirective) +@functools.lru_cache(maxsize=128) +def sphinx_module_attrs_doc(mod): + modname = mod.__name__ + if modname in ('builtins', '__main__'): + return {} + else: + try: + analyzer = ModuleAnalyzer.for_module(modname) + # Some extension modules don't have source code and therefore cannot be + # found by ModuleAnalyzer + except PycodeError: + return {} + else: + attrs = { + '.'.join(x for x in name if x): '\n'.join(doc) + for name, doc in analyzer.find_attr_docs().items() + } + return attrs + + +def relname(parent, child): + return _relname( + parent=get_obj_name(parent), + child=get_obj_name(child), + ) + +def _relname(parent, child): + return '.'.join( + a + for a, b in itertools.zip_longest( + child.split('.'), + parent.split('.') + ) + if a != b and a is not None + ) + + +_GETATTR_NOTSET = object() +def silent_getattr(obj, attr, default=_GETATTR_NOTSET): + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + try: + return getattr(obj, attr) + except AttributeError: + if default is _GETATTR_NOTSET: + raise + else: + return default + + +def silent_hasattr(obj, attr): + try: + silent_getattr(obj, attr) + except AttributeError: + return False + else: + return True + + +def _resolve_dotted_name(name): + return resolve_dotted_name( + name, + getattr=silent_getattr, + ) + + +def _get_delegated_members(cls, avoid_delegated=None): + if avoid_delegated: + return _do_get_delegated_members(cls, avoid_delegated) + else: + return _get_delegated_members_memoized(cls) + + 
+@functools.lru_cache(maxsize=2048)
+def _get_delegated_members_memoized(cls):
+    return _do_get_delegated_members(cls, None)
+
+
+def _do_get_delegated_members(cls, avoid_delegated):
+    if issubclass(cls, _DelegatedBase) and cls._ATTRS_DELEGATED_TO_CLASSES:
+        # Make sure we do not include anything we inherited from.
+        avoid_delegated = set(avoid_delegated or [])
+
+        # Including the entire MRO prevents O(N^2) complexity in case a
+        # subclass delegates to one of its bases. This is ok to do since
+        # whatever these bases provide, we already have via inheritance.
+        avoid_delegated.update(inspect.getmro(cls))
+
+        classes = deduplicate(
+            [
+                __cls
+                for _cls in cls._ATTRS_DELEGATED_TO_CLASSES
+                for __cls in (_cls, *get_subclasses(_cls, mro_order=True, only_leaves=True))
+                if __cls not in avoid_delegated
+            ],
+            keep_last=False,
+        )
+
+        # Any class we already visited is to be avoided, so that we avoid
+        # infinite recursion in case of cycles in the delegation graph. This
+        # can happen if sibling classes A and B both define a member (or
+        # inherit from it). In that case, the member would appear to be
+        # inherited from A from the point of view of B and vice versa.
+        avoid_delegated.update(classes)
+
+        return _merge_members_stack(
+            SphinxDocObject.from_namespace(_cls)._get_members(
+                inherited=True,
+                allow_reexport=False,
+                avoid_delegated=avoid_delegated,
+            )
+            for _cls in classes
+        )
+    else:
+        return {}
+
+
+def _with_refctx(docobj, rst):
+    obj = docobj.obj
+    if isinstance(obj, type) or inspect.ismodule(obj):
+        ns = docobj
+    else:
+        try:
+            ns = docobj.parent
+        except ValueError:
+            ns = None
+
+    if ns:
+        if isinstance(ns.obj, type):
+            _class = f'   :class: {ns.fullname}\n'
+            _mod = f'   :module: {ns.__module__}'
+        elif inspect.ismodule(ns.obj):
+            _class = ''
+            _mod = f'   :module: {ns.fullname}'
+        else:
+            raise TypeError(f'Namespace type not handled: {ns}')
+    else:
+        _class = ''
+        _mod = ''
+
+    return f'''
+.. withrefctx::
+{_mod}
+{_class}
+
+{indent(rst, level=3)}
+'''
+
+
+def _fixup_inherited_doc(docobj, inherited):
+    # If a member overrides another member in the stack but the most
+    # derived one has no doc while the other one has one, we keep the
+    # original one. This implements "docstring inheritance".
+    if inherited.doc and not docobj.doc:
+        # Note that we don't just take the inherited docobj, as it might
+        # not have the same kind (e.g. it is a property in the base
+        # class and becomes a class attribute in a derived class). If
+        # we don't preserve that kind properly, autodoc will not be
+        # able to work on those items.
+        return docobj.replace(
+            doc=inherited.doc,
+            _doc_refctx=inherited,
+            # Docstring inheritance in autodoc only works if the item kind is
+            # the same. But in our case, it is a string class attribute that
+            # inherits from an abstract property, so autodoc does not propagate
+            # the docstring that we have inherited.
+            _broken_autodoc_inheritance=(
+                inherited._broken_autodoc_inheritance or
+                docobj.autodoc_kind != inherited.autodoc_kind
+            ),
+        )
+    else:
+        return docobj
+
+
+def _merge_members_stack(stack):
+    """
+    Merge a stack of members (most derived first) with priority to the first
+    entry in the stack for each member (the most derived one).
+ """ + merged = {} + stack = list(stack) + for ns in reversed(stack): + merged.update({ + membername: ( + docobj + if (inherited := merged.get(membername)) is None else + _fixup_inherited_doc(docobj, inherited) + ) + for membername, docobj in ns.items() + }) + + return merged + + +def _sort_members(app, members): + def key(x): + membername, docobj = x + inherited, place, visibility = docobj.resolve_inheritance_style(app) + return ( + # Make deprecated members appear last in listings + docobj.is_deprecated, + # Make inherited members come last. + inherited and visibility == 'public', + docobj.fullname, + ) + return dict(sorted(members.items(), key=key)) + + +_NOTSET = object() + + +class SphinxDocObject: + """ + Represent a variable that has a "docstring" attached to it. + + Those docstrings are not really docstrings as they are merely a + string literal floating below a variable assignment, but sphinx + recognizes those by parsing the source. + """ + def __init__(self, modname, qualname, doc=_NOTSET, obj=_NOTSET, doc_refctx=None, parent=None, _can_be_resolved=True): + def _getdoc(obj): + if obj is _NOTSET: + return None + else: + doc = inspect.getdoc(obj) + # Instances of classes that have a docstring will have have a + # __doc__ attribute available on them. This makes + # inspect.getdoc() return it even for things like a dict + # instance. In those cases, we do not want to pick it up. + if obj is not type(obj) and doc == type(obj).__doc__: + return None + else: + return doc + + self.__qualname__ = qualname + self.__module__ = modname if modname else None + fullname = self.fullname + + if obj is _NOTSET: + try: + obj = _resolve_dotted_name(fullname) + # It is possible for an attribute to be listed by __dir__ and be + # implemented by a descriptor and yet it raises an AttributeError: + # https://stackoverflow.com/questions/24914584/abstractmethods-and-attributeerror + except Exception: + obj = _NOTSET + + if doc is _NOTSET: + doc = _getdoc(obj) + + self.doc = doc + self.obj = obj + + self._doc_refctx = doc_refctx + self._broken_autodoc_inheritance = False + self._parent = parent + + fullname = self.fullname + self._can_be_resolved = _can_be_resolved and not any(x in fullname for x in ('', '')) + + def __eq__(self, other): + def eq(obj1, obj2): + try: + # Trying == first is important since some names will provide a + # new object every time they are resolved, such as a + # classmethod that will get instantiated every time. + return obj1 == obj2 + except Exception: + return obj1 is obj2 + + return self is other or ( + isinstance(other, self.__class__) and + eq(self.obj, other.obj) and + self.fullname == other.fullname and + self.doc == other.doc and + self._can_be_resolved == other._can_be_resolved and + self._broken_autodoc_inheritance == other._broken_autodoc_inheritance + ) + + def __hash__(self): + return hash((self.fullname, self.doc)) + + @property + def is_deprecated(self): + return any( + docobj.fullname in get_deprecated_map() + for docobj in ( + self, + self.inherited_from(real=True, app=None), + ) + ) + + @property + def doc_refctx(self): + """ + The original SphinxDocObject in which the documentation was written. + + This allows correctly resolving relative class and module references in + docstrings: + * The class name is the derived class, so that we get the most + local and public reference target possible. 
+          relative references to methods to work even if we inherit from a
+          private base class, since those references will be resolved on
+          the public derived class instead.
+
+        * The module is the original one. If it is private, so be it and
+          references will be broken. Method docstrings can only reference
+          public items or they will not work.
+        """
+        if self._doc_refctx:
+            return self._doc_refctx.doc_refctx
+        else:
+            docobj = self.inherited_from(app=None, real=True, topmost=False)
+            if docobj == self and docobj._doc_refctx == self._doc_refctx:
+                return docobj
+            else:
+                return docobj.doc_refctx
+
+    @property
+    def __doc__(self):
+        if self.doc is None:
+            return None
+        else:
+            return _with_refctx(self.doc_refctx, self.doc)
+
+    def replace(self, **kwargs):
+        new = copy.copy(self)
+        for attr, val in kwargs.items():
+            setattr(new, attr, val)
+        return new
+
+    def __repr__(self):
+        return f'{self.__class__.__qualname__}({self.__module__!r}, {self.__qualname__!r})'
+
+    def __str__(self):
+        return repr(self)
+
+    @property
+    @memoized
+    def mro(self):
+        mro = inspect.getmro(self.obj)
+        return [
+            self.from_namespace(base)
+            for base in mro
+        ]
+
+    @property
+    def is_dynamic(self):
+        if self.exists:
+            fullname = self.fullname
+            try:
+                _resolve_dotted_name(fullname)
+            except AttributeError:
+                return True
+            else:
+                return False
+        else:
+            return False
+
+    @property
+    def exists(self):
+        # Look up the member we inherited from, as DelegateToAttr might
+        # introduce dynamic members via SphinxDocObject.mro that only
+        # exist on live instances of the class, and in the class of the
+        # attribute we delegate to.
+        inherited_from = self.inherited_from(app=None, real=True)
+
+        if inherited_from._can_be_resolved:
+            try:
+                _resolve_dotted_name(inherited_from.fullname)
+            except AttributeError:
+                return False
+            else:
+                return True
+        else:
+            # We cannot resolve attribute names since they might only be
+            # available at runtime on class instances, so we just assume they
+            # do exist.
+            return True
+
+    @memoized
+    def inherited_from(self, app, real=False, topmost=True):
+        assert app is not None or real
+
+        def get(docobj):
+            parent = docobj.parent
+            if parent is not None and isinstance(parent.obj, type):
+                membername = docobj.membername
+
+                def get_inherited(base, membername):
+                    cls = base.obj
+                    base_members = base.get_members(inherited=False)
+                    return base_members.get(membername)
+
+                # Make sure the member we inherit from has its docstring fixed
+                # up the same way as when merging an inheritance stack.
+ def fixup_docstrings(xs): + return reversed(list(itertools.accumulate( + reversed(list(xs)), + lambda x_super, x: ( + x + if x_super is None else + ( + x_super + if x is None else + _fixup_inherited_doc(x, x_super) + ) + ) + ))) + + mro = parent.mro + inherited_members = ( + get_inherited(base, membername) + for base in mro + ) + inherited_members1, inherited_members2 = itertools.tee(inherited_members) + iterator = ( + (realinherited, publicbase) + for (select, realinherited, publicbase) in zip( + (x is not None for x in inherited_members1), + fixup_docstrings(inherited_members2), + ffill( + mro, + select=lambda _docobj: (not _docobj.autodoc_is_skipped(app)) if app else True, + ), + ) + if select + ) + + try: + (realinherited, publicbase), *_ = iterator + # It is not inherited from any base class + except ValueError: + delegated_members = _get_delegated_members(parent.obj) + return delegated_members.get(membername, docobj) + else: + if real: + # Return the attribute we actually inherit from + return realinherited + else: + if publicbase is None: + return docobj + else: + # Return the attribute on the top-most public class + # we appear to be inheriting from. This will hide + # the real location we inherit from if that + # location is in a private base class. + members = publicbase.get_members(inherited=True) + return members.get(membername, docobj) + else: + return docobj + + return fixedpoint(get, init=self, limit=None if topmost else 1, raise_=False) + + def resolve_inheritance_style(self, app): + def get_toplevel_package(modname): + package, *_ = modname.split('.') + return package + + if self.is_class_member: + # Use the "public parent" rather than the "real parent", so we + # correctly infer "public" visibility rather than private. + inherited_from = self.inherited_from(app, real=False) + inherited = inherited_from != self + + if inherited: + # If both the parent of the attribute (where it is inherited) and the real + # parent of the attribute (where it is defined) are located in the same + # package, we will just forward the docstring in the inherited class. + # + # This ensures we will have the docstring available for the user even if we + # inherit the method from a private class. + if get_toplevel_package(self.__module__) == get_toplevel_package(inherited_from.__module__): + # If we inherit from a skipped parent, we silently forward + # the docstring, so that it stays an implementation detail + # hidden to the user. + if any( + parent.autodoc_is_skipped(app) + for parent in inherited_from.parents + ): + return (True, 'local', 'private') + else: + return (True, 'local', 'public') + # Otherwise if we are inheriting from a class defined in another package, + # we replace the docstring with a stub reference. This ensures we can build + # the documentation cleanly as we can fix any inherited docstring defined + # in our package. + else: + return (True, 'foreign', 'public') + + return (False, 'local', 'public') + + @property + def is_class_member(self): + try: + parent = self.parent + except ValueError: + # If we cannot resolve the parent, this means we are inside a class + # since modules cannot be defined as local variables, but classes + # can. 
+ return True + else: + if parent: + return isinstance(parent.obj, type) + else: + return False + + def get_short_doc(self, *args, **kwargs): + proxy = types.SimpleNamespace(__doc__=self.doc) + doc = get_short_doc(proxy, *args, **kwargs) + return _with_refctx(self.doc_refctx, doc) + + @memoized + def get_name(self, *, style=None, abbrev=False): + # For some reason, autodoc does not display DynamicClassAttribute + if style == 'rst' and isinstance(self.obj, types.DynamicClassAttribute): + name = self.membername if abbrev else self.fullname + return f':code:`{name}`' + else: + return get_obj_name(self, abbrev=abbrev, style=style, name=self.fullname) + + @memoized + def autodoc_is_skipped(self, app): + kind = self.autodoc_kind + if kind in ('property', 'data'): + kind = 'attribute' + + return app.emit_firstresult( + 'autodoc-skip-member', kind, self.fullname, self.obj, False, {} + ) + + @property + @memoized + def autodoc_kind(self): + def get(obj, parent, is_classmember): + if is_classmember: + if ( + inspect.isfunction(obj) or + inspect.ismethod(obj) or + isinstance(obj, (classmethod, staticmethod)) + ): + return 'method' + else: + standalone = get(obj, parent=parent, is_classmember=False) + if standalone == 'data': + # autodoc does not include the docstring of an Enum + # member if ".. autoattribute::" directive is used, but + # it does if ".. autodata::" is used. + if parent and isinstance(parent.obj, type) and issubclass(parent.obj, enum.Enum): + return 'data' + else: + return 'attribute' + elif standalone == 'function': + return 'method' + else: + return standalone + elif isinstance(obj, type): + if issubclass(obj, BaseException): + return 'exception' + else: + return 'class' + elif inspect.ismodule(obj): + return 'module' + elif isinstance(obj, property): + return 'property' + elif callable(obj): + return 'function' + else: + # Deal with decorators + unwrapped = inspect.unwrap(obj) + if unwrapped is obj: + return 'data' + else: + return get(obj=obj, parent=parent, is_classmember=is_classmember) + + try: + parent = self.parent + except ValueError: + parent = None + + return get(obj=self.obj, parent=parent, is_classmember=self.is_class_member) + + @property + @memoized + def sphinx_role(self): + return get_sphinx_role(self, name=self.fullname) + + @property + def fullname(self): + modname = self.__module__ + qualname = self.__qualname__ + if modname: + return f'{modname}.{qualname}' + else: + return qualname + + @property + def membername(self): + return self.__qualname__.rsplit('.', 1)[-1] + + @property + def parent(self): + return self._parent or self._resolve_parent() + + @memoized + def _resolve_parent(self): + namespace = _get_parent_namespace(self.fullname) + return self.from_namespace(namespace) if namespace else None + + @property + @memoized + def parents(self): + def gen(): + parent = self.parent + while parent: + yield parent + parent = parent.parent + return list(gen()) + + @property + def __wrapped__(self): + return self.obj + + @classmethod + def from_namespace(cls, namespace): + if isinstance(namespace, type): + modname = namespace.__module__ + qualname = namespace.__qualname__ + elif inspect.ismodule(namespace): + *modname, qualname = namespace.__name__.split('.') + modname = '.'.join(modname) + else: + raise ValueError(f'Namespace not handled: {namespace}') + + return cls._from_namespace( + modname=modname, + qualname=qualname, + obj=namespace, + ) + + @classmethod + @functools.lru_cache(maxsize=2048) + def _from_namespace(cls, modname, qualname, obj): + doc = 
inspect.getdoc(obj) + return cls( + modname=modname, + qualname=qualname, + doc=doc, + obj=obj, + ) + + @classmethod + @functools.lru_cache(maxsize=2048) + def _from_name(cls, name): + def get_member(parent, membername): + if parent is None: + try: + ns = _resolve_dotted_name(membername) + except (AttributeError, ImportError) as e: + raise ValueError(str(e)) + else: + return cls.from_namespace(ns) + else: + try: + members = parent.get_members(allow_reexport=True) + except TypeError as e: + raise ValueError(str(e)) + else: + try: + return members[membername] + except KeyError as e: + raise ValueError(str(e)) + + assert name + return fold( + get_member, + name.split('.'), + init=None, + ) + + @classmethod + def from_name(cls, name, obj=_NOTSET): + """ + Resolve ``name`` to a :class:`SphinxDocObject`. + + :param name: Fully qualified name of the entity to document. + :type name: str + + :param obj: If passed, it will be used as the ``obj`` parameter for + :class:`SphinxDocObject`. If omitted, it will be resolved from the + name. + :type obj: object + """ + docobj = cls._from_name(name) + if obj is not _NOTSET: + docobj = docobj.replace(obj=obj) + + return docobj + + @memoized + def get_members(self, inherited=True, allow_reexport=False): + return self._get_members( + inherited=inherited, + allow_reexport=allow_reexport, + avoid_delegated=None, + ) + + def _get_members(self, inherited, allow_reexport, avoid_delegated): + """ + ``inspect.getmembers`` plus the attributes that are documented with a + "docstring" that is actually just a string literal floating after a + variable assignment. + """ + cls = self.__class__ + namespace = self.obj + members_stack = [] + + if isinstance(namespace, type): + def make_docobj(membername, obj, _can_be_resolved=True, doc=_NOTSET): + return cls( + modname=namespace.__module__, + qualname=f'{namespace.__qualname__}.{membername}', + obj=obj, + doc=doc, + _can_be_resolved=_can_be_resolved, + parent=self, + ) + + def cls_members(namespace, _cls): + """ + Provide the members of a class, but not anything inherited + """ + def get_instance_dir(cls): + try: + instance_dir = cls.__instance_dir__ + except AttributeError: + return {} + else: + return dict(instance_dir()) + + def get_sphinx_attrs(namespace, base): + def split_name(name): + try: + basename, membername = name.rsplit('.', 1) + except ValueError: + basename = None + membername = name + + return (basename, membername) + + mod = inspect.getmodule(base) + attrs = sphinx_module_attrs_doc(mod) + return { + membername: cls( + modname=namespace.__module__, + qualname=f'{namespace.__qualname__}.{membername}', + doc=doc, + _can_be_resolved=False, + parent=self, + ) + for (basename, membername), doc in ( + (split_name(name), doc) + for name, doc in attrs.items() + ) + if basename == base.__qualname__ + } + + def getmembers(_cls): + """ + Provide all members of a class, including inherited ones. + """ + def get_dynamic_attr_doc(_cls): + docobj = cls.from_namespace(_cls) + ref = docobj.get_name(style='rst') + return f'See {ref}' + + return { + **{ + membername: make_docobj( + membername=membername, + obj=None, + # This is going to be a dynamic attribute, we + # cannot expect lookup to succeed on the class, + # only on instances. 
+ _can_be_resolved=False, + doc=get_dynamic_attr_doc(obj), + ) + for membername, obj in get_instance_dir(_cls).items() + }, + **{ + membername: make_docobj(membername, obj) + for membername, obj in inspect.getmembers(_cls) + }, + **get_sphinx_attrs(namespace, _cls), + } + + members = getmembers(_cls) + base_members = _merge_members_stack( + getmembers(base) + for base in _cls.__bases__ + ) + # This is more correct than _cls.__dict__ as it will include + # dynamic attributes that are reported by dir() but not in + # __dict__. + members = { + membername: docobj + for membername, docobj in members.items() + if ( + # This was defined in the class itself, so it's always + # taken + membername in silent_getattr(_cls, '__dict__', {}) + or + # The membername might not be in __dict__ if it is a + # dynamic attribute, so we still take it as long as the + # base classes do not provide it. + membername not in base_members + or + # The membername exists in the base but has been + # overridden with a new docstring. + ( + # members we gather with inspect.getmembers() will + # have doc set to None. If the doc is set in a base + # class and then it is None in the derived class, + # it means we are inherited. + docobj.doc is not None and + base_members[membername].doc != docobj.doc + ) + ) + } + return members + + def _filter(membername, docobj): + return True + + mro = self.mro + assert mro[0] == self + bases = mro if inherited else [mro[0]] + bases = [base.obj for base in bases] + + members_stack.extend( + cls_members(namespace, _cls) + for _cls in bases + ) + + if inherited: + # Lowest priority to delegated members, since they are + # implemented with __getattr__ and can be overridden by any + # actual member. + members_stack.append({ + membername: make_docobj( + membername=membername, + obj=docobj.obj, + doc=docobj.doc, + _can_be_resolved=False, + ) + for membername, docobj in _get_delegated_members(namespace, avoid_delegated).items() + }) + + elif inspect.ismodule(namespace): + attrs = sphinx_module_attrs_doc(namespace) + members_stack.append({ + membername: cls( + modname=namespace.__name__, + qualname=membername, + doc=doc, + _can_be_resolved=False, + parent=self, + ) + for membername, doc in attrs.items() + # Names with dot in them mean the variable is actually a class + # attribute rather than a top-level variable, so we don't want + # them at module-level listing. + if '.' not in membername + }) + + members_stack.append({ + membername: cls( + modname=namespace.__name__, + qualname=membername, + obj=obj, + parent=self, + ) + for membername, obj in inspect.getmembers(namespace) + if ( + # Member actually defined in the module rather than just + # imported. + ( + allow_reexport or (inspect.getmodule(obj) is namespace) + ) or + # Submodule. + ( + inspect.ismodule(obj) and obj.__name__.startswith(f'{namespace.__name__}.') + ) + ) + }) + + def _filter(membername, docobj): + # Modules might have undocumented globals that are actually + # defined somewhere else and simply imported, so remove them. 
+ return docobj.parent == self + else: + raise TypeError(f'Namespace not handled: {namespace}') + + members = _merge_members_stack(members_stack) + + def check(membername, docobj): + parent = docobj.parent + if parent == self: + return True + else: + raise ValueError(f'Member {docobj} is not a child of expected parent: expected={self} actual={docobj.parent}') + + return { + membername: docobj + for membername, docobj in sorted(members.items()) + if _filter(membername, docobj) and check(membername, docobj) + } + + +class ModuleListingDirective(RecursiveDirective): + """ + reStructuredText directive similar to autosummary but with correct handling + of inheritance:: + + .. module-listing:: mymodule + + Options: + """ + STUBS_FOLDER = 'generated' + + required_arguments = 1 + + @classmethod + def _run(cls, app, curr_loc, stubs_loc, modname, make_stub): + + def listing_entry(app, docobj): + inherited, place, visibility = docobj.resolve_inheritance_style(app) + + tags = [] + if inherited and visibility == 'public': + tags.append('inherited') + ref_target = docobj.inherited_from(app) + else: + ref_target = docobj + + if ref_target.is_deprecated: + tags.append('deprecated') + + ref = ref_target.get_name(style='rst', abbrev=True) + + if place == 'local': + doc = docobj.get_short_doc(style='rst') + else: + long_ref = ref_target.get_name(style='rst', abbrev=False) + # The doc could be invalid reStructuredText, so we just do not + # include it and use a link instead. + doc = f'See {long_ref}' + + tags = ', '.join(sorted(tags)) + tags = f' :sup:`{tags}`' if tags else '' + return f''' +* - {ref}{tags} + - +{indent(doc or '', level=3)} +'''.strip() + + def toc_entry(docobj, name, curr_loc, stubs_loc, make_stub): + name = docobj.get_name() + # Remove the common prefix coming from the parent module name + # to avoid O(N^2) ToC entry size + membername = docobj.membername + + path = stubs_loc / f'{name}.rst' + + if make_stub: + stub_content = process_member( + app=app, + docobj=docobj, + curr_loc=path, + stubs_loc=stubs_loc, + make_stub=make_stub, + ) + path.parent.mkdir(parents=True, exist_ok=True) + stub_content = stub_content.encode('utf-8') + with open(path, 'w+b') as f: + existing = f.read() + if stub_content != existing: + f.write(stub_content) + + toc_ref = str( + (stubs_loc / name).relative_to( + curr_loc.parent + ) + ) + + entry = f'{membername} <{toc_ref}>' + return entry + + def process_member(app, docobj, curr_loc, stubs_loc, make_stub): + if inspect.ismodule(docobj.obj): + return process_mod( + app=app, + docobj=docobj, + curr_loc=curr_loc, + stubs_loc=stubs_loc, + make_stub=make_stub, + ) + else: + return process_leaf(app=app, docobj=docobj) + + def group_members(app, members): + grouped = order_as( + groupby( + _sort_members(app, members).items(), + key=lambda x: x[1].autodoc_kind + ), + order_as=[ + 'module', + 'data', + 'class', + 'attribute', + 'property', + 'method', + 'function', + 'exception', + ], + key=itemgetter(0), + ) + return { + key: dict(_members) + for key, _members in grouped + } + + def make_grouped(app, members, make_group): + def make_pretty(name): + return { + 'method': 'methods', + 'attribute': 'attributes', + 'exception': 'exceptions', + 'class': 'classes', + 'module': 'modules', + 'function': 'functions', + 'property': 'properties', + 'data': 'globals', + }[name].title() + + grouped = group_members(app, members) + return '\n'.join( + group + for _group, _members in grouped.items() + if (group := make_group( + title=make_pretty(_group), + members=_members, + )) is not 
None + ) + + def make_listing(app, members): + def make_group(title, members): + listing = '\n'.join( + listing_entry(app=app, docobj=docobj) + for docobj in members.values() + if not docobj.is_deprecated + ) + if listing: + return f''' +.. rubric:: {title} + +.. list-table:: + :align: left + +{indent(listing, level=3)} +''' + else: + return None + + return make_grouped( + app=app, + members=members, + make_group=make_group, + ) + + def make_body_listing(app, docobj, members, extra=''): + def make_item_doc(app, docobj): + inherited, place, visibility = docobj.resolve_inheritance_style(app) + if inherited and visibility == 'public' and docobj.is_deprecated: + return None + else: + kind = docobj.autodoc_kind + + # DelegateToAttr introduces dynamic members that can only be + # lookedup on live instances. This makes autodoc fail to + # generate an API doc for that member, so instead we just + # document the member without relying on autodoc. + if docobj.is_dynamic or docobj._broken_autodoc_inheritance: + ref = docobj.inherited_from(app).get_name(style='rst') + doc, _ = _inherited_docstring(docobj=docobj, app=app) + doc = doc or '' + obj = docobj.obj + + fields = { + ':classmethod:': isinstance(obj, classmethod), + ':staticmethod:': isinstance(obj, staticmethod), + ':async:': inspect.iscoroutinefunction(obj), + ':abstractmethod:': getattr(obj, '__isabstractmethod__', False), + f':canonical: {docobj.__module__}.{docobj.__qualname__}': not inspect.ismodule(obj), + f':value: {docobj.obj!r}': kind in ('attribute', 'data') and docobj.obj is not _NOTSET + } + + info_field_list = '\n'.join(sorted( + field + for field, cond in fields.items() + if cond + )) + + # Use :canonical: to actually provide the reference target. + # This allows using just the __qualname__ for the title, so + # it is consistent with autodoc output. + return f''' +.. {kind}:: {docobj.__qualname__} +{indent(info_field_list, level=3)} + +{indent(doc, level=3)} +''' + else: + return f''' +.. auto{kind}:: {docobj.__module__}::{docobj.__qualname__} +''' + if isinstance(docobj.obj, type): + def make_group(title, members): + body = '\n\n'.join( + doc + for docobj in members.values() + if (doc := make_item_doc(app, docobj)) + ) + return f''' +{title} +{'-' * len(title)} + +{body} +''' + body = make_grouped( + app=app, + make_group=make_group, + members={ + name: docobj + for name, docobj in members.items() + if docobj.exists + } + ) + return f''' +.. autoclass:: {docobj.__module__}::{docobj.__qualname__} + :no-members: + :no-inherited-members: + :no-undoc-members: + :no-private-members: + :no-special-members: + +{indent(extra, level=3)} + +{body} +''' + elif inspect.ismodule(docobj.obj): + return extra + else: + doc = make_item_doc(app, docobj) + return f''' +{doc} +{extra} +''' if doc else '' + + def document_title(docobj): + parent = docobj.parent + if parent: + # Set a shorter doc title if possible, so the breadcrumbs UI + # element does not grow in O(N^2) with the depth of the + # nesting. + return f'.. 
title:: {relname(parent.obj, docobj.obj)}\n\n' + else: + return '' + + def sort_mod_members(app, members): + grouped = group_members(app, members) + merged = list(grouped.values()) + return dict(ChainMap(*reversed(merged))) + + def process_mod(app, docobj, curr_loc, stubs_loc, make_stub, with_title=True): + members = docobj.get_members() + members = { + name: docobj + for name, docobj in members.items() + if not docobj.autodoc_is_skipped(app) + } + members = sort_mod_members(app, members) + + listing = make_listing(app=app, members=members) + + toc_entries = '\n'.join( + entry + for name, docobj in members.items() + if (entry:= toc_entry( + docobj=docobj, + name=name, + curr_loc=curr_loc, + stubs_loc=stubs_loc, + make_stub=make_stub, + )) is not None + ) + + name = docobj.fullname + toctree = f''' +.. toctree:: + :hidden: + +{indent(toc_entries, level=3)} +''' + + automodule = f''' +.. automodule:: {name} + :no-index: + :no-members: + :no-inherited-members: + :no-undoc-members: + :no-private-members: + :no-special-members: +''' + # autodoc seems broken and does not make the module specified by + # ".. automodule:: " the current module in the + # reference context, so we do it manually instead. + automodule = _with_refctx(docobj, automodule) + automodule = f''' +.. module:: {name} + +{automodule} +''' + + title = f'{name}\n{"=" * len(name)}\n\n' if with_title else '' + doc_title = document_title(docobj) + content = f'{doc_title}{title}{toctree}{automodule}{listing}' + return content + + + def process_leaf(app, docobj): + obj = docobj.obj + if isinstance(obj, type) or inspect.ismodule(obj): + members = docobj.get_members() + else: + members = {} + + members = { + name: docobj + for name, docobj in members.items() + if not docobj.autodoc_is_skipped(app) + } + + listing = make_listing(app=app, members=members) + + body = make_body_listing( + app=app, + docobj=docobj, + members=members, + extra=listing, + ) + + fullname = docobj.fullname + content = f''' +.. title:: {docobj.membername} + +{fullname} +{'=' * len(fullname)} + +{body} +''' + return content + + mod = importlib.import_module(modname) + import_all_submodules(mod, best_effort=True) + + docobj = SphinxDocObject.from_namespace(mod) + out = process_mod( + app=app, + docobj=docobj, + curr_loc=curr_loc, + stubs_loc=stubs_loc, + make_stub=make_stub, + with_title=False, + ) + return out + + def run(self): + curr_loc = Path(self.env.doc2path(self.env.docname)).resolve() + stubs_loc = Path( + curr_loc.parent, + self.STUBS_FOLDER, + ).resolve() + + modname, = self.arguments + + out = self._run( + app=self.env.app, + curr_loc=curr_loc, + stubs_loc=stubs_loc, + modname=modname, + make_stub=False, + ) + + return self.parse_nested(out) + + @classmethod + def make_stubs(cls, app): + env = app.builder.env + sources = [ + path + for path in map( + lambda x: Path(env.doc2path(x)).resolve(), + env.found_docs + ) + if path.is_file() and path.suffix == '.rst' + ] + + def process_directive(path, modname): + stubs_loc = path.parent / cls.STUBS_FOLDER + out = cls._run( + app=app, + curr_loc=path, + stubs_loc=stubs_loc, + modname=modname, + make_stub=True, + ) + + pattern = re.compile(r'\.\. 
module-listing\s*::\s*([a-zA-Z0-9_.]+)') + for path in sources: + modnames = re.findall(pattern, path.read_text()) + for modname in modnames: + process_directive(path, modname) + + +# Sphinx extension setup +def setup(app): + directives.register_directive('withrefctx', WithRefCtxDirective) + directives.register_directive('run-command', RunCommandDirective) + + + directives.register_directive('exec', ExecDirective) + app.add_event('lisa-exec-state') + + + directives.register_directive('module-listing', ModuleListingDirective) + # We cannot add new sources in SphinxDirective.run(), so it needs to be + # done earlier in the build process. + app.connect('builder-inited', ModuleListingDirective.make_stubs) + + return { + 'version': '0.1', + 'env_version': 1, + 'parallel_read_safe': True, + 'parallel_write_safe': True, + } + + +def indent(content, level=1, idt=' '): + idt = level * idt + return idt + content.replace('\n', f'\n{idt}') def is_test(method): @@ -187,6 +1607,99 @@ def is_test(method): ) +# Unfortunately, this will not currently run for items that do not have a +# docstring at all because of: +# https://github.com/sphinx-doc/sphinx/issues/12678 +def autodoc_process_inherited_members(app, what, name, obj, options, lines): + """ + Replace docstrings of inherited members by a stub that points at the place + where the member is actually defined. + + This prevents having issues when inheriting from members with docstrings + that are not valid reStructuredText, which would make the build fail. + """ + new, _ = _autodoc_process_inherited_members(app, what, name, obj, lines) + lines[:] = new + + +def autodoc_process_inherited_signature(app, what, name, obj, options, signature, return_annotation): + """ + Removes the signature when :func:`autodoc_process_inherited_members` would + remove the docstring. + """ + _, foreign = _autodoc_process_inherited_members(app, what, name, obj, ['']) + return (None, None) if foreign else (signature, return_annotation) + + +def _autodoc_process_inherited_members(app, what, name, obj, lines): + try: + docobj = SphinxDocObject.from_name(name, obj=obj) + except ValueError: + return (lines, False) + else: + doc, foreign = _inherited_docstring( + app=app, + docobj=docobj, + doc=_with_refctx( + docobj.doc_refctx, + '\n'.join(lines), + ), + ) + lines = (doc or '').splitlines() + return (lines, foreign) + + +def _inherited_docstring(app, docobj, doc=None): + doc = doc or docobj.__doc__ or '' + + if docobj.is_class_member: + inherited, place, visibility = docobj.resolve_inheritance_style(app) + if inherited: + ref = docobj.inherited_from(app).get_name(style='rst') + kind = docobj.autodoc_kind + + if place == 'local': + if visibility == 'private': + pass + elif visibility == 'public': + # Set the current module to be the one in which the member was + # really defined. That will make any relative reference e.g. to + # another class in the same module work rather than requiring + # absolute references. + shortdoc = docobj.get_short_doc(style='rst') + snippet = f''' +*Inherited {kind}, see* {ref} + +{shortdoc} +''' + doc = snippet + else: + raise ValueError(f'Non handled inheritance visibility: {visibility}') + # Otherwise if we are inheriting from a class defined in another package, + # we replace the docstring with a stub reference. This ensures we can build + # the documentation cleanly as we can fix any inherited docstring defined + # in our package. 
+ elif place == 'foreign': + doc = f''' +*Inherited {kind}, see* {ref} + +''' + else: + raise ValueError(f'Non handled inheritance place: {place}') + + foreign = place == 'foreign' + return (doc, foreign) + + elif isinstance(docobj.obj, type): + # We do not want to inherit any class docstring, as it is usually + # misleading. + doc = docobj.obj.__doc__ + doc = inspect.cleandoc(doc) if doc else doc + return (doc, False) + else: + return (doc, False) + + def autodoc_process_test_method(app, what, name, obj, options, lines): # Append the list of available test methods for all classes that appear to # have some. @@ -227,11 +1740,139 @@ def autodoc_process_analysis_events(app, what, name, obj, options, lines): lines.extend(events_doc.splitlines()) +def intersphinx_warn_missing_reference_handler(app, domain, node, non_ignored_refs): + if domain and domain.name == 'py': + reftarget = node['reftarget'] + class_ctx = node.get('py:class') + mod_ctx = node.get('py:module') + + possible_names = [ + name + for name in ( + f'{class_ctx}.{reftarget}' if class_ctx else None, + f'{mod_ctx}.{reftarget}' if mod_ctx else None, + reftarget, + ) + if name + ] + for name in possible_names: + try: + docobj = SphinxDocObject.from_name(name) + except ValueError: + pass + else: + if docobj.autodoc_is_skipped(app): + return True + elif any( + regex.match(docobj.fullname) + for regex in non_ignored_refs + ): + return None + else: + return True + + package = reftarget.split('.')[0] + try: + importlib.import_module(package) + except ImportError: + # If the top level package cannot even be imported, this probably + # means we are referring to an optional dependency that is not + # installed so we assume the name is valid. + return True + else: + return None + + +def autodoc_process_bases_handler(app, name, obj, options, bases): + """ + Apply the skipping logic to base classes, so we hide private base classes. + """ + + def rewrite_bases(bases): + return itertools.chain.from_iterable( + rewrite(base) + for base in bases + ) + + def rewrite(cls): + try: + docobj = SphinxDocObject.from_namespace(cls) + # Some bases might not be classes. They might be functions + # monkey-patched with an __mro_entries__ attribute, like + # typing.NamedTuple (in Python 3.12 at least) + except ValueError: + return [cls] + else: + skipped = docobj.autodoc_is_skipped(app) + if skipped: + return rewrite_bases(cls.__bases__) + else: + return [cls] + + new_bases = list(rewrite_bases(list(bases))) + new_bases = [ + base + for base in new_bases + if base is not object + ] + new_bases = new_bases or [object] + bases[:] = new_bases + + def autodoc_skip_member_handler(app, what, name, obj, skip, options, default_exclude_members=None): """ Enforce the "exclude-members" option, even in cases where it seems to be ignored by Sphinx. """ + UNINTERESTING_BASES = ( + object, + type, + abc.ABC, + abc.ABCMeta, + typing.NamedTuple, + ) + + def make_sub(cls): + class Sub(cls): + pass + return Sub + + # Plain subclasses so that we filter out any default dunder attribute they + # might get, such as non-default __base__. + UNINTERESTING_BASES = list(UNINTERESTING_BASES) + list(map(make_sub, UNINTERESTING_BASES)) + + def filter_name(fullname, excluded, doc): + def _filter(membername): + if membername in excluded: + return True + # Dunder names are a bit more tricky to handle since we cannot decide + # whether it is skipped or not just based on the name. 
Unfortunately,
+            # we also cannot just treat the absence of a doc as meaning the
+            # member should be skipped, since some default implementations
+            # have docstrings (e.g. object.__init_subclass__). Those
+            # docstrings will be "inherited" by any custom implementation
+            # that does not specify any docstring as per inspect.getdoc()
+            # behavior.
+            # As a result, we skip implementations that either:
+            # * have no doc,
+            # * or have the same doc as one of the uninteresting
+            #   implementations.
+            elif membername.startswith('__') and membername.endswith('__'):
+                def same_doc(cls):
+                    try:
+                        member = silent_getattr(cls, membername)
+                    except AttributeError:
+                        return False
+                    else:
+                        return inspect.getdoc(member) == doc
+
+                return any(map(same_doc, UNINTERESTING_BASES))
+
+            elif membername.startswith('_'):
+                return True
+            else:
+                return False
+
+        return any(map(_filter, fullname.split('.')))
+
     excluded = options.get('exclude-members', set())
     if excluded:
         # Either it's a one-item set with the string passed in conf.py
@@ -243,55 +1884,77 @@ def autodoc_skip_member_handler(app, what, name, obj, skip, options, default_exc
     else:
         excluded = exclude_members_option(excluded)
 
-    default_excluded = exclude_members_option(default_exclude_members)
+    default_excluded = exclude_members_option(default_exclude_members or '')
     excluded = excluded | default_excluded
 
-    name = name.split('.')[-1]
-
-    unwrapped = inspect.unwrap(obj)
-    # Get rid of the default implementation of dunder names, since it adds no
-    # value in the documentation
-    if any(
-        hasattr(cls, name) and getattr(cls, name) in (obj, unwrapped)
-        # providers of "uninteresting" methods that are useless in our
-        # documentation
-        for cls in (
-            object,
-            type,
-            abc.ABC,
-            abc.ABCMeta,
-        )
-    ):
-        return True
-    # Some classes like ABCMeta are more sneaky so also ban things that are
-    # just builtin functions
-    elif any(
-        type_ in map(type, (obj, unwrapped))
-        for type_ in (
-            # Work with multiple Python versions
-            getattr(types, type_name)
-            for type_name in (
-                'BuiltinFunctionType',
-                'BuiltinMethodType',
-                'WrapperDescriptorType',
-                'MethodWrapperType',
-                'MethodDescriptorType',
-                'ClassMethodDescriptorType',
-                'GetSetDescriptorType',
-                'MemberDescriptorType',
-            )
-            if hasattr(types, type_name)
-        )
-    ):
-        return True
-    # Dunder names without any doc are of no interest, they are probably just
-    # implementation details
-    elif name.startswith('__') and name.endswith('__') and not inspect.getdoc(obj):
-        return True
-    elif name in excluded:
-        return True
+    # Workaround issue:
+    # https://github.com/sphinx-doc/sphinx/issues/12674
+    #
+    # Note that if it was an inherited member, it will resolve the name to be
+    # its real name where it was defined, rather than as a member of the
+    # subclass.
+    if '.' not in name:
+        try:
+            _name = get_obj_name(obj)
+        except ValueError:
+            pass
+        else:
+            # We only want to get the fully qualified version of "name". We
+            # don't want to rename cases where a class attribute is assigned
+            # something random that happens to have a name.
+            if _name.split('.')[-1] == name:
+                name = _name
+
+    try:
+        docobj = SphinxDocObject.from_name(name, obj=obj)
+    except ValueError:
+        membername = name.split('.')[-1]
+        # Best effort attempt, in case the workaround for
+        # https://github.com/sphinx-doc/sphinx/issues/12674
+        # did not work.
+ return filter_name(name, excluded, doc=None) else: - return skip + obj = docobj.obj + fullname = docobj.fullname + membername = docobj.membername + doc = docobj.doc or '' + unwrapped = inspect.unwrap(obj) + # Get rid of the default implementation of dunder names, since it adds no + # value in the documentation + if any( + silent_getattr(cls, membername, object()) in (obj, unwrapped) + # providers of "uninteresting" methods that are useless in our + # documentation + for cls in UNINTERESTING_BASES + ): + return True + # Some classes like ABCMeta are more sneaky so also ban things that are + # just builtin functions + elif any( + type_ in map(type, (obj, unwrapped)) + for type_ in ( + # Work with multiple Python versions + silent_getattr(types, type_name) + for type_name in ( + 'BuiltinFunctionType', + 'BuiltinMethodType', + 'WrapperDescriptorType', + 'MethodWrapperType', + 'MethodDescriptorType', + 'ClassMethodDescriptorType', + 'GetSetDescriptorType', + 'MemberDescriptorType', + ) + if hasattr(types, type_name) + ) + ): + return True + elif re.search(r'^\s*:\s*meta\s*public\s*:', doc, re.MULTILINE): + return False + elif re.search(r'^\s*:\s*meta\s*private\s*:', doc, re.MULTILINE): + return True + else: + return filter_name(fullname, excluded, doc=doc) class DocPlotConf(SimpleMultiSrcConf): @@ -368,13 +2031,10 @@ def autodoc_pre_make_plots(conf): rst_figure = TraceAnalysisBase.call_on_trace(meth, trace, { 'backend': 'bokeh', 'output': 'sphinx-rst', - 'interactive': False, **kwargs }) print(f'Plot for {meth.__qualname__} generated in {m.delta}s') - - rst_figure = f'\n:Example plot:\n\n{rst_figure}' return rst_figure plot_methods = set(itertools.chain.from_iterable( @@ -384,9 +2044,8 @@ def autodoc_pre_make_plots(conf): preload_events(conf, plot_methods) plots = { - meth.__qualname__: plot + meth: _make_plot(meth) for meth in plot_methods - if (plot := _make_plot(meth)) is not None } return plots @@ -396,18 +2055,18 @@ def autodoc_process_analysis_plots(app, what, name, obj, options, lines, plots): if what != 'method': return + name = get_obj_name(obj) try: - rst_figure = plots[obj.__qualname__] + rst_figure = plots[name] except KeyError: return else: - lines.extend(rst_figure.splitlines()) + if rst_figure: + rst_figure = f'{rst_figure}\n' + lines[:0] = rst_figure.splitlines() -def autodoc_process_analysis_methods(app, what, name, obj, options, lines): - """ - Append the list of required trace events - """ +def ana_invocation(obj): methods = { func: subclass for subclass in TraceAnalysisBase.get_analysis_classes().values() @@ -417,10 +2076,22 @@ def autodoc_process_analysis_methods(app, what, name, obj, options, lines): try: cls = methods[obj] except (KeyError, TypeError): - return + raise ValueError(f'Could not find method {obj}') else: on_trace_name = f'trace.ana.{cls.name}.{obj.__name__}' - extra_doc = f"\n*Called on* :class:`~lisa.trace.Trace` *instances as* ``{on_trace_name}()``\n\n" + return f"*Called on* :class:`~lisa.trace.Trace` *instances as* ``{on_trace_name}()``" + + +def autodoc_process_analysis_methods(app, what, name, obj, options, lines): + """ + Append the list of required trace events + """ + try: + extra_doc = ana_invocation(obj) + except ValueError: + pass + else: + extra_doc = f"\n{extra_doc}\n\n" # prepend lines[:0] = extra_doc.splitlines() @@ -444,7 +2115,7 @@ def get_analysis_list(meth_type): elif meth_type == 'df': meth_list = ( subclass.get_df_methods() - if isinstance(subclass, TraceAnalysisBase) else + if issubclass(subclass, TraceAnalysisBase) else [] ) else: @@ 
-501,6 +2172,7 @@ def check_dead_links(filename): ))) +@functools.lru_cache() def get_deprecated_map(): """ Get the mapping of deprecated names with some metadata. @@ -508,7 +2180,7 @@ def get_deprecated_map(): # Import everything there is to import, so the map is fully populated import_all_submodules(lisa, best_effort=True) - return DEPRECATED_MAP + return _DEPRECATED_MAP def get_deprecated_table(): """ @@ -516,9 +2188,9 @@ def get_deprecated_table(): :mod:`lisa`. """ - def indent(string, level=1): + def indent(string): idt = ' ' * 4 - return string.replace('\n', '\n' + idt * level) + return string.replace('\n', '\n' + idt) def make_entry(entry): msg = entry.get('msg') or '' @@ -528,12 +2200,18 @@ def get_deprecated_table(): else: removed_in = f'*Removed in: {format_version(removed_in)}*\n\n' - name = get_sphinx_name(entry['obj'], style='rst') + name = get_obj_name(entry['obj'], style='rst') replaced_by = entry.get('replaced_by') + if replaced_by is None: replaced_by = '' else: - replaced_by = f"*Replaced by:* {get_sphinx_name(replaced_by, style='rst')}\n\n" + if isinstance(replaced_by, str): + replaced_by = str(replaced_by) + else: + replaced_by = get_obj_name(replaced_by, style='rst') + + replaced_by = f"*Replaced by:* {replaced_by}\n\n" return "* - {name}{msg}{replaced_by}{removed_in}".format( name=indent(name + '\n\n'), @@ -595,35 +2273,6 @@ def get_deprecated_table(): return '\n\n'.join(tables) -def get_xref_type(obj): - """ - Infer the Sphinx type a cross reference to ``obj`` should have. - - For example, ``:py:class`FooBar`` has the type ``py:class``. - """ - if isinstance(obj, type): - if issubclass(obj, BaseException): - t = 'exc' - else: - t = 'class' - elif isinstance(obj, types.ModuleType): - t = 'mod' - elif callable(obj): - try: - qualname = obj.__qualname__ - except AttributeError: - t = 'func' - else: - if len(qualname.split('.')) > 1: - t = 'meth' - else: - t = 'func' - else: - raise ValueError(f'Cannot infer the xref type of {obj}') - - return f'py:{t}' - - def get_subclasses_bullets(cls, abbrev=True, style=None, only_leaves=False): """ Return a formatted bullet list of the subclasses of the given class, @@ -633,8 +2282,8 @@ def get_subclasses_bullets(cls, abbrev=True, style=None, only_leaves=False): f'* {subcls}: {doc}' for subcls, doc in sorted( ( - get_sphinx_name(subcls, style=style, abbrev=abbrev), - get_short_doc(subcls) + get_obj_name(subcls, style=style, abbrev=abbrev), + get_short_doc(subcls, style=style) ) for subcls in get_subclasses(cls, only_leaves=only_leaves) ) @@ -736,10 +2385,6 @@ def make_changelog(repo, since=None, head_release_name='Next release', fmt='rst' for release, msgs in release_msgs.items() } - def indent(level, content): - idt = level * ' ' - return idt + content.replace('\n', f'\n{idt}') - def format_release(name, sections): title = f'{name}\n{len(name) * "="}\n' body = '\n\n'.join( @@ -756,7 +2401,6 @@ def make_changelog(repo, since=None, head_release_name='Next release', fmt='rst' def format_section(name, msgs): title = f'{name.capitalize()}\n{len(name) * "+"}\n' body = '\n\n'.join(map(format_msg, sorted(msgs))) - body = indent(4, body) return f'{title}\n{body}' def format_msg(msg): @@ -771,13 +2415,6 @@ def make_changelog(repo, since=None, head_release_name='Next release', fmt='rst' return rst -class PlaceHolderRef: - """ - If you got redirected to here, this means that the reference points to - something private and undocumented, or is not expected to be even - documentable. 
- """ - def escape_rst(s): """ Escape the string so that it's considered plain reStructuredText input, diff --git a/lisa/_doc/manconf.py b/lisa/_doc/manconf.py index a79b7b01b5e97bf6924dfcbdace24e83f22b532f..c8b24ac5d3e7a5e45b6e140cd6312198bd21700d 100644 --- a/lisa/_doc/manconf.py +++ b/lisa/_doc/manconf.py @@ -5,6 +5,8 @@ # full list see the documentation: # http://www.sphinx-doc.org/en/master/config +import os + # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, @@ -15,9 +17,6 @@ # import sys # sys.path.insert(0, os.path.abspath('.')) -# Get the custom reST directives -import lisa._doc.helpers - # -- Project information ----------------------------------------------------- copyright = '2019, ARM-Software' @@ -35,6 +34,7 @@ today_fmt = '%Y' # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ + 'lisa._doc.helpers', 'sphinx.ext.todo', ] @@ -71,3 +71,8 @@ pygments_style = None # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True + + +# Signal to lisa.utils.is_running_sphinx() that we are indeed running under +# sphinx before we import anything +os.environ['_LISA_DOC_SPHINX_RUNNING'] = '1' diff --git a/lisa/_generic.py b/lisa/_generic.py index 93d6291a2e02b456249dea1c7d3c34eb88918b31..befadb67fb35d549a94dc163e28663ff041e485a 100644 --- a/lisa/_generic.py +++ b/lisa/_generic.py @@ -26,7 +26,7 @@ from typing import Any, Union, Generic, TypeVar import typeguard from collections.abc import Iterable -from lisa.utils import get_cls_name +from lisa.utils import get_obj_name, _is_typing_hint class _TypeguardCustom: _HINT = Any @@ -92,22 +92,10 @@ def is_instance(obj, classinfo): def is_hint(obj): - """ - Heuristic to check if a given ``obj`` is a typing hint or anything else. - This function will return ``False`` for classes. - - .. warning:: Since there is currently no way to identify hints for sure, - the check might return ``False`` even if it is a hint. - """ - module = getattr(obj, '__module__', None) - - # This is a class, so cannot be a hint. - if isinstance(obj, type): - return issubclass(obj, _TypeguardCustom) - elif module in ('typing', 'typing_extensions'): + if isinstance(obj, type) and issubclass(obj, _TypeguardCustom): return True else: - return False + return _is_typing_hint(obj) @functools.lru_cache(maxsize=None, typed=True) @@ -123,7 +111,7 @@ def hint_to_class(hint): class Stub(metaclass=Meta): pass - name = get_cls_name(hint).split('.', 1) + name = get_obj_name(hint).split('.', 1) try: name = name[1] except IndexError: diff --git a/lisa/_kmod.py b/lisa/_kmod.py index 75becefc983334ecd735f8cc48b28d5d6740d03d..e48f7867698dbd04dad981211f3162b343a07688 100644 --- a/lisa/_kmod.py +++ b/lisa/_kmod.py @@ -780,8 +780,6 @@ class OverlayResource(abc.ABC): class _FileOverlayBase(OverlayResource): """ - :meta public: - Base class for file overlays. """ pass @@ -816,8 +814,6 @@ class FileOverlay(_FileOverlayBase): class _PathOverlayBase(_FileOverlayBase): """ - :meta public: - Base class for path-based overlays. """ # This is racy with write_to(), but we are not trying to make something diff --git a/lisa/analysis/__init__.py b/lisa/analysis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b2391dfdf657881d5cb786683587beb47edfed4d --- /dev/null +++ b/lisa/analysis/__init__.py @@ -0,0 +1,7 @@ +""" +Data and trace analysis classes package. 
+ +Each analysis is living in a class in its own module. :mod:`lisa.analysis.base` +contains helpers to define such analysis class. +""" + diff --git a/lisa/analysis/_proxy.py b/lisa/analysis/_proxy.py index f3f04272a1418050021983bc67703d4849ce73f9..122a18070b305e4fef6fac68127c3f5ecc7a5b3a 100644 --- a/lisa/analysis/_proxy.py +++ b/lisa/analysis/_proxy.py @@ -98,27 +98,6 @@ class AnalysisProxy(Loggable): """ Entry point to call analysis methods on :class:`~lisa.trace.Trace` objects. - **Example** - - # Call lisa.analysis.LoadTrackingAnalysis.df_task_signal() on a trace:: - - df = trace.ana.load_tracking.df_task_signal(task='foo', signal='util') - - The proxy can also be called like a function to define default values for - analysis methods:: - - ana = trace.ana(task='big_0-3') - ana.load_tracking.df_task_signal(signal='util') - - # Equivalent to: - ana.load_tracking.df_task_signal(task='big_0-3', signal='util') - - # The proxy can be called again to override the value given to some - # parameters, and the the value can also be overridden when calling the - # method: - ana(task='foo').df_task_signal(signal='util') - ana.df_task_signal(task='foo', signal='util') - :param trace: input Trace object :type trace: lisa.trace.Trace """ diff --git a/lisa/analysis/base.py b/lisa/analysis/base.py index 64c87a49939e2d452d05773b6a7b3bf80e67aef0..fe40cedb380d59229ab94610f4905feeea2c5db1 100644 --- a/lisa/analysis/base.py +++ b/lisa/analysis/base.py @@ -15,6 +15,10 @@ # limitations under the License. # +""" +Base classes to define a new trace analysis. +""" + import io import os import inspect @@ -458,10 +462,10 @@ class AnalysisHelpers(Loggable, abc.ABC): It provides among other things: - * automatic plot setup - * HTML and reStructuredText output. - * workarounds some holoviews issues - * integration in other tools + * automatic plot setup + * HTML and reStructuredText output. + * workarounds some holoviews issues + * integration in other tools """ _decorator = cls.plot_method.__func__ @@ -473,13 +477,13 @@ class AnalysisHelpers(Loggable, abc.ABC): :param backend: Holoviews plot library backend to use: - * ``bokeh``: good support for interactive plots - * ``matplotlib``: sometimes better static image output, but - unpredictable results that more often than not require - a fair amount of hacks to get something good. - * ``plotly``: not supported by LISA but technically - available. Since it's very similar to bokeh - feature-wise, bokeh should be preferred. + * ``bokeh``: good support for interactive plots + * ``matplotlib``: sometimes better static image output, but + unpredictable results that more often than not require + a fair amount of hacks to get something good. + * ``plotly``: not supported by LISA but technically + available. Since it's very similar to bokeh + feature-wise, bokeh should be preferred. .. note:: In a notebook, the way to choose which backend should be used to display plots is typically selected with e.g. @@ -1144,7 +1148,7 @@ class TraceAnalysisBase(AnalysisHelpers): It provides among other things: - * Dataframe format conversion + * Dataframe format conversion """ # Apply caching to all df-returning functions. 
This way we also diff --git a/lisa/analysis/frequency.py b/lisa/analysis/frequency.py index ff58b0adf8879177b1a0e1bbe6c4adbd2c4ff1cf..8a056baf71a15911b4c388c8005b30fafb7675b6 100644 --- a/lisa/analysis/frequency.py +++ b/lisa/analysis/frequency.py @@ -352,6 +352,9 @@ class FrequencyAnalysis(TraceAnalysisBase): @TraceAnalysisBase.df_method @requires_events('clk_set_rate', 'clk_enable', 'clk_disable') def df_peripheral_clock_effective_rate(self, clk_name): + """ + Dataframe of peripheral clock frequencies. + """ # Note: the kernel still defines a "clock_*" variant for each of these, # but it's not actually used anywhere in the code. The new "clk_*" diff --git a/lisa/analysis/idle.py b/lisa/analysis/idle.py index 86fb3599955a3c9a8e744bdc9bddf249f0fde8dc..9d1cb3466dc37a59b89a6a42a65d023cb124d50f 100644 --- a/lisa/analysis/idle.py +++ b/lisa/analysis/idle.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Linux kernel idle states analysis. +""" from functools import reduce import operator diff --git a/lisa/analysis/latency.py b/lisa/analysis/latency.py index 0f7b52910b4015a16286659cdf7ae86c220e4476..f4a96649e0f26763ef5add4cb7d9c0446a5c18f1 100644 --- a/lisa/analysis/latency.py +++ b/lisa/analysis/latency.py @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Linux kernel scheduler latency analysis. +""" + import pandas as pd import polars as pl import numpy as np @@ -425,7 +429,8 @@ class LatencyAnalysis(TraceAnalysisBase): @df_activations.used_events def plot_activations(self, task: TaskID): """ - Plot the :meth:`lisa.analysis.latency.LatencyAnalysis.df_activations` of a task + Plot the :meth:`~lisa.analysis.latency.LatencyAnalysis.df_activations` + of a task :param task: The task's name or PID :type task: int or str or tuple(int, str) diff --git a/lisa/analysis/pixel6.py b/lisa/analysis/pixel6.py index 0eaead7b50b5bcb88a812d0e7d5c407e56754101..2286083a273d47e270fa6325f788269a15090265 100644 --- a/lisa/analysis/pixel6.py +++ b/lisa/analysis/pixel6.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Pixel 6-specific analysis. +""" import pandas as pd import holoviews as hv diff --git a/lisa/analysis/rta.py b/lisa/analysis/rta.py index 428e63a05ab4669a7578c39d3e8145d1efd671ab..61d2c3d815c402d398a8f53f81e86d5828774342 100644 --- a/lisa/analysis/rta.py +++ b/lisa/analysis/rta.py @@ -14,7 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from collections import namedtuple +""" +``rt-app`` analysis. +""" + +from typing import NamedTuple, Dict import pandas as pd import holoviews as hv @@ -29,22 +33,38 @@ from lisa.wlgen.rta import RTA, RTAConf from lisa.notebook import plot_signal -RefTime = namedtuple("RefTime", ['kernel', 'user']) -""" -Named tuple to synchronize kernel and userspace (``rt-app``) timestamps. -""" +class RefTime(NamedTuple): + """ + Named tuple to synchronize kernel and userspace (``rt-app``) timestamps. + """ + kernel: float + user: float -PhaseWindow = namedtuple("PhaseWindow", ['id', 'start', 'end', 'properties']) -""" -Named tuple with fields: +class PhaseWindow(NamedTuple): + """ + Time window for the execution of an ``rt-app`` phase. + """ - * ``id``: integer ID of the phase or its name. 
- * ``start``: timestamp of the start of the phase - * ``end``: timestamp of the end of the phase - * ``properties``: properties of the phase, extracted from a - :mod:`lisa.wlgen.rta` profile. -""" + id: int + """ + Integer ID of the phase or its name. + """ + + start: float + """ + Timestamp of the start of the phase. + """ + + end: float + """ + Timestamp of the end of the phase. + """ + + properties: Dict[str, object] + """ + Properties of the phase, extracted from a :mod:`lisa.wlgen.rta` profile. + """ class RTAEventsAnalysis(TraceAnalysisBase): @@ -122,22 +142,18 @@ class RTAEventsAnalysis(TraceAnalysisBase): * A ``__comm`` column: the actual rt-app trace task name * A ``__cpu`` column: the CPU on which the task was running at event - generation time + generation time * A ``__pid`` column: the PID of the task - * A ``data`` column: the data corresponding to the reported event - * An ``event`` column: the event generated - - The ``event`` column can report these events: - - * ``start``: the start of the rt-app main thread execution - * ``end``: the end of the rt-app main thread execution - * ``clock_ref``: the time rt-app gets the clock to be used for logfile entries + * A ``data`` column: the data corresponding to the reported event: - The ``data`` column reports: + * the base timestamp used for logfile generated event for the ``clock_ref`` event + * ``NaN`` for all the other events - * the base timestamp used for logfile generated event for the ``clock_ref`` event - * ``NaN`` for all the other events + * An ``event`` column: the event generated: + * ``start``: the start of the rt-app main thread execution + * ``end``: the end of the rt-app main thread execution + * ``clock_ref``: the time rt-app gets the clock to be used for logfile entries """ return self.trace.df_event('userspace@rtapp_main') @@ -190,16 +206,13 @@ class RTAEventsAnalysis(TraceAnalysisBase): * A ``__comm`` column: the actual rt-app trace task name * A ``__cpu`` column: the CPU on which the task was running at event - generation time + generation time * A ``__line`` column: the ftrace line numer * A ``__pid`` column: the PID of the task - * An ``event`` column: the event generated - - The ``event`` column can report these events: - - * ``start``: the start of the ``__pid``:``__comm`` task execution - * ``end``: the end of the ``__pid``:``__comm`` task execution + * An ``event`` column: the event generated: + * ``start``: the start of the ``__pid``:``__comm`` task execution + * ``end``: the end of the ``__pid``:``__comm`` task execution """ df = self.trace.df_event('userspace@rtapp_task') return self._task_filtered(df, task) @@ -227,19 +240,17 @@ class RTAEventsAnalysis(TraceAnalysisBase): * A ``__comm`` column: the actual rt-app trace task name * A ``__cpu`` column: the CPU on which the task was running at event - generation time + generation time * A ``__line`` column: the ftrace line numer * A ``__pid`` column: the PID of the task - * An ``event`` column: the generated event + * An ``event`` column: the generated event: + + * ``start``: the start of the ``__pid``:``__comm`` related event + * ``end``: the end of the ``__pid``:``__comm`` related event + * A ``phase`` column: the phases counter for each ``__pid``:``__comm`` task * A ``phase_loop`` colum: the phase_loops's counter * A ``thread_loop`` column: the thread_loop's counter - - The ``event`` column can report these events: - - * ``start``: the start of the ``__pid``:``__comm`` related event - * ``end``: the end of the ``__pid``:``__comm`` related event - """ 
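``RefTime`` and ``PhaseWindow`` above illustrate a pattern applied throughout this patch: :class:`typing.NamedTuple` subclasses with annotated fields and per-field docstrings replace bare :func:`collections.namedtuple` calls, so that each field gets a type and documentation Sphinx can render. A standalone sketch (``PhaseWindowExample`` is a hypothetical name)::

    from typing import NamedTuple, Dict

    class PhaseWindowExample(NamedTuple):
        # Hypothetical twin of PhaseWindow: annotated fields give both
        # runtime types and documentable attributes.
        id: int
        start: float
        end: float
        properties: Dict[str, object]

    w = PhaseWindowExample(id=0, start=1.0, end=2.5, properties={'name': 'p0'})
    assert w.end - w.start == 1.5     # behaves like any namedtuple

Note that ``TaskID`` further down in this patch needs an intermediate ``_TaskID`` class, since ``__init__`` cannot be overridden directly in a :class:`typing.NamedTuple` subclass.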
df = self.trace.df_event('userspace@rtapp_loop') df = self._task_filtered(df, task) @@ -341,11 +352,11 @@ class RTAEventsAnalysis(TraceAnalysisBase): :returns: A :class:`pandas.DataFrame` with index representing the start time of a phase and these column: - * ``phase``: the phase number or its name extracted from - ``wlgen_profile``. - * ``duration``: the measured phase duration. - * ``properties``: the properties mapping of the phase extracted - from ``wlgen_profile``. + * ``phase``: the phase number or its name extracted from + ``wlgen_profile``. + * ``duration``: the measured phase duration. + * ``properties``: the properties mapping of the phase extracted + from ``wlgen_profile``. """ # Trace windowing can cut the trace anywhere, so we need to remove the # partial loops records to avoid confusion @@ -404,7 +415,8 @@ class RTAEventsAnalysis(TraceAnalysisBase): @df_phases.used_events def task_phase_windows(self, task, wlgen_profile=None): """ - Yield the phases of the specified task. + Yield a :class:`PhaseWindow` for each rt-app phase of the specified + task. :param task: the rt-app task to filter for :type task: int or str or lisa.analysis.tasks.TaskID @@ -412,14 +424,8 @@ class RTAEventsAnalysis(TraceAnalysisBase): :param wlgen_profile: See :meth:`df_phases`. :type wlgen_profile: dict(str, lisa.wlgen.rta.RTAPhaseBase) or None - Yield :class: `namedtuple` reporting: - - * `id` : the iteration ID - * `start` : the iteration start time - * `end` : the iteration end time - - :return: Generator yielding :class:`PhaseWindow` with - start end end timestamps. + :return: Generator yielding :class:`PhaseWindow` with start end end + timestamps. """ for phase in self.df_phases(task, wlgen_profile=wlgen_profile).itertuples(): start = phase.Index @@ -448,7 +454,7 @@ class RTAEventsAnalysis(TraceAnalysisBase): * A ``__pid`` column: the PID of the task * A ``phase`` column: the phases counter for each ``__pid``:``__comm`` task - The ``index`` represents the timestamp of a phase start event. + The ``index`` represents the timestamp of a phase start event. """ return self._get_rtapp_phases('start', task, wlgen_profile=wlgen_profile) @@ -470,7 +476,7 @@ class RTAEventsAnalysis(TraceAnalysisBase): * A ``__pid`` column: the PID of the task * A ``phase`` column: the phases counter for each ``__pid``:``__comm`` task - The ``index`` represents the timestamp of a phase end event. + The ``index`` represents the timestamp of a phase end event. """ return self._get_rtapp_phases('end', task, wlgen_profile=wlgen_profile) @@ -646,14 +652,14 @@ class RTAEventsAnalysis(TraceAnalysisBase): * A ``__comm`` column: the actual rt-app trace task name * A ``__pid`` column: the PID of the task * A ``__cpu`` column: the CPU on which the task was running at event - generation time + generation time * A ``__line`` column: the ftrace line numer * A ``type`` column: the type of the generated event * A ``desc`` column: the mnemonic type of the generated event * A ``id`` column: the ID of the resource associated to the event, - e.g. the ID of the fired timer + e.g. the ID of the fired timer - The ``index`` represents the timestamp of the event. + The ``index`` represents the timestamp of the event. """ df = self.trace.df_event('userspace@rtapp_event') return self._task_filtered(df, task) @@ -686,21 +692,20 @@ class RTAEventsAnalysis(TraceAnalysisBase): :returns: a :class:`pandas.DataFrame` with a set of colums representing the stats generated by rt-app after each loop. - - .. 
seealso:: the rt-app provided documentation: - https://github.com/scheduler-tools/rt-app/blob/master/doc/tutorial.txt - * A ``__comm`` column: the actual rt-app trace task name * A ``__pid`` column: the PID of the task * A ``__cpu`` column: the CPU on which the task was running at event - generation time + generation time * A ``__line`` column: the ftrace line numer * A ``type`` column: the type of the generated event * A ``desc`` column: the mnemonic type of the generated event * A ``id`` column: the ID of the resource associated to the event, - e.g. the ID of the fired timer + e.g. the ID of the fired timer + + The ``index`` represents the timestamp of the event. - The ``index`` represents the timestamp of the event. + .. seealso:: the rt-app provided documentation: + https://github.com/scheduler-tools/rt-app/blob/master/doc/tutorial.txt """ df = self._get_stats() return self._task_filtered(df, task) @@ -783,13 +788,12 @@ class RTAEventsAnalysis(TraceAnalysisBase): perf_index = \frac{slack}{c_period - c_run} - where + where: - - ``c_period``: is the configured period for an activation - - ``c_run``: is the configured run time for an activation, assuming to - run at the maximum frequency and on the maximum capacity - CPU. - - ``slack``: is the measured slack for an activation + * ``c_period``: is the configured period for an activation + * ``c_run``: is the configured run time for an activation, assuming to + run at the maximum frequency and on the maximum capacity CPU. + * ``slack``: is the measured slack for an activation The slack is defined as the different among the activation deadline and the actual completion time of the activation. @@ -872,7 +876,6 @@ class RTAEventsAnalysis(TraceAnalysisBase): :type bins: int .. seealso:: :meth:`plot_perf` for the perf index definition. - """ task = self.trace.ana.tasks.get_task_id(task) name = f'perf index of {task} (us)' diff --git a/lisa/analysis/tasks.py b/lisa/analysis/tasks.py index 2a1738bf43bffd31e75c33a936d72b992d55372a..d8328eefd8a3d045357c9d98f1bd448409078b7e 100644 --- a/lisa/analysis/tasks.py +++ b/lisa/analysis/tasks.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Linux kernel scheduler tasks analysis. +""" from enum import Enum import itertools @@ -21,9 +24,9 @@ import warnings import typing from numbers import Number from operator import itemgetter -from collections import namedtuple import re import functools +from typing import NamedTuple, Optional import numpy as np import pandas as pd @@ -39,7 +42,13 @@ from lisa.notebook import _hv_neutral, plot_signal from lisa._typeclass import FromString -class TaskID(namedtuple('TaskID', ('pid', 'comm'))): +# We cannot override __init__ in a NamedTuple so we inherit instead: +# https://github.com/python/typing/issues/526 +class _TaskID(NamedTuple): + pid: Optional[int] + comm: Optional[str] + +class TaskID(_TaskID): """ Unique identifier of a logical task in a :class:`lisa.trace.Trace`. @@ -51,10 +60,6 @@ class TaskID(namedtuple('TaskID', ('pid', 'comm'))): names associated. :type comm: str """ - - # Prevent creation of a __dict__. 
This allows a more compact representation - __slots__ = [] - def __init__(self, *args, **kwargs): # pylint: disable=unused-argument super().__init__() diff --git a/lisa/analysis/thermal.py b/lisa/analysis/thermal.py index 2a3de006b456ba4be99a6ccc54efcc9da13a2410..127efc9cdc59b6f1598fc645d738f6b2ce99c755 100644 --- a/lisa/analysis/thermal.py +++ b/lisa/analysis/thermal.py @@ -14,7 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - +""" +Linux kernel thermal management subsystem analysis. +""" from devlib.utils.misc import list_to_mask, mask_to_list diff --git a/lisa/conf.py b/lisa/conf.py index 481c23ab40d24b3d0ba27b7f6c02b3e53b3c0f76..3950a0bfd583a62b5441a036a91d5629e4aedc0d 100644 --- a/lisa/conf.py +++ b/lisa/conf.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Configuration file management. +""" import abc import copy @@ -40,7 +43,7 @@ import typeguard import lisa from lisa.utils import ( Serializable, Loggable, get_nested_key, set_nested_key, get_call_site, - is_running_sphinx, get_cls_name, HideExekallID, get_subclasses, groupby, + is_running_sphinx, get_obj_name, HideExekallID, get_subclasses, groupby, import_all_submodules, delegate_getattr ) from lisa._generic import check_type @@ -49,15 +52,14 @@ from lisa._generic import check_type class DeferredValue: """ Wrapper similar to :func:`functools.partial` allowing to defer computation - of the value until the key is actually used. + of the value until the key is accessed. Once computed, the deferred value is replaced by the value that was - computed. This is useful for values that are very costly to compute, but - should be used with care as it means it will usually not be available in - the offline :class:`lisa.platforms.platinfo.PlatformInfo` instances. This - means that client code such as submodules of ``lisa.analysis`` will - typically not have it available (unless :meth:`~MultiSrcConf.eval_deferred` - was called) although they might need it. + computed. This is useful for values that are very costly to compute. + + .. seealso:: Deferred values can be forcefully computed using + :meth:`~MultiSrcConf.eval_deferred`, e.g. to ensure a serialized + configuration contains all the values an offline user might need. """ def __init__(self, callback, *args, **kwargs): @@ -144,7 +146,7 @@ class KeyDescBase(abc.ABC): This allows defining the structure of the configuration file, in order to sanitize user input and generate help snippets used in various places. """ - INDENTATION = 4 * ' ' + _INDENTATION = 4 * ' ' _VALID_NAME_PATTERN = r'^[a-zA-Z0-9-<>]+$' def __init__(self, name, help): @@ -152,8 +154,19 @@ class KeyDescBase(abc.ABC): self._check_name(name) self.name = name + """ + Name of that key. + """ + self.help = help + """ + Help description associated with the key. + """ + self.parent = None + """ + Parent :class:`LevelKeyDesc`. + """ @classmethod def _check_name(cls, name): @@ -163,11 +176,10 @@ class KeyDescBase(abc.ABC): @property def qualname(self): """ - "Qualified" name of the key. + Qualified name of the configuration key used for error reporting. This is a slash-separated path in the config file from the root to that - key: - / + key following the pattern ``/``. """ return '/'.join(self.path) @@ -176,6 +188,8 @@ class KeyDescBase(abc.ABC): """ Path in the config file from the root to that key. + This path is a list of strings, one item per level. + .. 
note:: This includes the top-level key name, which must be removed before it's fed to :meth:`MultiSrcConf.get_nested_key`. """ @@ -249,6 +263,12 @@ class KeyDesc(KeyDescBase): @property def newtype(self): + """ + Unique type associated with that key. + + This allows refering unambiguously to the type of a configuration key, + linking directly a type annotation to the value of a specific key. + """ if self._newtype: return self._newtype else: @@ -283,8 +303,8 @@ class KeyDesc(KeyDescBase): try: check_type(val, classinfo) except TypeError as e: - classinfo = ' or '.join(get_cls_name(cls) for cls in classinfo) - raise TypeError(f'Key "{key}" is an instance of {get_cls_name(type(val))}, but should be instance of {classinfo}: {e}. Help: {self.help}', key) + classinfo = ' or '.join(get_obj_name(cls) for cls in classinfo) + raise TypeError(f'Key "{key}" is an instance of {get_obj_name(type(val))}, but should be instance of {classinfo}: {e}. Help: {self.help}', key) # DeferredValue will be checked when they are computed if not isinstance(val, DeferredValue): @@ -323,7 +343,7 @@ class KeyDesc(KeyDescBase): prefix=prefix, key=key, classinfo=' or '.join( - get_cls_name( + get_obj_name( key_cls, style=style, fully_qualified=False, @@ -603,6 +623,13 @@ class LevelKeyDesc(KeyDescBase, Mapping): @property def key_desc(self): + """ + Leaf :class:`KeyDescBase` that this level will delegate to in case it + is assigned a leaf value. + + This :class:`KeyDescBase` is pointed at by the :attr:`value_path` + attribute. + """ path = self.value_path if path is None: raise AttributeError(f'{self} does not define a value path for direct assignment') @@ -673,23 +700,14 @@ class LevelKeyDesc(KeyDescBase, Mapping): self[key].validate_val(val) def get_help(self, style=None, last=False): - idt = self.INDENTATION + idt = ' ' if style == 'rst' else self._INDENTATION prefix = '*' if style == 'rst' else ('└' if last else '├') - # Nasty hack: adding an empty ResStructuredText comment between levels - # of nested list avoids getting extra blank line between list items. - # That prevents ResStructuredText from thinking each item must be a - # paragraph. - suffix = '\n\n..\n\n' if style == 'rst' else '\n' - suffix += idt - help_ = '{prefix} {key}:{help}{suffix}'.format( - prefix=prefix, - suffix=suffix, - key=self.name, - help=' ' + self.help if self.help else '', - ) nl = '\n' + idt + _help = ' ' + self.help if self.help else '' + suffix = (nl * 2) if style == 'rst' else nl + help_ = f'{prefix} {self.name}:{_help}{suffix}' last = len(self.children) - 1 - help_ += nl.join( + help_ += suffix.join( key_desc.get_help( style=style, last=i == last, @@ -697,7 +715,7 @@ class LevelKeyDesc(KeyDescBase, Mapping): for i, key_desc in enumerate(self.children) ) if style == 'rst': - help_ += '\n\n..\n' + help_ += '\n' return help_ @@ -819,6 +837,9 @@ class NestedTopLevelKeyDesc(TopLevelKeyDescBase): """ class MultiSrcConfABC(Serializable, abc.ABC): + """ + Abstract Base Class of :class:`MultiSrcConf`. + """ _REGISTERED_TOPLEVEL_KEYS = {} @abc.abstractmethod @@ -1072,7 +1093,7 @@ class MultiSrcConfABC(Serializable, abc.ABC): Newtype.__name__ = newtype_name Newtype.__qualname__ = f'{cls.__qualname__}.{newtype_name}' Newtype.__module__ = cls.__module__ - Newtype.__doc__ = key_desc.help + Newtype.__doc__ = f':meta private:\n\n{key_desc.help}' setattr(cls, newtype_name, Newtype) def make_getter(cls, type_, key_desc): @@ -1202,8 +1223,9 @@ class MultiSrcConf(MultiSrcConfABC, Loggable, Mapping): configuration. 
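The ``STRUCTURE`` change here (and the ``EnergyMeter.name`` change further down) converts a bare abstract ``def STRUCTURE():`` placeholder into an abstract read-only property. A minimal sketch of the pattern; a subclass can satisfy the abstract property with a plain class attribute, which is the usual way of providing constant-like values::

    import abc

    class Base(abc.ABC):
        @property
        @abc.abstractmethod
        def STRUCTURE(self):
            """Structure of the configuration file."""

    class Concrete(Base):
        # A plain class attribute satisfies the abstract property.
        STRUCTURE = {'top-level': {}}

    assert Concrete().STRUCTURE == {'top-level': {}}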
""" + @property @abc.abstractmethod - def STRUCTURE(): + def STRUCTURE(self): """ Class attribute defining the structure of the configuration file, as a instance of :class:`TopLevelKeyDescBase` @@ -2073,40 +2095,50 @@ class Configurable(abc.ABC): Pair a regular class with a configuration class. The pairing is achieved by inheriting from :class:`Configurable` and - setting ``CONF_CLASS`` attribute. The benefits are: - - * The docstring of the class is processed as a string template and - ``{configurable_params}`` is replaced with a Sphinx-compliant list of - parameters. The help and type of each parameter is extracted from the - configuration class. - * The ``DEFAULT_SRC`` attribute of the configuration class is updated - with non-``None`` default values of the class ``__init__`` parameters. - * The :meth:`~Configurable.conf_to_init_kwargs` method allows turning a - configuration object into a dictionary suitable for passing to - ``__init__`` as ``**kwargs``. - * The :meth:`~Configurable.check_init_param` method allows checking - types of ``__init__`` parameters according to what is specified in the - configuration class. + setting :attr:`CONF_CLASS` attribute. The benefits are: + + * The docstring of the class is processed as a string template and + ``{configurable_params}`` is replaced with a Sphinx-compliant list of + parameters. The help and type of each parameter is extracted from the + configuration class. + * The :attr:`~MultiSrcConf.DEFAULT_SRC` attribute of the configuration + class is updated with non-``None`` default values of the class + ``__init__`` parameters. + * The :meth:`~Configurable.conf_to_init_kwargs` method allows turning a + configuration object into a dictionary suitable for passing to + ``__init__`` as ``**kwargs``. + * The :meth:`~Configurable.check_init_param` method allows checking + types of ``__init__`` parameters according to what is specified in the + configuration class. Most of the time, the configuration keys and ``__init__`` parameters have the same name (modulo underscore/dashes which are handled automatically). In that case, the mapping between config keys and ``__init__`` parameters is done without user intervention. When that is not the case, the - ``INIT_KWARGS_KEY_MAP`` class attribute can be used. Its a dictionary with - keys being ``__init__`` parameter names, and values being path to - configuration key. That path is a list of strings to take into account - sublevels like ``['level-key', 'sublevel', 'foo']``. + :attr:`INIT_KWARGS_KEY_MAP` class attribute can be used. .. note:: A given configuration class must be paired to only one class. - Otherwise, the ``DEFAULT_SRC`` conf class attribute will be updated - multiple times, leading to unexpected results. + Otherwise, the :attr:`~MultiSrcConf.DEFAULT_SRC` conf class attribute + will be updated multiple times, leading to unexpected results. .. note:: Some services offered by :class:`Configurable` are not extended to subclasses of a class using it. For example, it would not make sense - to update ``DEFAULT_SRC`` using a subclass ``__init__`` parameters. + to update :attr:`~MultiSrcConf.DEFAULT_SRC` using a subclass + ``__init__`` parameters. """ INIT_KWARGS_KEY_MAP = {} + """ + Dictionary of ``__init__`` parameter names to configuration key path. + + That path is a list of strings to take into account sublevels like + ``['level-key', 'sublevel', 'foo']``. + """ + + CONF_CLASS = None + """ + Configuration class associated with the current class. 
+ """ @classmethod def __init_subclass__(cls, **kwargs): @@ -2204,7 +2236,7 @@ class Configurable(abc.ABC): type=( 'collections.abc.Mapping' if isinstance(key_desc, LevelKeyDesc) else - ' or '.join(get_cls_name(t) for t in key_desc.classinfo) + ' or '.join(get_obj_name(t) for t in key_desc.classinfo) ), ) for param, key_desc diff --git a/lisa/datautils.py b/lisa/datautils.py index fb988720e1dd60406eebe504cef28747f4debf10..a9f59c0318c58beafc3bd73476bfd5505ca7a2d7 100644 --- a/lisa/datautils.py +++ b/lisa/datautils.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Dataframe utilities. +""" import re import functools @@ -1013,49 +1016,49 @@ def series_integrate(y, x=None, sign=None, method='rect', rect_step='post'): *Rectangular Method* - - Step: Post + * Step: Post - Consider the following time series data:: + Consider the following time series data:: - 2 *----*----*----+ - | | - 1 | *----*----+ - | - 0 *----*----+ - 0 1 2 3 4 5 6 7 + 2 *----*----*----+ + | | + 1 | *----*----+ + | + 0 *----*----+ + 0 1 2 3 4 5 6 7 - import pandas as pd - a = [0, 0, 2, 2, 2, 1, 1] - s = pd.Series(a) + import pandas as pd + a = [0, 0, 2, 2, 2, 1, 1] + s = pd.Series(a) - The area under the curve is: + The area under the curve is: - .. math:: + .. math:: - \\sum_{k=0}^{N-1} (x_{k+1} - {x_k}) \\times f(x_k) \\\\ - (2 \\times 3) + (1 \\times 2) = 8 + \\sum_{k=0}^{N-1} (x_{k+1} - {x_k}) \\times f(x_k) \\\\ + (2 \\times 3) + (1 \\times 2) = 8 - - Step: Pre + * Step: Pre - :: + :: - 2 +----*----*----* - | | - 1 | +----*----*----+ - | - 0 *----* - 0 1 2 3 4 5 6 7 + 2 +----*----*----* + | | + 1 | +----*----*----+ + | + 0 *----* + 0 1 2 3 4 5 6 7 - import pandas as pd - a = [0, 0, 2, 2, 2, 1, 1] - s = pd.Series(a) + import pandas as pd + a = [0, 0, 2, 2, 2, 1, 1] + s = pd.Series(a) - The area under the curve is: + The area under the curve is: - .. math:: + .. math:: - \\sum_{k=1}^{N} (x_k - x_{k-1}) \\times f(x_k) \\\\ - (2 \\times 3) + (1 \\times 3) = 9 + \\sum_{k=1}^{N} (x_k - x_{k-1}) \\times f(x_k) \\\\ + (2 \\times 3) + (1 \\times 3) = 9 """ x = _resolve_x(y, x) @@ -1145,15 +1148,15 @@ def series_window(series, window, method='pre', clip_window=True): :param method: Choose how edges are handled: - * `inclusive`: When no exact match is found, include both the previous - and next values around the window. - * `exclusive`: When no exact match is found, only index values within - the range are selected. This is the default pandas float slicing - behavior. - * `nearest`: Not supported with :mod:`polars` objects: when no exact - match is found, take the nearest index value. - * `pre`: When no exact match is found, take the previous index value. - * `post`: When no exact match is found, take the next index value. + * `inclusive`: When no exact match is found, include both the previous + and next values around the window. + * `exclusive`: When no exact match is found, only index values within + the range are selected. This is the default pandas float slicing + behavior. + * `nearest`: Not supported with :mod:`polars` objects: when no exact + match is found, take the nearest index value. + * `pre`: When no exact match is found, take the previous index value. + * `post`: When no exact match is found, take the next index value. .. note:: The index of `series` must be monotonic and without duplicates. 
""" diff --git a/lisa/energy_meter.py b/lisa/energy_meter.py index 1fbb0e62d3ead7dadc55fd732798a67571c9ac66..9f206cde698194f84156070e3c865046fee64c75 100644 --- a/lisa/energy_meter.py +++ b/lisa/energy_meter.py @@ -14,6 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Energy measurement device support. + +.. deprecated:: The content of the is module is deprecated as none of these + devices are in active use. Additionally, the data from such device is + usually hard to synchronize with other software events as there is no + shared clock. As a result, an approach based on ftrace is usually favored. +""" import abc import json @@ -22,7 +30,7 @@ import os.path import time import shutil -from collections import namedtuple +from typing import NamedTuple, Dict from collections.abc import Mapping from subprocess import Popen, PIPE, STDOUT import subprocess @@ -41,9 +49,11 @@ from lisa.conf import ( SimpleMultiSrcConf, KeyDesc, TopLevelKeyDesc, Configurable, ) -# Default energy measurements for each board -EnergyReport = namedtuple('EnergyReport', - ['channels', 'report_file', 'data_frame']) +class EnergyReport(NamedTuple): + channels: Dict[str, float] + report_file: str + data_frame: pd.DataFrame + _deprecate_emeter = deprecate( 'LISA energy meters are deprecated, please use devlib instruments or contribute the instrument to devlib', @@ -102,8 +112,9 @@ class EnergyMeter(Loggable, Configurable): chosen_cls.check_init_param(**kwargs) return chosen_cls(**kwargs) + @property @abc.abstractmethod - def name(): + def name(self): pass @abc.abstractmethod @@ -251,8 +262,6 @@ class HWMon(EnergyMeter): class _DevlibContinuousEnergyMeter(EnergyMeter): """ - :meta public: - Common functionality for devlib Instruments in CONTINUOUS mode """ diff --git a/lisa/energy_model.py b/lisa/energy_model.py index 810d32ac4deb58389ba994c887b44b2ff5b3478d..6b65756e953b4680c9a2e96f10c1bb123d0c6247 100644 --- a/lisa/energy_model.py +++ b/lisa/energy_model.py @@ -16,10 +16,11 @@ # """Classes for modeling and estimating energy usage of CPU systems""" -from collections import namedtuple, OrderedDict +from collections import OrderedDict from itertools import product import operator import re +from typing import NamedTuple, Optional import pandas @@ -70,54 +71,64 @@ class EnergyModelCapacityError(Exception): """Used by :meth:`EnergyModel.get_optimal_placements`""" -class ActiveState(namedtuple('ActiveState', ['capacity', 'power'])): - """Represents power and compute capacity at a given frequency +class _ActiveState(NamedTuple): + capacity: Optional[float] + """ + Relative compute capacity at frequency. + """ + + power: Optional[float] + """ + Power usage at frequency. + """ - :param capacity: Relative compute capacity at frequency - :param power: Power usage at frequency +class ActiveState(_ActiveState): + """ + Represents power and compute capacity at a given frequency """ def __new__(cls, capacity=None, power=None): - return super().__new__(cls, capacity, power) + return super().__new__(cls, capacity=capacity, power=power) class _CpuTree(Loggable): """ - :meta public: - Internal class. Abstract representation of a CPU topology. Each node contains either a single CPU or a set of child nodes. - - :Attributes: - * ``cpus``: CPUs contained in this node. Includes those of child nodes. - * ``cpu``: For convenience, this holds the single CPU contained by leaf - nodes. ``None`` for non-leaf nodes. 
""" def __init__(self, cpu, children): if (cpu is None) == (children is None): raise ValueError('Provide exactly one of: cpu or children') - self.parent = None - #: Test yolo - self.cpu = cpu - if cpu is not None: #: This is another thingie - self.cpus = (cpu,) - self.children = [] + cpus = (cpu,) + children = [] else: if len(children) == 0: raise ValueError('children cannot be empty') - self.cpus = tuple(sorted({ + cpus = tuple(sorted({ cpu for node in children for cpu in node.cpus })) - self.children = children for child in children: child.parent = self + self.cpus = cpus + """ + CPUs contained in this node. Includes those of child nodes. + """ + + self.cpu = cpu + """ + For convenience, this holds the single CPU contained by leaf nodes. + ``None`` for non-leaf nodes. + """ + + self.parent = None + self.children = children self.name = None def __repr__(self): @@ -268,10 +279,6 @@ class PowerDomain(_CpuTree): :type cpu: int :param children: Non-empty list of child :class:`PowerDomain` objects :type children: list(PowerDomain) - - :Attributes: - * ``cpus`` (`tuple(int)`): CPUs contained in this node. Includes - those of child nodes. """ def __init__(self, idle_states, cpu=None, children=None): @@ -306,13 +313,6 @@ class EnergyModel(Serializable, Loggable): frequencies must be equal (probably because they share a clock). The frequency domains must be a partition of the CPUs. - :Attributes: - * ``cpu_nodes``: List of leaf (CPU) :class`:`EnergyModelNode` - * ``cpus``: List of logical CPU numbers in the system - * ``capacity_scale``: The relative computational capacity of the most - powerful CPU at its highest available frequency. Utilisation is in - the interval ``[0, capacity_scale]``. - :param root_node: Root of :class:`EnergyModelNode` tree :param root_power_domain: Root of :class:`PowerDomain` tree :param freq_domains: Collection of collections of logical CPU numbers @@ -321,11 +321,11 @@ class EnergyModel(Serializable, Loggable): .. note:: The most signficant shortcomings of the model are: - 1. Voltage domains are assumed to be congruent to frequency domains + 1. Voltage domains are assumed to be congruent to frequency domains - 2. Idle state power is assumed to be independent of voltage + 2. Idle state power is assumed to be independent of voltage - 3. Temperature is ignored entirely + 3. Temperature is ignored entirely .. _cpu-utils: @@ -352,6 +352,10 @@ class EnergyModel(Serializable, Loggable): def __init__(self, root_node, root_power_domain, freq_domains): self.cpus = root_node.cpus + ''' + List of logical CPU numbers in the system + ''' + if self.cpus != tuple(range(len(self.cpus))): raise ValueError(f'CPU IDs [{self.cpus}] are sparse') @@ -387,6 +391,9 @@ class EnergyModel(Serializable, Loggable): self.root = root_node self.cpu_nodes = sorted_leaves(root_node) + ''' + List of leaf (CPU) :class:`EnergyModelNode` + ''' self.pd = root_power_domain self.cpu_pds = sorted_leaves(root_power_domain) assert len(self.cpu_pds) == len(self.cpu_nodes) @@ -395,6 +402,12 @@ class EnergyModel(Serializable, Loggable): node.max_capacity for node in self.cpu_nodes ) + ''' + The relative computational capacity of the most powerful CPU at its + highest available frequency. Utilisation is in the interval + ``[0, capacity_scale]``. 
+ ''' + def _cpus_with_capacity(self, cap): """ diff --git a/lisa/pelt.py b/lisa/pelt.py index dc87356710481e77258362debd0dcd4e741d2a9e..91d3703bc4078cc587e86619e6caf56bb38ccf89 100644 --- a/lisa/pelt.py +++ b/lisa/pelt.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +PELT kernel signal simulation utilities. +""" import math import functools diff --git a/lisa/platforms/__init__.py b/lisa/platforms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e0f8f9f0d0717ed20a4a13e8ec1083d1b4b90f91 --- /dev/null +++ b/lisa/platforms/__init__.py @@ -0,0 +1,7 @@ +""" +Target platform support. + +The primary input of platform-specific information in :mod:`lisa` is achieved +using :class:`lisa.platforms.platinfo.PlatformInfo`. +""" + diff --git a/lisa/platforms/hikey620.py b/lisa/platforms/hikey620.py index edd77159f47cf93b83ebc4ce07c040e5822e38e8..3e0358fcf542966e14bbe6f8567ce6f0da8e8b3a 100644 --- a/lisa/platforms/hikey620.py +++ b/lisa/platforms/hikey620.py @@ -14,6 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Hikey 620 board hardcoded platform information. + + +.. deprecated:: Information found in this module should nowadays be + auto-detected using :class:`~lisa.platforms.platinfo.PlatformInfo`. +""" from lisa.energy_model import (ActiveState, EnergyModelNode, EnergyModelRoot, PowerDomain, EnergyModel) diff --git a/lisa/platforms/juno_r0.py b/lisa/platforms/juno_r0.py index 1fce8aa34db4462fcf2b28774d9f69481d7a86aa..f03ec7b94aa9920c4dbb2defc64bee734bab7760 100644 --- a/lisa/platforms/juno_r0.py +++ b/lisa/platforms/juno_r0.py @@ -14,6 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Juno R0 board hardcoded platform information. + + +.. deprecated:: Information found in this module should nowadays be + auto-detected using :class:`~lisa.platforms.platinfo.PlatformInfo`. +""" from collections import OrderedDict from lisa.energy_model import (ActiveState, EnergyModelNode, EnergyModelRoot, diff --git a/lisa/platforms/platinfo.py b/lisa/platforms/platinfo.py index beb378aabebb54ff8d89b07fc7dc67dcc46026db..d12c2423a8f73e8e2b60b1cacd25b99baf03ad1e 100644 --- a/lisa/platforms/platinfo.py +++ b/lisa/platforms/platinfo.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Generic auto-detected platform information support. +""" import re import functools diff --git a/lisa/regression.py b/lisa/regression.py index 4ece4fc0ff84d097507a349fd8e086e0860daf25..c72b0a5dff71d79c218153a4e5d469a0877e50d7 100644 --- a/lisa/regression.py +++ b/lisa/regression.py @@ -14,17 +14,30 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Regression testing utilities. +""" import math import itertools -from collections import OrderedDict, namedtuple +from collections import OrderedDict +from typing import NamedTuple import scipy.stats from lisa.utils import groupby, memoized from lisa.tests.base import Result, ResultBundleBase -ResultCount = namedtuple('ResultCount', ('passed', 'failed')) +class ResultCount(NamedTuple): + passed: int + """ + Number of passed occurrences. + """ + + failed: int + """ + Number of failed occurrences. 
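With typed ``passed``/``failed`` fields, two ``ResultCount`` samples can be compared statistically. Illustrative only: a Fisher exact test on the 2x2 contingency table is one classic choice, and is not necessarily the statistic used by ``RegressionResult``::

    from typing import NamedTuple
    import scipy.stats

    class Count(NamedTuple):
        passed: int
        failed: int

    def regression_p_value(old, new):
        # Fisher exact test on the 2x2 pass/fail contingency table.
        _, p = scipy.stats.fisher_exact([
            [old.passed, old.failed],
            [new.passed, new.failed],
        ])
        return p

    print(regression_p_value(Count(95, 5), Count(70, 30)))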
+ """ class RegressionResult: diff --git a/lisa/stats.py b/lisa/stats.py index 2b8e4ca32ccac3c7513d43abadc8259f026db2b6..7ac3f020878b0e3b5e5bc17c01050b43030b1bad 100644 --- a/lisa/stats.py +++ b/lisa/stats.py @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Statistical comparison helpers. +""" + import uuid import functools from operator import itemgetter @@ -174,13 +178,13 @@ class Stats(Loggable): tag value is ``None``, the key will only be used for grouping in graphs. Comparison will add the following statistics: - * A 2-sample Komolgorov-Smirnov test ``'ks2samp_test'`` column. - This test is non-parametric and checks for difference in - distributions. The only assumption is that the distribution is - continuous, which should suit almost all use cases - * Most statistics will be normalized against the reference group as - a difference percentage, except for a few non-normalizable - values. + * A 2-sample Komolgorov-Smirnov test ``'ks2samp_test'`` column. + This test is non-parametric and checks for difference in + distributions. The only assumption is that the distribution is + continuous, which should suit almost all use cases + * Most statistics will be normalized against the reference group as + a difference percentage, except for a few non-normalizable + values. .. note:: The group referenced must exist, otherwise unexpected behaviours might occur. @@ -198,9 +202,9 @@ class Stats(Loggable): :param agg_cols: Columns to aggregate on. In a sense, the given columns will be treated like a compound iteration number. Defaults to: - * ``iteration`` column if available, otherwise - * All the tag columns that are neither the value nor part of the - ``ref_group``. + * ``iteration`` column if available, otherwise + * All the tag columns that are neither the value nor part of the + ``ref_group``. :type agg_cols: list(str) @@ -221,23 +225,23 @@ class Stats(Loggable): will be made to guess the most appropriate kind of mean to use using the ``mean_kind_col``, ``unit_col`` and ``control_var_col``: - * The mean itself, as: + * The mean itself, as: - * ``'mean'`` (arithmetic) - * ``'hmean'`` (harmonic) - * ``'gmean'`` (geometric) + * ``'mean'`` (arithmetic) + * ``'hmean'`` (harmonic) + * ``'gmean'`` (geometric) - * The Standard Error of the Mean (SEM): + * The Standard Error of the Mean (SEM): - * ``'sem'`` (arithmetic) - * ``'hse'`` (harmonic) - * ``'gse'`` (geometric) + * ``'sem'`` (arithmetic) + * ``'hse'`` (harmonic) + * ``'gse'`` (geometric) - * The standard deviation: + * The standard deviation: - * ``'std'`` (arithmetic) - * ``'hsd'`` (harmonic) - * ``'gsd'`` (geometric) + * ``'std'`` (arithmetic) + * ``'hsd'`` (harmonic) + * ``'gsd'`` (geometric) :type stats: dict(str, str or collections.abc.Callable) diff --git a/lisa/target.py b/lisa/target.py index 362cba87bb37c8d897bd7e812046cee019116649..3512e643d6185360753e158a0be4b6098a3c5557 100644 --- a/lisa/target.py +++ b/lisa/target.py @@ -14,6 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Target manipulation helpers. + +A target is a device such as an android phone that can be manipulated +programmatically using :class:`lisa.target.Target`. 
+""" from datetime import datetime import os @@ -39,10 +45,10 @@ import typing import devlib from devlib.exception import TargetStableError -from devlib.utils.misc import which +from devlib.utils.misc import which, to_identifier from devlib.platform.gem5 import Gem5SimulationPlatform -from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized, destroyablecontextmanager, ContextManagerExit, update_params_from, delegate_getattr +from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized, destroyablecontextmanager, ContextManagerExit, update_params_from, delegate_getattr, DelegateToAttr from lisa._assets import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc, Configurable, DelegatedLevelKeyDesc, ConfigKeyError from lisa._kmod import _KernelBuildEnv, DynamicKmod, _KernelBuildEnvConf @@ -59,15 +65,22 @@ else: class PasswordKeyDesc(KeyDesc): + """ + :class:`~lisa.conf.KeyDesc` used to describe the target password. + """ + def pretty_format(self, v): + """ + Hide the password with a generic placeholder. + """ return '' # Make sure all submodules of devlib.module are imported so the classes # are all created before we list them import_all_submodules(devlib.module) -_DEVLIB_AVAILABLE_MODULES = { - cls.name +_DEVLIB_AVAILABLE_MODULE_CLASSES = { + to_identifier(cls.name): cls for cls in get_subclasses(devlib.module.Module) if ( getattr(cls, 'name', None) @@ -103,7 +116,7 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): Content of target_conf.yml: - .. literalinclude:: ../target_conf.yml + .. literalinclude:: ../../../../target_conf.yml :language: YAML :: @@ -193,7 +206,13 @@ class TargetConf(SimpleMultiSrcConf, HideExekallID): } -class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): +class Target( + DelegateToAttr('target', [devlib.Target]), + Loggable, + HideExekallID, + ExekallTaggable, + Configurable, +): """ Wrap :class:`devlib.target.Target` to provide additional features on top of it. @@ -350,7 +369,7 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): logger.warning('Will not load cgroups devlib module: target is using systemd, which already uses cgroups') devlib_excluded_modules.add('cgroups') - self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules + self._devlib_loadable_modules = set(_DEVLIB_AVAILABLE_MODULE_CLASSES.keys()) - devlib_excluded_modules # Initialize binary tools to deploy if tools: @@ -523,7 +542,7 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): .. note:: This will attempt to load the module if it's not loaded already, and bail out if it fails to load. 
""" - if module not in _DEVLIB_AVAILABLE_MODULES: + if module not in _DEVLIB_AVAILABLE_MODULE_CLASSES.keys(): raise ValueError(f'"{module}" is not a devlib module') try: @@ -548,31 +567,37 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): # If it was not in the loadable list, it # has been excluded explicitly - if attr in (_DEVLIB_AVAILABLE_MODULES - self._devlib_loadable_modules): + if attr in (set(_DEVLIB_AVAILABLE_MODULE_CLASSES.keys()) - self._devlib_loadable_modules): # pylint: disable=raise-missing-from raise AttributeError(f'Devlib target module {attr} was explicitly excluded, not loading it') - def get(): - return delegate_getattr(self, 'target', attr) + get = super().__getattr__ try: - return get() + return get(attr) except AttributeError: # Load the module on demand if attr in self._devlib_loadable_modules: self.logger.info(f'Loading target devlib module {attr}') self.target.install_module(attr) - return get() + return get(attr) # Something else that does not exist ... else: raise + @classmethod + def __instance_dir__(cls): + return { + **super().__instance_dir__(), + **_DEVLIB_AVAILABLE_MODULE_CLASSES + } + def __dir__(self): """ List our attributes plus the ones from the underlying target, and the devlib modules that could be loaded on-demand. """ - attrs = set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules + attrs = set(super().__dir__()) | self._devlib_loadable_modules return sorted(attrs) @classmethod @@ -1292,7 +1317,9 @@ class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): def remote_func(self, **kwargs): """ Decorates a given function to execute remotely using - :meth:`execute_python`:: + :meth:`execute_python`. + + :: target = Target(...) diff --git a/lisa/tests/__init__.py b/lisa/tests/__init__.py index 038f357c41be84a50ed44b7b5d170610f766bdb0..01e23edcccea1506ff7a69e9bdaa2354b22bea5c 100644 --- a/lisa/tests/__init__.py +++ b/lisa/tests/__init__.py @@ -14,5 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Helpers to write tests using LISA. + +.. note:: The test we are talking about here are applications using :mod:`lisa` + that happen to be testing a system, such as the Linux kernel scheduler. + These are not the :mod:`lisa` Python package unit tests. +""" # vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab diff --git a/lisa/tests/base.py b/lisa/tests/base.py index e3b97f9e9fbe2ffcacc40605d1d1932adc69d422..e789608d17f7c8062c0febeb07f753523c61b602 100644 --- a/lisa/tests/base.py +++ b/lisa/tests/base.py @@ -53,7 +53,7 @@ from lisa.target import Target from lisa.utils import ( Serializable, memoized, lru_memoized, ArtifactPath, non_recursive_property, update_wrapper_doc, ExekallTaggable, annotations_from_signature, - get_sphinx_name, optional_kwargs, group_by_value, kwargs_dispatcher, + get_obj_name, optional_kwargs, group_by_value, kwargs_dispatcher, dispatch_kwargs, Loggable, kwargs_forwarded_to, docstring_update, is_running_ipython, ) @@ -439,16 +439,15 @@ class TestBundleMeta(abc.ABCMeta): Method with a return annotation of :class:`ResultBundleBase` are wrapped to: - * Update the ``context`` attribute of a returned - :class:`ResultBundleBase` + * Update the ``context`` attribute of a returned :class:`ResultBundleBase` - * Add an ``undecided_filter`` attribute, with - :meth:`add_undecided_filter` decorator, so that any test method can - be used as a pre-filter for another one right away. 
+ * Add an ``undecided_filter`` attribute, with :meth:`add_undecided_filter` + decorator, so that any test method can be used as a pre-filter for + another one right away. - * Wrap ``_from_target`` to provide a single ``collector`` parameter, - built from the composition of the collectors provided by - ``_make_collector`` methods in the base class tree. + * Wrap ``_from_target`` to provide a single ``collector`` parameter, built + from the composition of the collectors provided by ``_make_collector`` + methods in the base class tree. If ``_from_target`` is defined in the class but ``from_target`` is not, a stub is created and the annotation of ``_from_target`` is copied to the @@ -589,7 +588,7 @@ class TestBundleMeta(abc.ABCMeta): {} """).strip().format( - get_sphinx_name(func, style='rst', abbrev=True), + get_obj_name(func, style='rst', abbrev=True), inspect.getdoc(func), ), ) @@ -626,6 +625,12 @@ class TestBundleMeta(abc.ABCMeta): func.undecided_filter = decorator return func + # Ensure that the methods become recognized as available on classes using + # us as a metaclass: + # https://docs.python.org/3/library/inspect.html#inspect.getmembers + def __dir__(metacls): + return super().__dir__() + ['add_undecided_filter'] + @classmethod def __prepare__(metacls, cls_name, bases, **kwargs): # Decorate each method when it is bound to its name in the class' @@ -1347,7 +1352,7 @@ class FtraceTestBundleBase(TestBundleBase): @lru_memoized(first_param_maxsize=5) def trace(self): """ - :returns: a :class:`lisa.trace._TraceView` + The :class:`~lisa.trace.Trace` for the collected trace.dat file. All events specified in ``FTRACE_CONF`` are parsed from the trace, so it is suitable for direct use in methods. @@ -1412,6 +1417,9 @@ class TestConfBase(SimpleMultiSrcConf): the actual top-level key will be ``test-conf/foo``. """ def __init_subclass__(cls, **kwargs): + """ + :meta private: + """ structure = copy.copy(cls.STRUCTURE) structure.levels = ['test-conf', *structure.levels] cls.STRUCTURE = structure @@ -1761,8 +1769,8 @@ class RTATestBundle(FtraceTestBundle, DmesgTestBundle): @lru_memoized(first_param_maxsize=5) def trace(self): """ - :returns: a :class:`lisa.trace._TraceView` cropped to the window given - by :meth:`trace_window`. + A :class:`lisa.trace.Trace` cropped to the window given by + :meth:`trace_window`. .. 
seealso:: :attr:`FtraceTestBundleBase.trace` """ diff --git a/lisa/tests/cpufreq/__init__.py b/lisa/tests/cpufreq/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/tests/staging/__init__.py b/lisa/tests/staging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lisa/trace.py b/lisa/trace.py index 9db083ad342b6d26c1415903733c61b831e5f5bb..2d965b4684b88dcef2e6cad51c7c3c21e835c920 100644 --- a/lisa/trace.py +++ b/lisa/trace.py @@ -64,7 +64,7 @@ import polars.selectors as cs import devlib -from lisa.utils import Loggable, HideExekallID, memoized, lru_memoized, deduplicate, take, deprecate, nullcontext, measure_time, checksum, newtype, groupby, PartialInit, kwargs_forwarded_to, kwargs_dispatcher, ComposedContextManager, get_nested_key, unzip_into, order_as, delegate_getattr, DirCache +from lisa.utils import Loggable, HideExekallID, memoized, lru_memoized, deduplicate, take, deprecate, nullcontext, measure_time, checksum, newtype, groupby, PartialInit, kwargs_forwarded_to, kwargs_dispatcher, ComposedContextManager, get_nested_key, unzip_into, order_as, DirCache, DelegateToAttr from lisa.conf import SimpleMultiSrcConf, LevelKeyDesc, KeyDesc, TopLevelKeyDesc, Configurable from lisa.datautils import SignalDesc, df_add_delta, df_deduplicate, df_window, df_window_signals, series_convert, df_update_duplicates, _polars_duration_expr, _df_to, _polars_df_in_memory, Timestamp, _pandas_cleanup_df from lisa.version import VERSION_TOKEN @@ -181,6 +181,22 @@ def _make_hardlink(src, dst): pass +def _df_json_serialize(df): + # TODO: revisit based on the outcome of: + # https://github.com/pola-rs/polars/issues/18284 + with warnings.catch_warnings(): + warnings.simplefilter(action='ignore') + return df.serialize(format='json') + + +def _df_json_deserialize(plan): + # TODO: revisit based on the outcome of: + # https://github.com/pola-rs/polars/issues/18284 + with warnings.catch_warnings(): + warnings.simplefilter(action='ignore') + return pl.LazyFrame.deserialize(plan, format='json') + + def _logical_plan_resolve_paths(cache, plan, kind): swap_dir = Path(cache.swap_dir).resolve() @@ -366,12 +382,12 @@ def _lazyframe_rewrite(df, update_plan): # TODO: once this is solved, we can just inspect the plan rather than # serialize()/deserialize() in JSON # https://github.com/pola-rs/polars/issues/9771 - plan = df.serialize(format='json') + plan = _df_json_serialize(df) plan = json.loads(plan) plan = update_plan(plan) plan = json.dumps(plan) plan = io.StringIO(plan) - df = pl.LazyFrame.deserialize(plan, format='json') + df = _df_json_deserialize(plan) return df @@ -466,6 +482,12 @@ class TraceParserBase(abc.ABC, Loggable, PartialInit): metadata should only be computed when the object is used as a context manager. Note that the same parser object might be used as a context manager multiple times in its lifetime. + + + .. attention:: This class and its base class is not subject to the normal + backward compatibility guarantees. It is considered somewhat internal + and will be modified if necessary, with backward compatibility being + offered on a best-effort basis. 
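The ``_df_json_serialize``/``_df_json_deserialize`` helpers above wrap a logical-plan round-trip that can be reproduced directly, assuming a :mod:`polars` version where the JSON plan format is still available (the format is unstable across versions, hence the warning filtering in the patch)::

    import io
    import json
    import polars as pl

    lf = pl.LazyFrame({'Time': [0.1, 0.2], 'util': [120, 230]}).filter(
        pl.col('util') > 100
    )

    # Round-trip the logical plan through JSON, as _lazyframe_rewrite()
    # does before patching paths inside the plan.
    plan = json.loads(lf.serialize(format='json'))
    lf2 = pl.LazyFrame.deserialize(io.StringIO(json.dumps(plan)), format='json')
    assert lf.collect().equals(lf2.collect())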
""" _STEAL_FILES = False @@ -549,7 +571,7 @@ class TraceParserBase(abc.ABC, Loggable, PartialInit): promise on the availability of any except for the following that *must* be provided if asked for: - * ``time-range`` + * ``time-range`` Metadata may still be made available if not asked for, but only if it's a very cheap byproduct of parsing that incurs no extra cost. @@ -578,14 +600,16 @@ class TraceParserBase(abc.ABC, Loggable, PartialInit): Parse the given event from the trace and return a :class:`pandas.DataFrame` with the following columns: - * ``Time`` index: floating point absolute timestamp in seconds. The - index *must not* have any duplicated values. - * One column per event field, with the appropriate dtype. - * Columns prefixed with ``__``: Header of each event, usually containing the following fields: + * ``Time`` index: floating point absolute timestamp in seconds. The + index *must not* have any duplicated values. + * One column per event field, with the appropriate dtype. + * Columns prefixed with ``__``: Header of each event, usually + containing the following fields: - * ``__cpu``: CPU number the event was emitted from - * ``__pid``: PID of the current process scheduled at the time the event was emitted - * ``__comm``: Task command name going with ``__pid`` at the point the event was emitted + * ``__cpu``: CPU number the event was emitted from + * ``__pid``: PID of the current process scheduled at the time the event was emitted + * ``__comm``: Task command name going with ``__pid`` at the point the + event was emitted :param event: name of the event to parse :type event: str @@ -1070,10 +1094,10 @@ class EventParserBase: Required attributes or properties: - * ``event``: name of the event - * ``regex``: full regex to parse a line of the event - * ``fields``: mapping of field names to :mod:`pandas` dtype to use for - the :class:`pandas.DataFrame` column. + * ``event``: name of the event + * ``regex``: full regex to parse a line of the event + * ``fields``: mapping of field names to :mod:`pandas` dtype to use for + the :class:`pandas.DataFrame` column. """ PARSER_REGEX_TERMINALS = dict( @@ -1268,9 +1292,9 @@ class PrintTxtEventParser(TxtEventParser): Event parser for the folling events, displayed in non-raw format by ``trace-cmd``: - * ``print`` - * ``bprint`` - * ``bputs`` + * ``print`` + * ``bprint`` + * ``bputs`` .. note:: ``bputs`` and ``print`` could be parsed in raw format, but that would make them harder to parse (function resolution needed), and @@ -2510,20 +2534,20 @@ class SimpleTxtTraceParser(TxtTraceParserBase): Each event description can include the following dict keys: - * ``header_regex``: Regex to parse the event header. If not set, the - header regex from the trace parser will be used. + * ``header_regex``: Regex to parse the event header. If not set, the + header regex from the trace parser will be used. - * ``fields_regex``: Regex to parse the fields part of the event (i.e. - the part after the header). This is the most commonly modified - setting to take into account special cases in event formatting. + * ``fields_regex``: Regex to parse the fields part of the event (i.e. + the part after the header). This is the most commonly modified + setting to take into account special cases in event formatting. - * ``fields``: Mapping of field names to :class:`pandas.DataFrame` - column dtype. This allows using a smaller dtype or the use of a - non-inferred dtype like ``boolean``. + * ``fields``: Mapping of field names to :class:`pandas.DataFrame` + column dtype. 
This allows using a smaller dtype or the use of a + non-inferred dtype like ``boolean``. - * ``positional_field``: Name of the positional field (comming before - the named fields). If ``None``, the column will be suppressed in the - parsed dataframe. + * ``positional_field``: Name of the positional field (coming before + the named fields). If ``None``, the column will be suppressed in the + parsed dataframe. """ HEADER_REGEX = None @@ -2531,11 +2555,11 @@ class SimpleTxtTraceParser(TxtTraceParserBase): Default regex to use to parse event header. It must parse the following groups: - * ``__timestamp``: the timestamp of the event - * ``__event``: the name of the event - * ``__cpu`` (optional): the CPU by which the event was emitted - * ``__pid`` (optional): the currently scheduled PID at the point the event was emitted - * ``__comm`` (optional): the currently scheduled task's name at the point the event was emitted + * ``__timestamp``: the timestamp of the event + * ``__event``: the name of the event + * ``__cpu`` (optional): the CPU by which the event was emitted + * ``__pid`` (optional): the currently scheduled PID at the point the event was emitted + * ``__comm`` (optional): the currently scheduled task's name at the point the event was emitted .. note:: It must *not* capture the event fields, as it will be concatenated with the field regex of each event to parse full lines. @@ -2719,24 +2743,50 @@ class SysTraceParser(HRTxtTraceParser): class _InternalTraceBase(abc.ABC): """ - Base class for common functionalities between :class:`Trace` and + Base class for common functionalities between :class:`_Trace` and :class:`_TraceViewBase`. - - :Attributes: - - * ``start``: The timestamp of the first trace event. - * ``end``: The timestamp of the last trace event. - * ``basetime``: Absolute timestamp when the tracing started. This might - differ from ``start`` as the latter can be affected by various - normalization or windowing features. - * ``endtime``: Absolute timestamp when the tracing stopped. It has - similar characteristics as ``basetime``. - """ def __init__(self): pass + @property + @abc.abstractmethod + def start(self): + ''' + The timestamp of the first trace event. + ''' + + @property + @abc.abstractmethod + def end(self): + ''' + The timestamp of the last trace event. + ''' + + @property + @abc.abstractmethod + def basetime(self): + ''' + Absolute timestamp when the tracing started. + + This might differ from :attr:`start` as the latter can be affected + by various normalization or windowing features. + ''' + + @property + @abc.abstractmethod + def endtime(self): + ''' + Absolute timestamp when the tracing stopped. + + This might differ from :attr:`end` as the latter can be affected by + various normalization or windowing features. + + .. note:: With some parsers, that might be the timestamp of the last + recorded event instead if the trace end timestamp was not recorded. + ''' + @property def trace_state(self): """ @@ -2751,7 +2801,7 @@ class _InternalTraceBase(abc.ABC): @property def time_range(self): """ - Duration of that trace. + Duration of that trace (difference between :attr:`start` and :attr:`end`). """ return self.end - self.start @@ -2766,10 +2816,95 @@ class _InternalTraceBase(abc.ABC): @property def available_events(self): + """ + Set of available events on that trace. + + .. warning:: The set of events can change as new events are parsed.
Not + all trace parsers are able to provide the list of events that could + be parsed upfront, so do not rely on this set to be stable. + However, using ``event in trace.available_events`` will always + return ``True`` if the event can be parsed, possibly at the cost of + actually parsing the event to check if that works. + """ return _AvailableTraceEventsSet(self) def get_view(self, **kwargs): - view = _TraceViewBase.make_view(self, **kwargs) + """ + Get a view on a trace. + + Various aspects of the trace can be altered depending on the + parameters, such as cropping time-wise to fit in ``window``. + + :param window: Crop the dataframe to include events that are inside the + given window. This includes the event immediately preceding the + left boundary if there is no exact timestamp match. This can also + include more rows before the beginning of the window based on the + ``signals`` required by the user. A ``None`` boundary will extend + to the beginning/end of the trace. + :type window: tuple(float or None, float or None) or None + + :param signals: List of :class:`lisa.datautils.SignalDesc` to use when + selecting rows before the beginning of the ``window``. This allows + ensuring that all the given signals have a known value at the beginning + of the window. + :type signals: list(lisa.datautils.SignalDesc) or None + + :param compress_signals_init: If ``True``, the timestamp of the events + before the beginning of the ``window`` will be compressed to be + either right before the beginning of the window, or at the exact + timestamp of the beginning of the window (depending on the + dataframe library chosen, since pandas cannot cope with more than + one row for each timestamp). + :type compress_signals_init: bool or None + + :param normalize_time: If ``True``, the beginning of the ``window`` + will become timestamp 0. If no ``window`` is used, the beginning of + the trace is taken as T=0. This allows easier comparison of traces + that were generated with absolute timestamps (e.g. timestamp + related to the uptime of the system). It also allows comparing + various slices of the same trace. + :type normalize_time: bool or None + + :param events_namespaces: List of namespaces of the requested events. + Each namespace will be tried in order until the event is found. The + ``None`` namespace can be used to specify no namespace. The full + event name is formed with ``<namespace>__<event>``. + :type events_namespaces: list(str or None) + + :param events: Preload the given events when creating the view. This + can be advantageous as a single instance of the parser will be + spawned, so if the parser supports it, multiple events will be + parsed in one trace traversal. + :type events: list(str) or lisa.trace.TraceEventCheckerBase or None + + :param strict_events: If ``True``, will raise an exception if the + ``events`` specified cannot be loaded from the trace. This allows + failing early in trace processing. + :type strict_events: bool or None + + :param process_df: Function called on each dataframe returned by + :meth:`lisa.trace.TraceBase.df_event`. The parameters are as follows: + + 1. Name of the event being queried. + 2. A :class:`polars.LazyFrame` of the event. + + It is expected to return a :class:`polars.LazyFrame` as well. + + :type process_df: typing.Callable[[str, polars.LazyFrame], polars.LazyFrame] or None + + :param df_fmt: Format of the dataframes returned by + :meth:`lisa.trace.TraceBase.df_event`. One of: + + * ``"pandas"``: :class:`pandas.DataFrame`.
+ * ``"polars-lazyframe"``: :class:`polars.LazyFrame`. + * ``None``: defaults to ``"pandas"`` for + backward-compatibility. + + :type df_fmt: str or None + + :Variable arguments: Forwarded to the contructor of the view. + """ + view = _TraceViewBase._make_view(self, **kwargs) assert isinstance(view, _TraceViewBase) return view @@ -2786,9 +2921,18 @@ class _InternalTraceBase(abc.ABC): @abc.abstractmethod def _preload_events(self, events): + """ + Preload the given events by parsing them if necessary. + + This can be more efficient than requesting events one by one as the + parser might be able to parse multiple events in one pass. + """ pass def __getitem__(self, window): + """ + Slice the trace with the given time range. + """ if not isinstance(window, slice): raise TypeError("Cropping window must be an instance of slice") @@ -2855,6 +2999,12 @@ class _InternalTraceBase(abc.ABC): # User-facing class TraceBase(_InternalTraceBase): + """ + Base class for all public trace classes. + + This :class:`abc.ABC` class defines the API available on trace-like + objects, and is suitable to use with ``isinstance`` and ``issubclass``. + """ @abc.abstractmethod def df_event(self, event, **kwargs): """ @@ -2870,15 +3020,15 @@ class TraceBase(_InternalTraceBase): ``bprint`` event format string, and the field values are decoded from the variable arguments buffer. Note that: - * The field values *must* be in the buffer, i.e. the format - string is only used as the event format, no "literal value" - will be extracted from it. + * The field values *must* be in the buffer, i.e. the format + string is only used as the event format, no "literal value" + will be extracted from it. - * The event *must* have fields. If not, ``trace_printk()`` - will emit a bputs event that will be ignored at the moment. - We need to get a bprint event. + * The event *must* have fields. If not, ``trace_printk()`` + will emit a bputs event that will be ignored at the moment. + We need to get a bprint event. - * Field names *must* be unique. + * Field names *must* be unique. .. code-block:: C @@ -2915,82 +3065,6 @@ class TraceBase(_InternalTraceBase): """ pass - def get_view(self, *args, **kwargs): - """ - Get a view on a trace cropped time-wise to fit in ``window`` and with - event dataframes post processed with ``process_df``. - - :param window: Crop the dataframe to include events that are inside the - given window. This includes the event immediately preceding the - left boundary if there is no exact timestamp match. This can also - include more rows before the beginning of the window based on the - ``signals`` required by the user. A ``None`` boundary will extend - to the beginning/end of the trace. - :type window: tuple(float or None, float or None) or None - - :param signals: List of :class:`lisa.datautils.SignalDesc` to use when - selecting rows before the beginning of the ``window``. This allows - ensuring that all the given signals have a known value at the beginning - of the window. - :type signals: list(lisa.datautils.SignalDesc) or None - - :param compress_signals_init: If ``True``, the timestamp of the events - before the beginning of the ``window`` will be compressed to be - either right before the beginning of the window, or at the exact - timestamp of the beginning of the window (depending on the - dataframe library chosen, since pandas cannot cope with more than - one row for each timestamp). 
- :type compress_signals_init: bool or None - - :param normalize_time: If ``True``, the beginning of the ``window`` - will become timestamp 0. If no ``window`` is used, the beginning of - the trace is taken as T=0. This allows easier comparison of traces - that were generated with absolute timestamps (e.g. timestamp - related to the uptime of the system). It also allows comparing - various slices of the same trace. - :type normalize_time: bool or None - - :param events_namespaces: List of namespaces of the requested events. - Each namespace will be tried in order until the event is found. The - ``None`` namespace can be used to specify no namespace. The full - event name is formed with ``__``. - :type events_namespaces: list(str or None) - - :param events: Preload the given events when creating the view. This - can be advantageous as a single instance of the parser will be - spawned, so if the parser supports it, multiple events will be - parsed in one trace traversal. - :type events: list(str) or lisa.trace.TraceEventCheckerBase or None - - :param strict_events: If ``True``, will raise an exception if the - ``events`` specified cannot be loaded from the trace. This allows - failing early in trace processing. - :param strict_events: bool or None - - :param process_df: Function called on each dataframe returned by - :meth:`lisa.trace.TraceBase.df_event`. The parameters are as follow: - - 1. Name of the event being queried. - 2. A :class:`polars.LazyFrame` of the event. - - It is expected to return a :class:`polars.LazyFrame` as well. - - :type process_df: typing.Callable[[str, polars.LazyFrame], polars.LazyFrame] or None - - :param df_fmt: Format of the dataframes returned by - :meth:`lisa.trace.TraceBase.df_events`. One of: - - * ``"pandas"``: :class:`pandas.DataFrame`. - * ``"polars-lazyframe"``: :class:`polars.LazyFrame`. - * ``None``: defaults to ``"pandas"`` for - backward-compatibility. - - :type df_fmt: str or None - - :Variable arguments: Forwarded to the contructor of the view. - """ - return super().get_view(*args, **kwargs) - @deprecate('This method has been deprecated and is an alias', deprecated_in='2.0', removed_in='4.0', @@ -3002,10 +3076,35 @@ class TraceBase(_InternalTraceBase): @property @abc.abstractmethod def ana(self): + """ + Allows calling an analysis method on the trace, sharing the dataframe cache. 
+ + **Example** + + Call lisa.analysis.LoadTrackingAnalysis.df_task_signal() on a trace:: + + df = trace.ana.load_tracking.df_task_signal(task='foo', signal='util') + + The ``trace.ana`` proxy can also be called like a function to define default + values for analysis methods:: + + ana = trace.ana(task='big_0-3') + ana.load_tracking.df_task_signal(signal='util') + + # Equivalent to: + ana.load_tracking.df_task_signal(task='big_0-3', signal='util') + + # The proxy can be called again to override the value given to some + # parameters, and the value can also be overridden when calling the + # method: + ana(task='foo').df_task_signal(signal='util') + ana.df_task_signal(task='foo', signal='util') + """ pass @property @abc.abstractmethod + @deprecate(replaced_by=ana, deprecated_in='3.0', removed_in='4.0') def analysis(self): pass @@ -3110,9 +3209,18 @@ class TraceBase(_InternalTraceBase): return self.ana.tasks.task_ids -class _TraceViewBase(_InternalTraceBase): +class _TraceViewBase( + DelegateToAttr( + 'base_trace', + [_InternalTraceBase], + ), + _InternalTraceBase +): def __init__(self, trace): self.base_trace = trace + """ + The original :class:`TraceBase` this view is based on. + """ super().__init__() def __enter__(self): @@ -3122,11 +3230,8 @@ def __exit__(self, *args): return self.base_trace.__exit__(*args) - def __getattr__(self, name): - return delegate_getattr(self, 'base_trace', name) - @classmethod - def make_view(cls, trace, *, window=None, signals=None, compress_signals_init=None, normalize_time=False, events_namespaces=None, events=None, strict_events=False, process_df=None, df_fmt=None, clear_base_cache=None): + def _make_view(cls, trace, *, window=None, signals=None, compress_signals_init=None, normalize_time=False, events_namespaces=None, events=None, strict_events=False, process_df=None, df_fmt=None, clear_base_cache=None): if clear_base_cache is not None: _deprecated_warn(f'"clear_base_cache" parameter has no effect anymore') @@ -3173,8 +3278,33 @@ class _TraceViewBase(_InternalTraceBase): def _internal_df_event(self, *args, **kwargs): return self.base_trace._internal_df_event(*args, **kwargs) + @property + def basetime(self): + return self.base_trace.basetime + + @property + def endtime(self): + return self.base_trace.endtime + + @property + def start(self): + return self.base_trace.start + + @property + def end(self): + return self.base_trace.end + + +class _WindowTraceViewBase(_TraceViewBase, abc.ABC): + @property + @abc.abstractmethod + def normalize_time(self): + """ + ``True`` if the trace timestamps were normalized to start at ``0``. + """ + -class _WindowTraceView(_TraceViewBase): +class _WindowTraceView(_WindowTraceViewBase): """ A view on a :class:`Trace`. @@ -3189,14 +3319,6 @@ class _WindowTraceView(_TraceViewBase): returned by :meth:`TraceBase.df_event`. :type process_df: typing.Callable[[str, pandas.DataFrame], pandas.DataFrame] or None - :Attributes: - * ``base_trace``: The original :class`:`Trace` this view is based on. - * ``ana``: The analysis proxy on the trimmed down :class`:`Trace`. - * ``start``: The timestamp of the first trace event in the view (>= - ``window[0]``) - * ``end``: The timestamp of the last trace event in the view (<= - ``window[1]``) - You can substitute an instance of :class:`Trace` with an instance of :class:`_WindowTraceView`.
This means you can create a view of a trimmed down trace and run analysis code/plots that will only use data within that window, e.g.:: @@ -3245,6 +3367,9 @@ class _WindowTraceView(_TraceViewBase): @property @memoized def start(self): + """ + The timestamp of the first trace event in the view (>= ``window[0]``). + """ t_min, _ = self._window or (None, None) if t_min is None: return self.base_trace.start @@ -3254,6 +3379,9 @@ @property @memoized def end(self): + """ + The timestamp of the last trace event in the view (<= ``window[1]``). + """ _, t_max = self._window or (None, None) if t_max is None: return self.base_trace.end @@ -3264,6 +3392,10 @@ end = max(end, self.start) return end + @property + def normalize_time(self): + return False + def _fixup_window(self, window): _start = self.start _end = self.end @@ -3472,6 +3604,9 @@ class _PreloadEventsTraceView(_TraceViewBase): @property def events(self): + """ + Preloaded events as a :class:`TraceEventCheckerBase`. + """ try: base_events = self.base_trace.events except AttributeError: @@ -3534,6 +3669,9 @@ class _NamespaceTraceView(_TraceViewBase): @property @memoized def events_namespaces(self): + """ + Namespaces events will be looked up in. + """ try: base_namespaces = self.base_trace.events_namespaces except AttributeError: @@ -3633,7 +3771,7 @@ class _TimeOffsetter(_CacheDataDescEncodable): ) -class _NormalizedTimeTraceView(_TraceViewBase): +class _NormalizedTimeTraceView(_WindowTraceViewBase): def __init__(self, trace, window, **kwargs): window = window or (trace.start, None) try: @@ -3651,9 +3789,6 @@ view = view.get_view(**kwargs) super().__init__(view) - self.start = 0 - self.end = self.base_trace.end - start - @classmethod def _with_time_offset(cls, trace, start): # Round down to avoid ending up with negative Time for anything that @@ -3672,6 +3807,14 @@ def endtime(self): return self.base_trace.endtime - self._offset + @property + def start(self): + return 0 + + @property + def end(self): + return self.base_trace.end - self._offset + @property def trace_state(self): return ( @@ -3766,10 +3909,6 @@ class _CacheDataDesc(Mapping): .. note:: Once introduced in a container, instances must not be modified, directly or indirectly. - - :Attributes: - * ``normal_form``: Normal form of the descriptor. Equality is - implemented by comparing this attribute. """ def __init__(self, spec, fmt): @@ -3782,6 +3921,10 @@ self.fmt = fmt self.spec = spec self.normal_form = _CacheDataDescNF.from_spec(self.spec, fmt) + """ + Normal form of the descriptor. Equality is implemented by comparing + this attribute. + """ def __getitem__(self, key): return self.spec[key] @@ -4418,7 +4561,7 @@ class _TraceCache(Loggable): to_parquet() else: try: - plan = data.serialize(format='json') + plan = _df_json_serialize(data) # We failed to serialize the logical plan. This could happen # because it contains references to UDF (e.g.
a lambda passed # to Expr.map_elements()) @@ -4500,7 +4643,7 @@ class _TraceCache(Loggable): ) plan = json.dumps(plan) plan = io.StringIO(plan) - data = pl.LazyFrame.deserialize(plan, format='json') + data = _df_json_deserialize(plan) data = _LazyFrameOnDelete.attach_file_cleanup(data, hardlinks) else: raise ValueError(f'File format not supported "{fmt}" at path: {path}') @@ -4944,62 +5087,12 @@ class _TraceCache(Loggable): } -class _TraceProxy(TraceBase): - class _TraceNotSet: - def __getattribute__(self, attr): - raise RuntimeError('The trace instance can only be used after the end of the "with" statement.') - - def __enter__(self): - return self - - def __exit__(self, *args): - pass - - def __init__(self, path): - self.__base_trace = self._TraceNotSet() - self.__path = path - self.__deallocator = _Deallocator( - # Delete the file once we are done accessing it - f=functools.partial(_file_cleanup, paths=[path]), - on_del=True, - at_exit=True, - ) - - def __getattr__(self, attr): - return delegate_getattr(self, '_TraceProxy__base_trace', attr) - - def _set_trace(self, trace): - self.__base_trace = trace - - def __enter__(self): - self.__base_trace.__enter__() - return self - - def __exit__(self, *args): - try: - return self.__base_trace.__exit__(*args) - finally: - self.__deallocator.run() - - @property - def ana(self): - return self.__base_trace.ana - - @property - def analysis(self): - return self.__base_trace.analysis - - def df_event(self, *args, **kwargs): - return self.__base_trace.df_event(*args, **kwargs) - - def _internal_df_event(self, *args, **kwargs): - return self.__base_trace._internal_df_event(*args, **kwargs) - - def _preload_events(self, *args, **kwargs): - return self.__base_trace._preload_events(*args, **kwargs) - - class _Trace(Loggable, _InternalTraceBase): + """ + Object at the bottom of a :class:`_TraceViewBase` stack. + + It drives the actual trace parser and caches. + """ def _select_userspace(self, source_event, meta_event, df): # pylint: disable=unused-argument,no-self-use @@ -5554,6 +5647,12 @@ class _Trace(Loggable, _InternalTraceBase): @property @memoized def cpus_count(self): + """ + Number of CPUs on which data was gathered in that trace. + + This will typically be the number of CPUs on the target, but might + sometimes differ depending on the file format of the trace. + """ try: return self.plat_info['cpus-count'] except KeyError: @@ -5583,8 +5682,6 @@ class _Trace(Loggable, _InternalTraceBase): return count - - def _get_parser(self, events=tuple(), needed_metadata=None): cache = self._cache path = self.trace_path @@ -5637,21 +5734,20 @@ class _Trace(Loggable, _InternalTraceBase): @property def basetime(self): - """ - First absolute timestamp available in the trace. - """ return self._get_time_range()[0] @property def endtime(self): - """ - Timestamp of when the tracing stopped. - - .. note:: With some parsers, that might be the timestamp of the last - recorded event instead if the trace end timestamp was not recorded. 
- """ return self._get_time_range()[1] + @property + def start(self): + return self.basetime + + @property + def end(self): + return self.endtime + @memoized def _get_time_range(self, parser=None): return self._get_metadata('time-range', parser=parser) @@ -6007,16 +6103,14 @@ class _Trace(Loggable, _InternalTraceBase): else: return True - @property - def start(self): - return self.basetime - @property - def end(self): - return self.endtime - - -class Trace(TraceBase): +class Trace( + DelegateToAttr( + '_Trace__view', + [_InternalTraceBase], + ), + TraceBase, +): """ This class provides a way to access event dataframes and ties together various low-level moving pieces to make that happen. @@ -6089,17 +6183,6 @@ class Trace(TraceBase): the max size is the size of the trace file. :type max_swap_size: int or None - :Attributes: - * ``start``: The timestamp of the first trace event in the trace - * ``end``: The timestamp of the last trace event in the trace - * ``time_range``: Maximum timespan for all collected events - * ``window``: Conveniency tuple of ``(start, end)``. - * ``available_events``: Events available in the parsed trace, exposed - as some kind of set-ish smart container. Querying for event might - trigger the parsing of it. - * ``ana``: The analysis proxy used as an entry point to run analysis - methods on the trace. See :class:`lisa.analysis._proxy.AnalysisProxy`. - :Supporting more events in text parsers: .. note:: ``trace.dat`` parser can now fully infer the dataframe schema @@ -6110,20 +6193,20 @@ class Trace(TraceBase): event format, but there may be a number of reasons to pass a custom event parser: - * The event format produced by a given kernel differs from the - description bundled with the parser, leading to incorrect parse - (missing field). + * The event format produced by a given kernel differs from the + description bundled with the parser, leading to incorrect parse + (missing field). - * The event cannot be parsed in raw format in case text output of - ``trace-cmd`` is used, because of a ``const char*`` field displayed - as a pointer for example. + * The event cannot be parsed in raw format in case text output of + ``trace-cmd`` is used, because of a ``const char*`` field displayed + as a pointer for example. - .. seealso:: For events not following the regular field syntax, - use :class:`CustomFieldsTxtEventParser` + .. seealso:: For events not following the regular field syntax, + use :class:`CustomFieldsTxtEventParser` - * Automatic detection can take a heavy performance toll. This is - why parsers needing descriptions will come with pre-defined - descritption of most used events. + * Automatic detection can take a heavy performance toll. This is + why parsers needing descriptions will come with pre-defined + descritption of most used events. 
Custom event parsers can be passed as extra parameters to the parser, which can be set manually:: @@ -6245,6 +6328,7 @@ class Trace(TraceBase): @property @memoized + @deprecate(replaced_by=ana, deprecated_in='3.0', removed_in='4.0') def analysis(self): # Import here to avoid a circular dependency issue at import time # with lisa.analysis.base @@ -6266,9 +6350,6 @@ class Trace(TraceBase): df_fmt=df_fmt ) - def __getattr__(self, attr): - return delegate_getattr(self, '_Trace__view', attr) - @property def trace_state(self): return ( @@ -6290,12 +6371,6 @@ class Trace(TraceBase): df = _df_to(df, index='Time', fmt=df_fmt) return df - def _internal_df_event(self, *args, **kwargs): - return self.__view._internal_df_event(*args, **kwargs) - - def _preload_events(self, *args, **kwargs): - return self.__view._preload_events(*args, **kwargs) - @classmethod @contextlib.contextmanager def from_target(cls, target, events=None, buffer_size=10240, filepath=None, **kwargs): @@ -6368,6 +6443,103 @@ class Trace(TraceBase): def get_event_sources(cls, *args, **kwargs): return _Trace.get_event_sources(*args, **kwargs) + def _internal_df_event(self, *args, **kwargs): + return self.__view._internal_df_event(*args, **kwargs) + + def _preload_events(self, *args, **kwargs): + return self.__view._preload_events(*args, **kwargs) + + @property + def basetime(self): + return self.__view.basetime + + @property + def endtime(self): + return self.__view.endtime + + @property + def start(self): + return self.__view.start + + @property + def end(self): + return self.__view.end + + +class _TraceProxy( + DelegateToAttr( + '_TraceProxy__base_trace', + [Trace], + ), + TraceBase, +): + class _TraceNotSet: + def __getattribute__(self, attr): + raise RuntimeError('The trace instance can only be used after the end of the "with" statement.') + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def __init__(self, path): + self.__base_trace = self._TraceNotSet() + self.__path = path + self.__deallocator = _Deallocator( + # Delete the file once we are done accessing it + f=functools.partial(_file_cleanup, paths=[path]), + on_del=True, + at_exit=True, + ) + + def _set_trace(self, trace): + self.__base_trace = trace + + def __enter__(self): + self.__base_trace.__enter__() + return self + + def __exit__(self, *args): + try: + return self.__base_trace.__exit__(*args) + finally: + self.__deallocator.run() + + @property + def ana(self): + return self.__base_trace.ana + + @property + @deprecate(replaced_by=ana, deprecated_in='3.0', removed_in='4.0') + def analysis(self): + return self.__base_trace.analysis + + def df_event(self, *args, **kwargs): + return self.__base_trace.df_event(*args, **kwargs) + + def _internal_df_event(self, *args, **kwargs): + return self.__base_trace._internal_df_event(*args, **kwargs) + + def _preload_events(self, *args, **kwargs): + return self.__base_trace._preload_events(*args, **kwargs) + + @property + def basetime(self): + return self.__base_trace.basetime + + @property + def endtime(self): + return self.__base_trace.endtime + + @property + def start(self): + return self.__base_trace.start + + @property + def end(self): + return self.__base_trace.end + class TraceEventCheckerBase(abc.ABC, Loggable, Sequence): """ @@ -6481,7 +6653,7 @@ class TraceEventCheckerBase(abc.ABC, Loggable, Sequence): If some event requirements have already been defined for it (it has a `used_events` attribute, i.e. 
it has already been decorated), these will be combined with the new requirements using an - :class`AndTraceEventChecker`. + :class:`AndTraceEventChecker`. """ def unwrap_down_to(obj): return hasattr(obj, 'used_events') @@ -7113,7 +7285,7 @@ class FtraceConf(SimpleMultiSrcConf, HideExekallID): return self.add_src(src, conf=merged, **kwargs) -class CollectorBase(Loggable): +class CollectorBase(DelegateToAttr('_collector'), Loggable): """ Base class for :class:`devlib.collector.CollectorBase`-based collectors using composition. @@ -7143,9 +7315,6 @@ class CollectorBase(Loggable): def _install_tools(self, target): target.install_tools(self.TOOLS) - def __getattr__(self, attr): - return delegate_getattr(self, '_collector', attr) - def __enter__(self): self._collector.__enter__() return self diff --git a/lisa/utils.py b/lisa/utils.py index 5a4780395ff377e89d7dceb4f8c56b172619a30f..22969d3778f01c18ee6f85439b59125b2a48bfb4 100644 --- a/lisa/utils.py +++ b/lisa/utils.py @@ -34,7 +34,7 @@ import abc import copy import collections from collections.abc import Mapping, Iterable, Hashable -from collections import OrderedDict +from collections import OrderedDict, ChainMap import contextlib import inspect import io @@ -65,6 +65,8 @@ import platform import subprocess import multiprocessing import urllib.request +import builtins +import typing import ruamel.yaml from ruamel.yaml import YAML @@ -137,7 +139,7 @@ del _get_abi TASK_COMM_MAX_LEN = 16 - 1 -""" +r""" Value of ``TASK_COMM_LEN - 1`` macro in the kernel, to account for ``\0`` terminator. """ @@ -233,13 +235,13 @@ class instancemethod: """ Decorator providing a hybrid of a normal method and a classmethod: - * Like a classmethod, it can be looked up on the class itself, and the - class is passed as first parameter. This allows selecting the class - "manually" before applying on an instance. + * Like a classmethod, it can be looked up on the class itself, and the + class is passed as first parameter. This allows selecting the class + "manually" before applying on an instance. - * Like a normal method, it can be looked up on an instance. In that - case, the first parameter is the class of the instance and the second - parameter is the instance itself. + * Like a normal method, it can be looked up on an instance. In that + case, the first parameter is the class of the instance and the second + parameter is the instance itself. """ def __init__(self, f): self.__wrapped__ = classmethod(f) @@ -303,14 +305,11 @@ class Loggable: @classmethod def get_logger(cls, suffix=None): - cls_name = cls.__name__ - module = inspect.getmodule(cls) - if module: - name = module.__name__ + '.' + cls_name - else: - name = cls_name - if suffix: - name += '.' + suffix + """ + Provides a :class:`logging.Logger` named after ``cls``. 
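+ + For example (the ``Foo`` class and its module ``mymod`` are hypothetical):: + + class Foo(Loggable): + pass + + # Returns a logger named 'mymod.Foo.setup' + Foo.get_logger('setup')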
+ """ + suffix = f'.{suffix}' if suffix else '' + name = f'{cls.__module__}.{cls.__qualname__}{suffix}' logger = logging.getLogger(name) return _WrappedLogger(logger) @@ -475,7 +474,7 @@ class mappable: return map(self.__wrapped__, other) -def get_subclasses(cls, only_leaves=False, cls_set=None): +def get_subclasses(cls, only_leaves=False, cls_set=None, mro_order=False): """Get all indirect subclasses of the class.""" if cls_set is None: cls_set = set() @@ -491,41 +490,173 @@ def get_subclasses(cls, only_leaves=False, cls_set=None): } cls_set.update(to_be_added) - return cls_set + if mro_order: + return _make_mro(cls_set) + else: + return cls_set -def get_cls_name(cls, style=None, fully_qualified=True): +def _is_typing_hint(obj): """ - Get a prettily-formated name for the class given as parameter + Heuristic to check if a given ``obj`` is a typing hint or anything else. + This function will return ``False`` for classes. + + .. warning:: Since there is currently no way to identify hints for sure, + the check might return ``False`` even if it is a hint. + """ + module = getattr(obj, '__module__', None) + + # This is a class, so cannot be a hint. + if isinstance(obj, type): + return False + elif module in ('typing', 'typing_extensions'): + return typing.get_origin(obj) is not None + else: + return False - :param cls: Class or typing hint to get the name from. - :type cls: type + +def get_obj_name(obj, style=None, fully_qualified=True, abbrev=False, name=None): + """ + Get a prettily-formated name for the object given as parameter + + :param obj: Class or module or instance or typing hint to get the name from. + :type obj: object :param style: When "rst", a RestructuredText snippet is returned :param style: str + :param abbrev: If ``True``, a short name will be used. + :type abbrev: bool + + :param name: Fully qualified name of the object. It will be used to provide + better reST role inference in some cases. + :type name: str or None """ - if cls is None: - return 'None' - else: - try: - qualname = cls.__qualname__ - # type annotations like typing.Union[str, int] do not have a __qualname__ - except AttributeError: - name = str(cls) + role = get_sphinx_role(obj, name=name) + + def get(obj): + if inspect.isroutine(obj): + try: + proxy = obj.__func__ + except AttributeError: + if isinstance(getattr(obj, '__self__', None), type): + proxy = obj.__self__ + else: + proxy = obj + + try: + name = proxy.__qualname__ + # Some user-defined callable (__call__ protocol) do not have a + # __qualname__ + except AttributeError: + name = '.'.join( + proxy.__call__.__qualname__.split('.')[:-1] + ) + assert name + + mod = inspect.getmodule(proxy) + elif isinstance(obj, property): + proxy = obj.fget + name = proxy.__qualname__ + mod = inspect.getmodule(proxy) + elif obj is None: + mod = None + name = 'None' + elif _is_typing_hint(obj): + name = str(obj) + assert obj.__module__ == 'typing' + name = name[len('typing.'):] + mod = typing else: - if fully_qualified or style == 'rst': - mod_name = inspect.getmodule(cls).__name__ - mod_name = mod_name + '.' 
if mod_name not in ('builtins', '__main__') else '' + mod = inspect.getmodule(obj) + # For modules, getmodule() returns the module itself, not its parent + mod = None if mod is obj else mod + + try: + name = obj.__qualname__ + except AttributeError: + # Some objects like modules don't have a __qualname__ but do have a name + try: + name = obj.__name__ + except AttributeError: + raise ValueError(f'Could not determine the name of object: {obj}') + + return (mod, name) + + _obj = obj + while True: + try: + mod, name = get(_obj) + except ValueError as e: + try: + _obj = _obj.__wrapped__ + except AttributeError: + raise e else: - mod_name = '' + continue + else: + break - name = mod_name + cls.__qualname__ + mod_name = '' + if fully_qualified or style == 'rst': + mod_name = mod.__name__ if mod is not None else None + mod_name = f'{mod_name}.' if mod_name not in (None, 'builtins', '__main__') else '' - if style == 'rst': - name = f':class:`~{name}`' + if style == 'rst': + name = f'{mod_name}{name}' + abbrev = '~' if abbrev and role != 'code' else '' + name = f':{role}:`{abbrev}{name}`' + else: + name = name if abbrev else f'{mod_name}{name}' + + return name - return name + +def get_parent_namespace(obj): + """ + Return the enclosing namespace of ``obj`` (a class or a module). + """ + fullname = get_obj_name(obj) + return _get_parent_namespace(fullname) + + +def _get_parent_namespaces(fullname): + def _walk_parent_names(compos): + """ + Turns ["a", "b", "c"] into ["a.b.c", "a.b", "a"] + """ + return list(reversed([ + '.'.join(x) + for x in itertools.accumulate( + compos, + lambda x, y: [*x, y], + initial=[], + ) + if x + ])) + + def gen(): + compos = fullname.split('.') + if any(compo == '' for compo in compos): + raise ValueError(f'Cannot resolve the parent namespace of an item located inside a function: {fullname}') + else: + for _name in _walk_parent_names(compos)[1:]: + parent = resolve_dotted_name(_name) + if inspect.ismodule(parent) or isinstance(parent, type): + yield (_name, parent) + + + return list(gen()) + + +def _get_parent_namespace(fullname): + parents = _get_parent_namespaces(fullname) + try: + (name, ns), *_ = parents + except ValueError: + return None + else: + return ns def get_common_ancestor(classes): @@ -678,12 +809,56 @@ def _lru_memoized(first_param_maxsize, other_params_maxsize, sig_f): return decorator -def resolve_dotted_name(name): - """Only resolve names where __qualname__ == __name__, i.e the callable is a - module-level name.""" - mod_name, callable_name = name.rsplit('.', 1) - mod = importlib.import_module(mod_name) - return getattr(mod, callable_name) +def resolve_dotted_name(name, getattr=getattr): + """ + Resolve a dotted name, importing all modules necessary. + """ + + def resolve(name): + first, *compos = name.split('.') + + try: + obj = importlib.import_module(first) + except ImportError as e: + try: + return resolve(f'builtins.{name}') + except AttributeError: + raise e + else: + visited = [first] + for compo in compos: + visited.append(compo) + try: + importlib.import_module('.'.join(visited)) + except ImportError: + pass + obj = getattr(obj, compo) + + return obj + + # Attempt a straightforward resolution first, as get_type_hints() will not + # resolve e.g. modules. + try: + return resolve(name) + except Exception: + # Resolve type hints like "Dict[str, int]" + if '[' in name: + # Ensure all necessary modules are imported by resolving everything that + # looks like a fully qualified name.
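+ # For instance (illustrative values): with name == 'typing.Dict[str, int]', + # the regex below picks up 'typing.Dict', 'str' and 'int', each of which + # is resolved on a best-effort basis before get_type_hints() evaluates + # the full hint.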
+ items = re.findall(r'[a-zA-Z0-9_.]+', name) + for item in items: + try: + resolve(item) + except Exception: + pass + + # Piggy back on get_type_hints() so that it will resolve typing annotations + # as well. + def f(x: name): + pass + return typing.get_type_hints(f, globalns=sys.modules, localns={})['x'] + else: + raise def import_all_submodules(pkg, best_effort=False): @@ -697,19 +872,25 @@ def import_all_submodules(pkg, best_effort=False): imported will be silently skipped. :type best_effort: bool """ - return _import_all_submodules(pkg.__name__, pkg.__path__, best_effort) + try: + paths = pkg.__path__ + except AttributeError: + return pkg + else: + return _import_all_submodules(pkg.__name__, pkg.__path__, best_effort) def _import_all_submodules(pkg_name, pkg_path, best_effort=False): modules = [] - for _, module_name, _ in ( - pkgutil.walk_packages(pkg_path, prefix=pkg_name + '.') - ): + # Silence warnings if we hit some deprecated modules + with warnings.catch_warnings(): + warnings.simplefilter(action='ignore') + + for _, module_name, _ in ( + pkgutil.walk_packages(pkg_path, prefix=pkg_name + '.') + ): try: - # Silence warnings if we hit some deprecated modules - with warnings.catch_warnings(): - warnings.simplefilter(action='ignore') - module = importlib.import_module(module_name) + module = importlib.import_module(module_name) except ImportError: if best_effort: pass @@ -732,7 +913,7 @@ class UnknownTagPlaceholder: def docstring_update(msg): - """ + r""" Create a class to inherit from in order to add a snippet of doc at the end of the docstring of all direct and indirect subclasses:: @@ -759,68 +940,68 @@ class Serializable( The following YAML tags are supported on top of what YAML provides out of the box: - * ``!call``: call a Python callable with a mapping of arguments: + * ``!call``: call a Python callable with a mapping of arguments: - .. code-block:: yaml + .. code-block:: yaml - # will execute: - # package.module.Class(arg1='foo', arg2='bar', arg3=42) - # NB: there is no space after "call:" - !call:package.module.Class - arg1: foo - arg2: bar - arg3: 42 + # will execute: + # package.module.Class(arg1='foo', arg2='bar', arg3=42) + # NB: there is no space after "call:" + !call:package.module.Class + arg1: foo + arg2: bar + arg3: 42 - * ``!include``: include the content of another YAML file. Environment - variables are expanded in the given path: + * ``!include``: include the content of another YAML file. Environment + variables are expanded in the given path: - .. code-block:: yaml + .. code-block:: yaml - !include /foo/$ENV_VAR/bar.yml + !include /foo/$ENV_VAR/bar.yml - Relative paths are treated as relative to the file in which the - ``!include`` tag appears. + Relative paths are treated as relative to the file in which the + ``!include`` tag appears. - * ``!include-untrusted``: Similar to ``!include`` but will disable - custom tag interpretation when loading the content of the file. This - is suitable to load untrusted input. Note that the env var - interpolation and the relative path behavior depends on the mode of - the YAML parser. This means that the path itself must be trusted, as - this could leak environment variable values. Only the content of the - included file is treated as untrusted. + * ``!include-untrusted``: Similar to ``!include`` but will disable + custom tag interpretation when loading the content of the file. This + is suitable to load untrusted input. 
Note that the env var + interpolation and the relative path behavior depend on the mode of + the YAML parser. This means that the path itself must be trusted, as + this could leak environment variable values. Only the content of the + included file is treated as untrusted. - * ``!env``: take the value of an environment variable, and convert - it to a Python type: + * ``!env``: take the value of an environment variable, and convert + it to a Python type: - .. code-block:: yaml + .. code-block:: yaml - !env:int MY_ENV_VAR + !env:int MY_ENV_VAR - If `interpolate` is used as type, the value will be interpolated - using :func:`os.path.expandvars` and the resulting string - returned: + If `interpolate` is used as type, the value will be interpolated + using :func:`os.path.expandvars` and the resulting string + returned: - .. code-block:: yaml + .. code-block:: yaml - !env:interpolate /foo/$MY_ENV_VAR/bar + !env:interpolate /foo/$MY_ENV_VAR/bar - * ``!var``: reference a module-level variable: + * ``!var``: reference a module-level variable: - .. code-block:: yaml + .. code-block:: yaml - !var package.module.var + !var package.module.var - * ``!untrusted``: Interpret the given string as a YAML snippet, without - any of the special constructor being enabled. This provides a way - of safely including untrusted input in the YAML document without - running the risk of the user being able to use e.g. ``!call``. + * ``!untrusted``: Interpret the given string as a YAML snippet, without + any of the special constructor being enabled. This provides a way + of safely including untrusted input in the YAML document without + running the risk of the user being able to use e.g. ``!call``. - .. code-block:: yaml + .. code-block:: yaml - # Note the "|": this allows having a multiline string, leaving - # its interpretation to the untrusted loader. - !untrusted | - foo: bar + # Note the "|": this allows having a multiline string, leaving + # its interpretation to the untrusted loader. + !untrusted | + foo: bar .. note:: Not to be used on its own - instead, your class should inherit from this class to gain serialization superpowers. @@ -830,6 +1011,11 @@ class Serializable( 'ignored': [], 'placeholders': {}, } + """ + Attributes to be treated specially during serialization. + + .. seealso:: :meth:`Serializable.__getstate__` + """ YAML_ENCODING = 'utf-8' "Encoding used for YAML files" @@ -1085,18 +1271,18 @@ class Serializable( """ Filter the instance's attributes upon serialization. - The following keys in :attr:`ATTRIBUTES_SERIALIZATION` can be used to customize the serialized - content: + The following keys in :attr:`ATTRIBUTES_SERIALIZATION` can be used to + customize the serialized content: - * ``allowed``: list of attribute names to serialize. All other - attributes will be ignored and will not be saved/restored. + * ``allowed``: list of attribute names to serialize. All other + attributes will be ignored and will not be saved/restored. - * ``ignored``: list of attribute names to not serialize. All other - attributes will be saved/restored. + * ``ignored``: list of attribute names to not serialize. All other + attributes will be saved/restored. - * ``placeholders``: Map of attribute names to placeholder values. - These attributes will not be serialized, and the placeholder - value will be used upon restoration. + * ``placeholders``: Map of attribute names to placeholder values. + These attributes will not be serialized, and the placeholder + value will be used upon restoration.
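+ + For instance, a hedged sketch of a subclass tuning its serialization (the attribute names are hypothetical):: + + class MyConf(Serializable): + ATTRIBUTES_SERIALIZATION = { + 'ignored': ['_target'], + 'placeholders': {'_logger': None}, + }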
If both ``allowed`` and ``ignored`` are specified, ``ignored`` is ignored. @@ -1126,8 +1312,7 @@ class Serializable( def __copy__(self): """ - Make sure that copying the class still works as usual, without - dropping some attributes by defining __copy__ + Regular shallow copy operation, without dropping any attributes. """ try: return super().__copy__() @@ -1144,7 +1329,7 @@ def setup_logging(filepath='logging.conf', level=None): :param filepath: the relative or absolute path of the logging configuration to use. Relative path uses - :attr:`lisa.utils.LISA_HOME` as base folder. + :data:`lisa.utils.LISA_HOME` as base folder. :type filepath: str :param level: Override the conf file and force logging level. Defaults to @@ -1536,12 +1721,17 @@ def is_monotonic(iterable, decreasing=False): return True -def fixedpoint(f, init, limit=None): +def fixedpoint(f, init, limit=None, raise_=True): """ Find the fixed point of a function ``f`` with the initial parameter ``init``. :param limit: If provided, set a limit on the number of iterations. :type limit: int or None + + :param raise_: If ``True``, will raise a :exc:`ValueError` when ``limit`` + iterations are reached without finding a fixed point. Otherwise, simply + return the current value. + :type raise_: bool """ if limit is None: iterable = itertools.count() @@ -1556,7 +1746,10 @@ else: prev = new - raise ValueError('Could not find a fixed point') + if raise_: + raise ValueError('Could not find a fixed point') + else: + return prev def get_common_prefix(*iterables): @@ -1632,10 +1825,9 @@ def unzip_into(n, iterator): orig_a = [1, 3] orig_b = [2, 4] - a, b = unzip(zip(orig_a, orig_b)) - assert a == orig_a - assert b == orig_b - + a, b = unzip_into(2, zip(orig_a, orig_b)) + assert list(a) == list(orig_a) + assert list(b) == list(orig_b) .. note:: ``n`` is needed in order to handle properly the case where an empty iterator is passed. @@ -1793,7 +1985,7 @@ def is_running_sphinx(): Returns True if the module is imported when Sphinx is running, False otherwise. """ - return 'sphinx' in sys.modules + return bool(int(os.environ.get('_LISA_DOC_SPHINX_RUNNING', '0'))) def is_running_ipython(): @@ -1842,7 +2034,7 @@ def non_recursive_property(f): return property(wrapper) -def get_short_doc(obj, strip_rst=False): +def get_short_doc(obj, strip_rst=False, style=None): """ Get the short documentation paragraph at the beginning of docstrings. @@ -1853,11 +2045,23 @@ if docstring: docstring = split_paragraphs(docstring)[0] docstring = ' '.join(docstring.splitlines()) - if not docstring.endswith('.'): - docstring += '.' else: docstring = '' + docstring = docstring.strip() + + if docstring and not docstring.endswith('.'): + docstring += '.' + + # Remove :meta ...: info field list, e.g. :meta public:, which we never + # want + docstring = re.sub( + r'^\s*:\s*meta.*$\n?', + '', + docstring, + flags=re.MULTILINE, + ) + if strip_rst: # Remove basic reStructuredText markup docstring = re.sub( @@ -1989,11 +2193,11 @@ def kwargs_forwarded_to(f, ignore=None): The signature is modified in the following way: - * Variable keyword parameters are removed - * All the parameters that ``f`` take are added as keyword-only in the - decorated function's signature, under the assumption that - ``**kwargs`` in the decorated function is used to relay the - parameters to ``f``.
+ * Variable keyword parameters are removed + * All the parameters that ``f`` take are added as keyword-only in the + decorated function's signature, under the assumption that + ``**kwargs`` in the decorated function is used to relay the + parameters to ``f``. **Example**:: @@ -2147,7 +2351,7 @@ def update_wrapper_doc(func, added_by=None, sig_from=None, description=None, rem if added_by: if callable(added_by): - added_by_ = get_sphinx_name(added_by, style='rst') + added_by_ = get_obj_name(added_by, style='rst') else: added_by_ = added_by @@ -2538,9 +2742,12 @@ def kwargs_dispatcher(f_map, ignore=None, allow_overlap=True): return decorator -DEPRECATED_MAP = {} +_DEPRECATED_MAP = {} """ Global dictionary of deprecated classes, functions and so on. + +.. warning:: This is updated by :func:`deprecate`, so the content will evolve + as modules get imported. """ @@ -2600,7 +2807,12 @@ def deprecate(msg=None, replaced_by=None, deprecated_in=None, removed_in=None, p with contextlib.suppress(Exception): doc_url = f' (see: {get_doc_url(replaced_by)})' - replacement_msg = f', use {get_sphinx_name(replaced_by, style=style)} instead{doc_url}' + if isinstance(replaced_by, str): + _replaced_by = str(replaced_by) + else: + _replaced_by = get_obj_name(replaced_by, style=style) + + replacement_msg = f', use {_replaced_by} instead{doc_url}' else: replacement_msg = '' @@ -2609,7 +2821,7 @@ else: removal_msg = '' - name = get_sphinx_name(deprecated_obj, style=style, abbrev=True) + name = get_obj_name(deprecated_obj, style=style, abbrev=True) if parameter: if style == 'rst': parameter = f'``{parameter}``' @@ -2630,7 +2842,7 @@ ) def decorator(obj): - obj_name = get_sphinx_name(obj) + obj_name = get_obj_name(obj) if removed_in and current_version >= removed_in: raise DeprecationWarning(f'{obj_name} was marked as being removed in version {format_version(removed_in)} but is still present in current version {format_version(current_version)}') @@ -2706,11 +2918,9 @@ extra_doc = textwrap.dedent( """ - .. attention:: + .. deprecated:: {deprecated_in} - .. deprecated:: {deprecated_in} - - {msg} + {msg} """.format( deprecated_in=deprecated_in if deprecated_in else '', # The documentation already creates references to the replacement, @@ -2755,7 +2965,7 @@ # object, so that what the rest of the world will see is consistent # with the 'obj' key if register_deprecated_map: - DEPRECATED_MAP[obj_name] = { + _DEPRECATED_MAP[obj_name] = { 'obj': return_obj, 'replaced_by': replaced_by, 'msg': msg, @@ -3041,7 +3251,8 @@ class ExekallTaggable: @abc.abstractmethod def get_tags(self): """ - :return: Dictionary of tags and tag values + Dictionary of tags and tag values. + :rtype: dict(str, object) """ return {} @@ -3220,63 +3431,48 @@ def checksum(file_, method): return result() - -def get_sphinx_role(obj): +def get_sphinx_role(obj, name=None): """ Return the reStructuredText Sphinx role of a given object. """ - if isinstance(obj, type): - return 'class' - elif callable(obj): - if '<locals>' in obj.__qualname__: + def get(obj): + if isinstance(obj, type): + return 'class' + elif _is_typing_hint(obj): return 'code' - elif '.'
in obj.__qualname__: - return 'meth' + elif inspect.ismodule(obj): + return 'mod' + elif inspect.isgetsetdescriptor(obj) or inspect.isdatadescriptor(obj): + return 'attr' + elif callable(obj): + try: + name = obj.__qualname__ + except AttributeError: + return 'func' + else: + if '<locals>' in name or '<lambda>' in name: + return None + elif '.' in name: + return 'meth' + else: + return 'func' else: - return 'func' - else: - return 'code' - -def get_sphinx_name(obj, style=None, abbrev=False): - """ - Get a Sphinx-friendly name of an object. - - :param obj: The object to take the name from - :type obj: object or type - - :param style: If ``rst``, a reStructuredText reference will be returned. - Otherwise a bare name is returned. - :type style: str or None - - :param abbrev: If ``True``, a short name will be used with ``style='rst'``. - :type abbrev: bool - """ - if isinstance(obj, (staticmethod, classmethod)): - obj = obj.__func__ - elif isinstance(obj, property): - obj = obj.fget - - try: - mod = obj.__module__ + '.' - except AttributeError: - mod = '' - - try: - qualname = obj.__qualname__ - except AttributeError: - qualname = str(obj) - - name = mod + qualname + return None - if style == 'rst': - return ':{}:`{}{}{}`'.format( - get_sphinx_role(obj), - '~' if abbrev else '', - mod, qualname - ) - else: - return name + role = get(obj) or get(inspect.unwrap(obj)) + if role is None: + if name is None: + role = 'code' + else: + parent = _get_parent_namespace(name) + if isinstance(parent, type): + role = 'attr' + elif inspect.ismodule(parent): + role = 'data' + else: + role = 'code' + return role def newtype(cls, name, doc=None, module=None): @@ -3501,6 +3697,12 @@ class PartialInit(metaclass=_PartialInitMeta): """ def __init__(self, f): self.f = f + functools.update_wrapper(self, f) + + # Ensure metaprogramming functions like get_obj_name() work properly + def __getattr__(self, attr): + return delegate_getattr(self, 'f', attr) + @classmethod def factory(cls, f): @@ -3693,7 +3895,7 @@ def chain_cm(*fs): This is equivalent to:: @contextlib.contextmanager - def combined(x): + def combined(fs): fs = list(reversed(fs)) with fs[0](x) as y: @@ -3710,7 +3912,7 @@ @contextlib.contextmanager def f(a, b): print(f'f a={a} b={b}') - yield a * 2 + yield a + 10 @contextlib.contextmanager def g(x): @@ -3723,8 +3925,8 @@ # Would print: # f a=1 b=2 - # g x=2 - # final x=2 + # g x=11 + # final x=11 """ @contextlib.contextmanager @@ -3745,14 +3947,14 @@ class DirCache(Loggable): :param category: Unique name for the cache category. This allows an arbitrary number of categories to be used under - :const:`lisa.utils.LISA_CACHE_HOME`. + :data:`lisa.utils.LISA_CACHE_HOME`. :type category: str :param populate: Callback to populate a new cache entry if none is found. It will be passed the following parameters: - * The key that is being looked up - * The path to populate + * The key that is being looked up + * The path to populate It must return a subfolder of the passed path to populate, or ``None``, which is the same as returning the passed path. @@ -4166,11 +4368,183 @@ def delegate_getattr(x, delegate_to, attr): :param attr: Name of the attribute to lookup. :type attr: str - """ + ..
seealso:: :class:`DelegateToAttr` + """ # Prevent infinite recursion by calling the base class __getattr__ # implementation x = super(type(x), x).__getattribute__(delegate_to) - return getattr(x, attr) + + # Allow using delegate_getattr() in __getattribute__ implementation where + # the attribute being lookedup might be the attribute to delegate to. + if attr == delegate_to: + return x + else: + return getattr(x, attr) + + +class _DelegatedBase: + pass + + +def DelegateToAttr(attr, attr_classes=None): + """ + Implement delegation of attribute lookup to attribute named ``attr`` on + instances of the classes specified by ``attr_classes``. + + :param attr_classes: List of classes delegated to. Note that Liskov + substitution is assumed to work, so the documentation will list all + items made available by all subclasses of any class specified here. + This allows specifying e.g. an :class:`abc.ABC` base and let the + documentation reflect what is made available by every possible + implementation. This means that there could be a runtime + :exc:`AttributeError` when accessing some of these attributes, but it + is deemed to be more acceptable than simply not documenting those. + :type attr_classes: list(type) or None + + The documentation will list all the attributes and methods that the class + gains by delegating to the attribute thanks to ``attr_classes``. + + .. seealso:: :func:`delegate_getattr` + """ + + delegated_to = attr + delegated_to_classes = list(attr_classes or []) + + def is_private(name): + return name.startswith('_') + + if delegated_to_classes: + of_type = ' or '.join( + get_obj_name(_cls, style='rst') + for _cls in delegated_to_classes + if not is_private(_cls.__qualname__) + ) + of_type = f' of type {of_type}' if of_type else '' + else: + of_type = '' + + if is_private(delegated_to): + pretty = 'a private attribute' + else: + pretty = f'`self.{delegated_to}`' + pretty = f'{pretty}{of_type}' + + class _DelegatedToAttr(_DelegatedBase): + _ATTRS_DELEGATED_TO_CLASSES = delegated_to_classes + + def __getattr__(self, attr): + try: + return delegate_getattr(self, delegated_to, attr) + except AttributeError as e: + try: + sup = super().__getattr__ + except AttributeError: + raise e + else: + return sup(attr) + + # f-string cannot be used in the docstring syntax, so do it manually. + __getattr__.__doc__ = f'Delegate attribute lookup to {pretty}.' 
+
+        @classmethod
+        def __instance_dir__(cls):
+            def get_dir(cls):
+                if cls is None:
+                    return {}
+                else:
+                    try:
+                        instance_dir = cls.__instance_dir__
+                    except AttributeError:
+                        return {}
+                    else:
+                        return dict(instance_dir())
+
+            return dict(ChainMap(*(
+                get_dir(cls)
+                for cls in reversed(delegated_to_classes)
+            )))
+
+        def __dir__(self):
+            delegated = getattr(self, delegated_to)
+            return sorted(set(super().__dir__()) | dir(delegated))
+
+    return _DelegatedToAttr
+
+
+@deprecate(deprecated_in='3.0', removed_in='4.0', replaced_by=get_obj_name)
+def get_cls_name(*args, **kwargs):
+    return get_obj_name(*args, **kwargs)
+
+
+@deprecate(deprecated_in='3.0', removed_in='4.0', replaced_by=get_obj_name)
+def get_sphinx_name(*args, **kwargs):
+    return get_obj_name(*args, **kwargs)
+
+
+def _make_mro(classes):
+    def flatten(tree):
+        def go(tree):
+            return itertools.chain.from_iterable(
+                (
+                    [node[0]]
+                    if isinstance(node, tuple) else
+                    go(node)
+                )
+                for node in tree
+            )
+        return list(reversed(list(go(tree))))
+
+    classes = sorted(classes, key=attrgetter('__qualname__'))
+    # Ensure classes appear in inheritance order, so that an MRO can be
+    # established.
+    tree = inspect.getclasstree(classes, unique=True)
+    ordered = flatten(tree)
+    return ordered
+
+
+def _solve_metaclass_conflict(*bases):
+    """
+    Solve the metaclass conflict by making a metaclass that inherits from all
+    the metaclasses.
+    """
+    metaclasses = deduplicate(
+        [
+            type(base)
+            for base in bases
+        ],
+        keep_last=False,
+    )
+    ordered = _make_mro(metaclasses)
+
+    class _Meta(*ordered):
+        pass
+
+    class _Base(*bases, metaclass=_Meta):
+        pass
+
+    return _Base
+
+
+def ffill(iterator, select=lambda x: x is not None, init=None):
+    """
+    Forward fill an iterator with the last selected value.
+
+    :param iterator: Iterator to fill.
+    :type iterator: collections.abc.Iterable
+
+    :param select: Select items to preserve (return ``True``) and items to
+        replace with the last selected value (return ``False``).
+    :type select: collections.abc.Callable
+
+    :param init: Value to use before the first ``select``-ed item.
+    :type init: object
+    """
+    curr = init
+    for x in iterator:
+        if select(x):
+            curr = x
+
+        yield curr
 
 # vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab
diff --git a/lisa/version.py b/lisa/version.py
index 979d1cbb18ad1a09c62dda92eec860f2ea525980..b80e22b8214e817c8abdff3298d14484ea48a1a6 100644
--- a/lisa/version.py
+++ b/lisa/version.py
@@ -14,6 +14,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+"""
+:mod:`lisa` version identification.
+"""
 
 import os
 import hashlib
diff --git a/lisa/wa/__init__.py b/lisa/wa/__init__.py
index 72983d2ce3561646f740012d1d5211ee11a044e6..b4e39d54871e750bb26ac3342418144b892d64ad 100644
--- a/lisa/wa/__init__.py
+++ b/lisa/wa/__init__.py
@@ -14,6 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+"""
+Workload Automation-related helpers.
+"""
+
 from collections.abc import Mapping
 from collections import defaultdict
 import inspect
@@ -141,8 +145,8 @@ class WAOutput(StatsProp, Mapping, Loggable):
     collected are accessible through a :class:`pandas.DataFrame` in
     "database" format:
 
-        * meaningless index
-        * all values are tagged using tag columns
+    * meaningless index
+    * all values are tagged using tag columns
 
     :param path: Path containing a Workload Automation output.
:type path: str diff --git a/lisa/wa/plugins/__init__.py b/lisa/wa/plugins/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b9d410055c505fec8461552941acf9a879fd8495 --- /dev/null +++ b/lisa/wa/plugins/__init__.py @@ -0,0 +1,4 @@ +""" +Workload Automation plugins. +""" + diff --git a/lisa/wlgen/__init__.py b/lisa/wlgen/__init__.py index b5edfb655b5f4276f534a0bc709785c5e0597360..c6e2900fc3beb90dc6d731151f4bc3f8cc05e068 100644 --- a/lisa/wlgen/__init__.py +++ b/lisa/wlgen/__init__.py @@ -1 +1,4 @@ +""" +Helpers to programmatically generate target workloads. +""" # vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab diff --git a/lisa/wlgen/rta.py b/lisa/wlgen/rta.py index fadd7f3cfeb257484ec78fff6bbfabdd1389b5b2..71f0a71f0a0bc840ef3678308ecba8c28c182f32 100644 --- a/lisa/wlgen/rta.py +++ b/lisa/wlgen/rta.py @@ -22,30 +22,27 @@ scheduler tests, allowing to easily run custom workloads. The most important classes are: - * :class:`RTA`: Subclass of :class:`lisa.wlgen.workload.Workload` that can - run rt-app on a given :class:`~lisa.target.Target`. - - * :class:`RTAConf`: An rt-app configuration file. It can be created either - from a template JSON or using a programmatic API. - - * :class:`RTAPhase`: The entry point of the programmatic API to build - rt-app configuration, phase by phase. - - * :class:`WloadPropertyBase`: The base class of all workloads that can be - given to an :class:`RTAPhase`. It has the following subclasses: - - .. exec:: - from lisa._doc.helpers import get_subclasses_bullets - from lisa.wlgen.rta import WloadPropertyBase - - print( - get_subclasses_bullets( - WloadPropertyBase, - abbrev=True, - only_leaves=True, - style='rst', - ) - ) +* :class:`RTA`: Subclass of :class:`lisa.wlgen.workload.Workload` that can + run rt-app on a given :class:`~lisa.target.Target`. +* :class:`RTAConf`: An rt-app configuration file. It can be created either + from a template JSON or using a programmatic API. +* :class:`RTAPhase`: The entry point of the programmatic API to build + rt-app configuration, phase by phase. +* :class:`WloadPropertyBase`: The base class of all workloads that can be + given to an :class:`RTAPhase`. It has the following subclasses: + + .. exec:: + from lisa._doc.helpers import get_subclasses_bullets + from lisa.wlgen.rta import WloadPropertyBase + + print( + get_subclasses_bullets( + WloadPropertyBase, + abbrev=True, + only_leaves=True, + style='rst', + ) + ) A typical workload would be created this way:: @@ -208,7 +205,7 @@ from lisa.utils import ( deprecate, fixedpoint, fold, - get_cls_name, + get_obj_name, get_subclasses, group_by_value, groupby, @@ -217,7 +214,7 @@ from lisa.utils import ( nullcontext, order_as, value_range, - get_cls_name, + get_obj_name, get_short_doc, kwargs_dispatcher, kwargs_forwarded_to, @@ -290,6 +287,9 @@ class RTAConf(Loggable, Mapping): """ ALLOWED_TASK_NAME_REGEX = r'^[a-zA-Z0-9_]+$' + """ + Regex to check whether the ``rt-app`` task name is valid. + """ def __init__(self, conf): self.conf = conf @@ -1332,8 +1332,6 @@ class PropertyBase(SimpleHash, metaclass=PropertyMeta): @classmethod def _from_key(cls, key, val): """ - :meta public: - Build an instance out of ``key`` and ``val``. """ raise NotImplementedError() @@ -1363,8 +1361,6 @@ class PropertyBase(SimpleHash, metaclass=PropertyMeta): @classmethod def _check_key(cls, key): """ - :meta public: - Check that the ``key`` is allowed for this class. 
""" if cls.KEY is not None and key != cls.KEY: @@ -1439,7 +1435,7 @@ class PropertyBase(SimpleHash, metaclass=PropertyMeta): if parsed_type: type_ += f'{parsed_type} or ' - type_ += get_cls_name(cls) + type_ += get_obj_name(cls) return (doc, type_) @@ -1894,8 +1890,6 @@ class SimpleConcreteProperty(SimpleProperty, ConcretePropertyBase): class _SemigroupProperty(PropertyBase): """ - :meta public: - Base class for properties forming a semigroup with respect to their ``__and__`` method. @@ -1906,15 +1900,13 @@ class _SemigroupProperty(PropertyBase): @abc.abstractmethod def _SEMIGROUP_OP(x, y): """ - :meta public: - Function used to combine two non-None values. """ def __and__(self, other): """ Combine values of the properties using - :meth:`~_SemigroupProperty._SEMIGROUP_OP`, except when one of the value + :meth:`~_SEMIGROUP_OP`, except when one of the value is ``None``, in which case the other value is used as is and wrapped into an instance using :meth:`~PropertyBase.from_key`. """ @@ -2127,8 +2119,6 @@ class PriorityProperty(SimpleConcreteProperty): class _UsecSimpleConcreteProperty(SimpleConcreteProperty): """ - :meta public: - Simple property that converts its value from seconds to microseconds for the JSON file. """ @@ -2332,8 +2322,6 @@ class ComposableMultiConcretePropertyBase(MultiConcreteProperty): _ATTRIBUTES = {} """ - :meta public: - Dictionary of allowed attributes where each value is in the format ``dict(doc=..., type_=...)``. This extra information is used to patch the docstrings (see :meth:`__init_subclass__`). @@ -2391,7 +2379,7 @@ class ComposableMultiConcretePropertyBase(MultiConcreteProperty): Update the docstring used as a :meth:`str.format` template with the following keys: - * ``{params}``: replaced by the Sphinx-friendly list of attributes + * ``{params}``: replaced by the Sphinx-friendly list of attributes """ docstring = inspect.getdoc(cls) if docstring: @@ -2408,7 +2396,7 @@ class ComposableMultiConcretePropertyBase(MultiConcreteProperty): def make(param, desc): fst = f':param {param}: {desc["doc"]}' - snd = f':type {param}: {get_cls_name(desc["type_"])} or {default}' + snd = f':type {param}: {get_obj_name(desc["type_"])} or {default}' return f'{fst}\n{snd}' return '\n\n'.join(starmap(make, cls._ATTRIBUTES.items())) @@ -2464,8 +2452,6 @@ class ComposableMultiConcretePropertyBase(MultiConcreteProperty): def _and(self, other): """ - :meta public: - Combine together two instances by taking the non-default values for each attribute, and giving priority to ``self``. """ @@ -2558,8 +2544,6 @@ class UclampProperty(ComposableMultiConcretePropertyBase): def _and(self, other): """ - :meta public: - Combine clamps by taking the most constraining solution. """ def none_shortcircuit(f, x, y): @@ -2722,8 +2706,6 @@ class WloadSequence(WloadPropertyBase, SimpleConcreteProperty): class _SingleWloadBase(WloadPropertyBase): """ - :meta public: - Execute a single rt-app event. """ @@ -2755,7 +2737,7 @@ class _SingleWloadBase(WloadPropertyBase): return [(self._action, self.json_value)] -class DurationWload(WloadPropertyBase): +class DurationWload(_SingleWloadBase): """ Workload parametrized by a duration. """ @@ -2778,10 +2760,6 @@ class DurationWload(WloadPropertyBase): return _to_us(self.duration) -class DurationWload(DurationWload, _SingleWloadBase): - pass - - class RunWload(DurationWload): """ Workload for the ``run`` event. 
@@ -3096,15 +3074,15 @@ class RTAPhaseProperties(SimpleHash, Mapping): """ Alternative constructor with polymorphic input: - * ``None``: equivalent to an empty list. - * :class:`RTAPhaseProperties`: taken as-is. - * :class:`~collections.abc.Mapping`: each key/value pair is either: + * ``None``: equivalent to an empty list. + * :class:`RTAPhaseProperties`: taken as-is. + * :class:`~collections.abc.Mapping`: each key/value pair is either: - * the value is a :class:`PropertyBase`: it's taken as-is - * the value is a :class:`PlaceHolderValue`: the property is - created using its ``PROPERTY_CLS.from_key`` method. - * otherwise, an instance of the appropriate class is built by - :meth:`PropertyBase.from_key`. + * the value is a :class:`PropertyBase`: it's taken as-is + * the value is a :class:`PlaceHolderValue`: the property is + created using its ``PROPERTY_CLS.from_key`` method. + * otherwise, an instance of the appropriate class is built by + :meth:`PropertyBase.from_key`. """ if obj is None: return cls(properties=[]) @@ -3205,11 +3183,11 @@ class RTAPhaseProperties(SimpleHash, Mapping): Properties are merged according to the following rules: - * Take the value as-is for all the keys that only appear in one of - them. - * For values set in both properties, combine them with ``&`` - operator. The value coming from ``self`` will be the left - operand. + * Take the value as-is for all the keys that only appear in one of + them. + * For values set in both properties, combine them with ``&`` + operator. The value coming from ``self`` will be the left + operand. """ common = self.properties.keys() & other.properties.keys() merged = [ @@ -3379,7 +3357,7 @@ class RTAPhaseBase(_RTAPhaseBase, SimpleHash, Mapping, abc.ABC): def with_delete_props(self, properties): """ Delete all the given property names, equivalent to - `with_props(foo=delete())`` + ``with_props(foo=delete())`` """ return self.with_properties_map( dict.fromkeys(properties, delete()) @@ -3482,8 +3460,6 @@ class RTAPhaseBase(_RTAPhaseBase, SimpleHash, Mapping, abc.ABC): class _RTAPhaseTreeBase(RTAPhaseBase, abc.ABC): """ - :meta public: - Base class for phases laid out as a tree. """ @abc.abstractmethod @@ -3925,8 +3901,6 @@ class ParametricPhase(RTAPhaseTree): @abc.abstractmethod def _make_children(cls, template, **kwargs): """ - :meta public: - Create a list of children :class:`RTAPhaseBase` based on the parameters passed from the constructor. """ @@ -4622,9 +4596,9 @@ def task_factory(f): Calling the decorated function will result in another callable that can be called once with: - * ``seed``: Seed to use to automatically initialize a :class:`random.Random`. - * ``rng``: Alternatively, an existing instance of - :class:`random.Random` to use. + * ``seed``: Seed to use to automatically initialize a :class:`random.Random`. + * ``rng``: Alternatively, an existing instance of + :class:`random.Random` to use. If the user-defined coroutine function returns ``None``, the return value will be replaced by an :class:`RTAPhaseBase` representing all the phases diff --git a/lisa/wlgen/workload.py b/lisa/wlgen/workload.py index bc90a1f074ace9bf1df02be7f722425c4263e61f..3c8f8ac30f77e01a47f4f5ceec74d5166ac209fc 100644 --- a/lisa/wlgen/workload.py +++ b/lisa/wlgen/workload.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +""" +Helpers to write target workloads. 
+""" import os from pathlib import Path @@ -222,8 +225,6 @@ class _WorkloadRunCM(Loggable): class _WorkloadBase: """ - :meta public: - Dummy base class so that :class:`Workload` is processed by ``__init_subclass__`` as well. """ @@ -384,9 +385,9 @@ class Workload(_WorkloadBase, PartialInit, Loggable): If not called manually, it will be called: - * If the workload is used as a context manager, in - ``__enter__``. - * If not, in :meth:`run` or :meth:`run_background`. + * If the workload is used as a context manager, in + ``__enter__``. + * If not, in :meth:`run` or :meth:`run_background`. Calling it manually ahead of time makes can allow less garbage while tracing during the execution of the workload. diff --git a/setup.py b/setup.py index 2e0c2bdb263314ef4ee8152d6a7a27b88fc99e10..cb8d03f2067a92445fab96526b4add1b5519004e 100755 --- a/setup.py +++ b/setup.py @@ -92,7 +92,7 @@ extras_require["doc"] = [ # Force ReadTheDocs to use a recent version, rather than the defaults used # for old projects. "sphinx > 2", - "sphinx_rtd_theme >= 0.5.2", + "pydata-sphinx-theme", "sphinxcontrib-plantuml", "nbsphinx", diff --git a/tests/test_trace.py b/tests/test_trace.py index b287e1ac85486c3819abc5f16d802bf123e66c5b..78699612a67cc06f5d381148029026fefc1488ef 100644 --- a/tests/test_trace.py +++ b/tests/test_trace.py @@ -393,7 +393,7 @@ class TestTrace(TraceTestCase): class TestTraceProxy(TraceTestCase): - """Smoke tests for LISA's Trace class""" + """Smoke tests for LISA's TraceProxy class""" def _wrap_trace(self, trace): proxy = _TraceProxy(None) proxy._set_trace(trace) diff --git a/tools/bisector/doc/api.rst b/tools/bisector/doc/api.rst index 2c06c4baee1032d19c5e5382847298e810a71c94..cdac591a2d9a93ea486d4024a21116846253431a 100644 --- a/tools/bisector/doc/api.rst +++ b/tools/bisector/doc/api.rst @@ -10,5 +10,4 @@ serialized format, new steps constructors must be written in a similar way to exisiting ones. .. automodule:: bisector.main - :members: diff --git a/tools/exekall/doc/internal_api.rst b/tools/exekall/doc/internal_api.rst index c93609295aa00200343b86517d369c47eb6a029f..d563a8dd82033627b1a94482d2dd6fdb7336ba66 100644 --- a/tools/exekall/doc/internal_api.rst +++ b/tools/exekall/doc/internal_api.rst @@ -13,23 +13,18 @@ Engine exekall.engine so jumping to that reference will lead at the right place. .. automodule:: exekall.valuedb - :members: .. automodule:: exekall.engine - :members: Customization ============= .. automodule:: exekall.customization - :members: Utils ===== .. automodule:: exekall.utils - :members: .. automodule:: exekall._utils - :members: