diff --git a/config/cca-3world.yaml b/config/cca-3world.yaml index e744fa2552967e0137810e37b91e5b4a084efedd..15531f7f2fb8178f49431eda0132eb08de735185 100644 --- a/config/cca-3world.yaml +++ b/config/cca-3world.yaml @@ -56,17 +56,6 @@ build: remote: https://git.gitlab.arm.com/linux-arm/linux-cca.git revision: cca-full/rfc-v1 - prebuild: - # Disable CPU_IDLE as a workaround to speed up the FVP. Since we are using - # the TF-A DT, which provides CPU idle state parameters, it otherwise - # causes Linux to constantly enter cpu idle, slowing the FVP down. We - # can't easily use the upstream DT right now, due to some RAM having been - # carved out for the RMM and this is not reflected in that DT. CPU_IDLE is - # selected by ACPI, so we have to disable that too to maintain a legal - # config. That's OK for now since we are using the DT. - - ./scripts/config --file ${param:builddir}/.config --disable CONFIG_ACPI - - ./scripts/config --file ${param:builddir}/.config --disable CONFIG_CPU_IDLE - kvmtool: repo: dtc: diff --git a/config/edk2-base.yaml b/config/edk2-base.yaml index c2603fd3864551f01b188f9e14d553e8100db60c..fcddd91fd3781d82c7e97b4fa364f6ef529ffee9 100644 --- a/config/edk2-base.yaml +++ b/config/edk2-base.yaml @@ -9,6 +9,22 @@ description: >- Builds acpica from source as part of the build process. build: + acpica: + repo: + remote: https://github.com/acpica/acpica.git + revision: R10_20_22 + + build: + - rm -rf ${param:sourcedir}/generate/unix/acpica + - make -j${param:jobs} + - mv ${param:sourcedir}/generate/unix/bin ${param:sourcedir}/generate/unix/acpica + + clean: + - make -j${param:jobs} clean + + artifacts: + ACPICA: ${param:sourcedir}/generate/unix/acpica + edk2: repo: edk2: @@ -17,17 +33,16 @@ build: edk2-platforms: remote: https://github.com/tianocore/edk2-platforms.git revision: 20e07099d8f11889d101dd710ca85001be20e179 - acpica: - remote: https://github.com/acpica/acpica.git - revision: R10_20_22 toolchain: aarch64-none-elf- + stderrfilt: true + prebuild: - export WORKSPACE=${param:sourcedir} - export GCC5_AARCH64_PREFIX=$$CROSS_COMPILE - export PACKAGES_PATH=$$WORKSPACE/edk2:$$WORKSPACE/edk2-platforms - - export IASL_PREFIX=$$WORKSPACE/acpica/generate/unix/bin/ + - export IASL_PREFIX=${artifact:ACPICA}/ - export PYTHON_COMMAND=/usr/bin/python3 params: @@ -37,7 +52,6 @@ build: -b: RELEASE build: - - make -j${param:jobs} -C acpica - source edk2/edksetup.sh --reconfig - make -j${param:jobs} -C edk2/BaseTools - build -n ${param:jobs} -D EDK2_OUT_DIR=${param:builddir} ${param:join_space} diff --git a/documentation/userguide/commands.rst b/documentation/userguide/commands.rst index a268d22da9e6627c2300d6986d5dacf92421c7bc..a1ee58e9d449d15f3a77f3d6ffa836128237a1d5 100644 --- a/documentation/userguide/commands.rst +++ b/documentation/userguide/commands.rst @@ -22,6 +22,7 @@ For help on a specific command: shrinkwrap inspect --help shrinkwrap build --help + shrinkwrap buildall --help shrinkwrap clean --help shrinkwrap run --help shrinkwrap process --help diff --git a/documentation/userguide/config.rst b/documentation/userguide/config.rst index c75c4dfda6f280a930515d7cc199b276d908b5a5..1ae50ce24a34bb443201a59cd4c8985a6f5761cd 100644 --- a/documentation/userguide/config.rst +++ b/documentation/userguide/config.rst @@ -122,19 +122,20 @@ output to get a better feel for how they work. 
 Defined Macros
 --------------
 
-======================= ========================================================================= ====
-macro                   scope                                                                     description
-======================= ========================================================================= ====
-``${param:sourcedir}``  build.<component>.{params, prebuild, build, postbuild, clean, artifacts} Directory in which the component's source code is located.
-``${param:builddir}``   build.<component>.{params, prebuild, build, postbuild, clean, artifacts} Directory in which the component should be built, if the component's build system supports separation of source and build trees.
-``${param:configdir}``  build.<component>.{params, prebuild, build, postbuild, clean, artifacts} Directory containing the config store. This MUST only be used for resolving files that already exist in the store.
-``${param:jobs}``       build.<component>.{params, prebuild, build, postbuild, clean}            Maximum number of low level parallel jobs specified on the command line. To be passed to (e.g.) make as ``-j${param:jobs}``.
-``${param:join_equal}`` build.<component>.{prebuild, build, postbuild, clean}                    String containing all of the component's parameters (from its params dictionary), concatenated as ``key=value`` pairs.
-``${param:join_space}`` build.<component>.{prebuild, build, postbuild, clean}                    String containing all of the component's parameters (from its params dictionary), concatenated as ``key value`` pairs.
-``${artifact:<name>}``  build.<component>.{params, prebuild, build, postbuild, clean}            Build path of an artifact declared by another component. Usage of these macros determine the component build dependency graph.
-``${artifact:<name>}``  run.rtvars                                                                Package path of an artifact.
-``${rtvar:<name>}``     run.params                                                                Run-time variables. The variable names, along with default values are declared in run.rtvars, and the user may override the value on the command line.
-======================= ========================================================================= ====
+======================= ========================================================================================== ====
+macro                   scope                                                                                      description
+======================= ========================================================================================== ====
+``${param:sourcedir}``  build.<component>.{params, prebuild, build, postbuild, clean, artifacts}                   Directory in which the component's source code is located.
+``${param:builddir}``   build.<component>.{params, prebuild, build, postbuild, clean, artifacts}                   Directory in which the component should be built, if the component's build system supports separation of source and build trees.
+``${param:configdir}``  build.<component>.{params, prebuild, build, postbuild, clean, artifacts}                   Directory containing the config store. This MUST only be used for resolving files that already exist in the store.
+``${param:jobs}``       build.<component>.{params, prebuild, build, postbuild, clean, artifacts}                   Maximum number of low level parallel jobs specified on the command line. To be passed to (e.g.) make as ``-j${param:jobs}``.
+``${btvar:<name>}``     build.<component>.{params, prebuild, build, postbuild, clean, artifacts}                   Build-time variables. The variable names, along with default values, are declared in buildex.btvars, and the user may override the value on the command line.
+``${param:join_equal}`` build.<component>.{prebuild, build, postbuild, clean}                                      String containing all of the component's parameters (from its params dictionary), concatenated as ``key=value`` pairs.
+``${param:join_space}`` build.<component>.{prebuild, build, postbuild, clean}                                      String containing all of the component's parameters (from its params dictionary), concatenated as ``key value`` pairs.
+``${artifact:<name>}``  build.<component>.{params, prebuild, build, postbuild, clean, artifacts}, buildex.btvars   Build path of an artifact declared by another component. Usage of these macros determines the component build dependency graph.
+``${artifact:<name>}``  run.rtvars                                                                                 Package path of an artifact.
+``${rtvar:<name>}``     run.params                                                                                 Run-time variables. The variable names, along with default values are declared in run.rtvars, and the user may override the value on the command line.
+======================= ========================================================================================== ====
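The macro grammar in the table is small enough to capture in a few lines. The sketch below is illustrative only; the regex (including the \w name charset) is this note's assumption, not shrinkwrap's implementation, which tokenizes via _string_tokenize/_string_substitute later in this diff. It shows how a ${type:name} reference resolves against a lookup table, how $$ escapes a literal $, and why unresolved macros are only an error on the final pass.

	import re

	_MACRO = re.compile(r'\$\$|\$\{(?P<type>\w+):(?P<name>\w+)\}')

	def expand(s, lut, final=True):
		def sub(m):
			if m.group(0) == '$$':
				# Escaped '$' only collapses on the final pass.
				return '$' if final else '$$'
			t, n = m.group('type'), m.group('name')
			if t in lut and n in lut[t]:
				return str(lut[t][n])
			if final:
				raise KeyError(f'unresolved macro {m.group(0)}')
			return m.group(0)  # partial pass: leave the macro in place
		return _MACRO.sub(sub, s)

	lut = {'param': {'jobs': 8}, 'btvar': {'LLVM_DIR': '/opt/llvm'}}
	print(expand('make -j${param:jobs} LLVM=${btvar:LLVM_DIR} $$PWD', lut))
	# -> make -j8 LLVM=/opt/llvm $PWD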
 
 ******
 Schema
 ******
@@ -166,6 +167,22 @@
 The build section contains a dictionary of components that must be
 built. The keys are the component names and the values are themselves
 dictionaries, each containing the component metadata.
 
+---------------
+buildex section
+---------------
+
+When the schema was originally created, we made a mistake. The components should
+have been under ``build: components:``, allowing room for new build data to be
+added under ``build:`` without being confused for components. In order to
+retrofit a solution without breaking compatibility, the buildex section was
+created.
+
+=========== =========== ===========
+key         type        description
+=========== =========== ===========
+btvars      dictionary  Build-time variables. Keys are the variable names and values are a dictionary with keys 'type' (which must be one of 'path' or 'string') and 'value' (which takes the default value). Build-time variables can be overridden by the user on the command line.
+=========== =========== ===========
+
 ~~~~~~~~~~~~~~~~~
 component section
 ~~~~~~~~~~~~~~~~~
@@ -177,12 +194,13 @@ repo dictionary Specifies information about the git repo(s) that must be
 sourcedir   string      If specified, points to the path on disk where the source repo can be found. Useful for developer use cases where a local repo already exists.
 builddir    string      If specified, the location where the component will be built. If not specified, shrinkwrap allocates its own location based on SHRINKWRAP_BUILD.
 toolchain   string      Defines the toolchain to be used for compilation. Value is set as CROSS_COMPILE environment variable before invoking any prebuild/build/postbuild/clean commands. When using the standard image with a container runtime, the options are: ``aarch64-none-elf-``, ``arm-none-eabi-``, ``aarch64-linux-gnu-``, or ``arm-linux-gnueabihf-``.
+stderrfilt  bool        Optional, defaults to false. When true, and --verbose is not specified, filters stderr of the component's build task so that only lines containing 'error' or 'warning' are output. Everything else is suppressed. Useful for EDK2, which is extremely chatty.
 params      dictionary  Optional set of key:value pairs. When building most components, they require a set of parameters to be passed. By setting them out as a dictionary, it is easy to override and add to them in higher layers. See ``${param:join_*}`` macros.
 prebuild    list        List of shell commands to be executed during component build before the ``build`` list.
 build       list        List of shell commands to be executed during component build.
 postbuild   list        List of shell commands to be executed during component build after the ``build`` list.
 clean       list        List of shell commands to be executed during component clean.
-artifacts   dictionary  Set of artifacts that the component exports. Key is artifact name and value is path to built artifact. Other components can reference them with the ``${artifact:<name>}`` macros. Used to determine build dependencies.
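To make the new section concrete, here is a hypothetical config fragment declaring two btvars (the names are invented for illustration), loaded the same way shrinkwrap's yaml front end would see it:

	import yaml

	snippet = """
	buildex:
	  btvars:
	    GUEST_KERNEL:
	      type: path
	      value: null          # no default; the user must supply -b GUEST_KERNEL=...
	    EXTRA_FLAGS:
	      type: string
	      value: '-j4'         # default; -b EXTRA_FLAGS=... overrides it
	"""
	btvars = yaml.safe_load(snippet)['buildex']['btvars']
	assert btvars['GUEST_KERNEL']['value'] is None
	assert btvars['EXTRA_FLAGS']['value'] == '-j4'

A component would then consume these as ``${btvar:GUEST_KERNEL}`` and ``${btvar:EXTRA_FLAGS}`` in its params or command lists.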
+artifacts   dictionary  Set of artifacts (files and/or directories) that the component exports. Key is artifact name and value is path to the built artifact. Other components can reference them with the ``${artifact:<name>}`` macros. Used to determine build dependencies.
 =========== =========== ===========
 
 -----------
diff --git a/shrinkwrap/commands/build.py b/shrinkwrap/commands/build.py
index ce8d1d153655187847a49097a5393b68e71e3667..046788f6cb0aaff8239a20b5307f632c8f52310e 100644
--- a/shrinkwrap/commands/build.py
+++ b/shrinkwrap/commands/build.py
@@ -2,19 +2,13 @@
 # SPDX-License-Identifier: MIT
 
 import os
-import shrinkwrap.utils.config as config
-import shrinkwrap.utils.graph as ugraph
-import shrinkwrap.utils.runtime as runtime
-import shrinkwrap.utils.workspace as workspace
+import shrinkwrap.commands.buildall as buildall
+import shrinkwrap.utils.vars as vars
 
 
 cmd_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-def dflt_jobs():
-	return min(os.cpu_count() // 2, 32)
-
-
 def add_parser(parser, formatter):
 	"""
 	Part of the command interface expected by shrinkwrap.py. Adds the
@@ -23,8 +17,7 @@ def add_parser(parser, formatter):
 	"""
 	cmdp = parser.add_parser(cmd_name,
 		formatter_class=formatter,
-		help="""Builds either all concrete standard configs or an
-		explicitly specified set of configs and packages them ready
+		help="""Builds a specified config and packages it ready
 		to run.""",
 		epilog="""Custom config store(s) can be defined at
 		<SHRINKWRAP_CONFIG> as a colon-separated list of
 		directories. Building is done at <SHRINKWRAP_BUILD> and
 		output is saved to <SHRINKWRAP_PACKAGE>. <SHRINKWRAP_BUILD>
 		and <SHRINKWRAP_PACKAGE> default to '~/.shrinkwrap/build'
 		and '~/.shrinkwrap/package'. The user can override them by
 		setting the environment variables.""")
 
-	cmdp.add_argument('configs',
-		metavar='config', nargs='*',
-		help="""0 or more configs to build. If a config exists relative
-		to the current directory that config is used. Else if a
-		config exists relative to the config store then it is used.
-		If no configs are provided, all concrete configs in the
-		config store are built.""")
+	cmdp.add_argument('config',
+		metavar='config',
+		help="""Config to build. If the config exists relative to the
+		current directory that config is used. Else if the config
+		exists relative to the config store then it is used.""")
 
-	cmdp.add_argument('-o', '--overlay',
-		metavar='cfgfile', required=False, default=[],
+	cmdp.add_argument('-b', '--btvar',
+		metavar='key=value', required=False, default=[],
 		action='append',
-		help="""Optional config file overlay to override run-time and
-		build-time settings. Only entries within the "build" and
-		"run" sections are used. Applied to all configs being
-		built. Can be specified multiple times; left-most overlay
-		is the first overlay applied.""")
-
-	cmdp.add_argument('-t', '--tasks',
-		required=False, default=dflt_jobs(), metavar='count', type=int,
-		help="""Maximum number of "high-level" tasks that will be
-		performed in parallel by Shrinkwrap. Tasks include syncing
-		git repositories, building components and copying
-		artifacts. Default={}""".format(dflt_jobs()))
-
-	cmdp.add_argument('-j', '--jobs',
-		required=False, default=dflt_jobs(), metavar='count', type=int,
-		help="""Maximum number of low-level jobs that will be
-		performed in parallel by each component build task.
-		Default={}""".format(dflt_jobs()))
+		help="""Override value for a single build-time variable defined
+		by the config. Specify the option multiple times for multiple
+		variables. Overrides for variables that have a default
+		specified by the config are optional.""")
 
-	cmdp.add_argument('-v', '--verbose',
-		required=False, default=False, action='store_true',
-		help="""If specified, the output from all executed commands will
-		be displayed. It is advisable to set tasks to 1 when
-		this option is selected.""")
-
-	cmdp.add_argument('-n', '--dry-run',
-		required=False, default=False, action='store_true',
-		help="""If specified, <SHRINKWRAP_BUILD> and <SHRINKWRAP_PACKAGE>
-		will not be touched and none of the
-		build commands will be executed. Instead the set of
-		commands that would have been executed are output to stdout
-		as a bash script.""")
-
-	cmdp.add_argument('-c', '--no-color',
-		required=False, default=False, action='store_true',
-		help="""If specified, logs will not be colorized.""")
+	buildall.add_common_args(cmdp)
 
 	return cmd_name
 
@@ -96,59 +57,5 @@ def dispatch(args):
 	execute the subcommand, with the arguments the user passed on the
 	command line. The arguments comply with those requested in add_parser().
 	"""
-	clivars = {'jobs': args.jobs}
-	configs = config.load_resolveb_all(args.configs, args.overlay, clivars)
-	if len(args.configs) == 0:
-		configs = [c for c in configs if c['concrete']]
-	graph = config.build_graph(configs, args.verbose)
-
-	if args.dry_run:
-		script = ugraph.make_script(graph)
-		print(script)
-	else:
-		if args.verbose:
-			workspace.dump()
-
-		# Run under a runtime environment, which may just run commands
-		# natively on the host or may execute commands in a container,
-		# depending on what the user specified.
-		with runtime.Runtime(args.runtime, args.image) as rt:
-			def add_volume(path, levels_up=0):
-				while levels_up:
-					path = os.path.dirname(path)
-					levels_up -= 1
-				os.makedirs(path, exist_ok=True)
-				rt.add_volume(path)
-
-			add_volume(workspace.build)
-			add_volume(workspace.package)
-			for c in workspace.configs():
-				add_volume(c)
-
-			for conf in configs:
-				for comp in conf['build'].values():
-					add_volume(comp['sourcedir'], 1)
-					add_volume(comp['builddir'])
-
-			rt.start()
-
-			ugraph.execute(graph,
-				       args.tasks,
-				       args.verbose,
-				       not args.no_color)
-
-		for c in configs:
-			# Dump the config.
-			cfg_name = os.path.join(workspace.package,
-						f'{c["name"]}.yaml')
-			with open(cfg_name, 'w') as cfg:
-				config.dump(c, cfg)
-
-			# Dump the script to build the config.
-			graph = config.build_graph([c], args.verbose)
-			script = ugraph.make_script(graph)
-			build_name = os.path.join(workspace.package,
-						  c['name'],
-						  'build.sh')
-			with open(build_name, 'w') as build:
-				build.write(script)
+	btvars = vars.parse(args.btvar, type='bt')
+	buildall.build([args.config], [btvars], args)
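The new -b flag pairs with vars.parse() (the renamed rtvars module, shown near the end of this diff). A self-contained sketch of that round trip; note that maxsplit=1 means values may themselves contain '=':

	import argparse

	def parse(args, type):
		# Mirrors shrinkwrap/utils/vars.py below: split on the first '=' only.
		vars = {}
		for pair in args:
			try:
				key, value = pair.split('=', maxsplit=1)
				vars[key] = value
			except ValueError:
				raise Exception(f'Invalid {type}var {pair}')
		return vars

	ap = argparse.ArgumentParser()
	ap.add_argument('-b', '--btvar', metavar='key=value', default=[], action='append')
	args = ap.parse_args(['-b', 'GUEST_KERNEL=/tmp/Image', '-b', 'FLAGS=-DX=1'])
	print(parse(args.btvar, type='bt'))
	# -> {'GUEST_KERNEL': '/tmp/Image', 'FLAGS': '-DX=1'}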
diff --git a/shrinkwrap/commands/buildall.py b/shrinkwrap/commands/buildall.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7677d0f313fa6b099f76b4255371c09f8365f2f
--- /dev/null
+++ b/shrinkwrap/commands/buildall.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2022, Arm Limited.
+# SPDX-License-Identifier: MIT
+
+import os
+import yaml
+import shrinkwrap.utils.config as config
+import shrinkwrap.utils.graph as ugraph
+import shrinkwrap.utils.runtime as runtime
+import shrinkwrap.utils.workspace as workspace
+
+
+cmd_name = os.path.splitext(os.path.basename(__file__))[0]
+
+
+def dflt_jobs():
+	return min(os.cpu_count() // 2, 32)
+
+
+def add_parser(parser, formatter):
+	"""
+	Part of the command interface expected by shrinkwrap.py. Adds the
+	subcommand to the parser, along with all options and documentation.
+	Returns the subcommand name.
+	"""
+	cmdp = parser.add_parser(cmd_name,
+		formatter_class=formatter,
+		help="""Builds the set of configs specified in a yaml file and
+		packages them ready to run.""",
+		epilog="""Custom config store(s) can be defined at
+		<SHRINKWRAP_CONFIG> as a colon-separated list of
+		directories. Building is done at <SHRINKWRAP_BUILD> and
+		output is saved to <SHRINKWRAP_PACKAGE>. The package
+		includes all FW binaries, a manifest and a build.sh script
+		containing all the commands that were executed per config.
+		Any pre-existing config package directory is first deleted.
+		Shrinkwrap will always search its default config store even
+		if <SHRINKWRAP_CONFIG> is not defined. <SHRINKWRAP_BUILD>
+		and <SHRINKWRAP_PACKAGE> default to '~/.shrinkwrap/build'
+		and '~/.shrinkwrap/package'. The user can override them by
+		setting the environment variables.""")
+
+	cmdp.add_argument('configs',
+		metavar='yamlfile',
+		help="""A yaml file containing all the configs to be built. The
+		top level dictionary contains a 'configs' key, whose value
+		is a list of dictionaries, each with a 'config' key, whose
+		value is a config filename, and a 'btvars' key, whose value
+		is a dictionary of build-time variable overrides for that
+		config.""")
+
+	add_common_args(cmdp)
+
+	return cmd_name
+
+
+def add_common_args(cmdp):
+	"""
+	Common args shared between the build and buildall commands.
+	"""
+	cmdp.add_argument('-o', '--overlay',
+		metavar='cfgfile', required=False, default=[],
+		action='append',
+		help="""Optional config file overlay to override run-time and
+		build-time settings. Only entries within the "build" and
+		"run" sections are used. Applied to all configs being
+		built. Can be specified multiple times; left-most overlay
+		is the first overlay applied.""")
+
+	cmdp.add_argument('-t', '--tasks',
+		required=False, default=dflt_jobs(), metavar='count', type=int,
+		help="""Maximum number of "high-level" tasks that will be
+		performed in parallel by Shrinkwrap. Tasks include syncing
+		git repositories, building components and copying
+		artifacts. Default={}""".format(dflt_jobs()))
+
+	cmdp.add_argument('-j', '--jobs',
+		required=False, default=dflt_jobs(), metavar='count', type=int,
+		help="""Maximum number of low-level jobs that will be
+		performed in parallel by each component build task.
+		Default={}""".format(dflt_jobs()))
+
+	cmdp.add_argument('-v', '--verbose',
+		required=False, default=False, action='store_true',
+		help="""If specified, the output from all executed commands will
+		be displayed. It is advisable to set tasks to 1 when
+		this option is selected.""")
+
+	cmdp.add_argument('-n', '--dry-run',
+		required=False, default=False, action='store_true',
+		help="""If specified, <SHRINKWRAP_BUILD> and <SHRINKWRAP_PACKAGE>
+		will not be touched and none of the
+		build commands will be executed. Instead the set of
+		commands that would have been executed are output to stdout
+		as a bash script.""")
+
+	cmdp.add_argument('-c', '--no-color',
+		required=False, default=False, action='store_true',
+		help="""If specified, logs will not be colorized.""")
+
+
+def dispatch(args):
+	"""
+	Part of the command interface expected by shrinkwrap.py. Called to
+	execute the subcommand, with the arguments the user passed on the
+	command line. The arguments comply with those requested in add_parser().
+	"""
+	with open(args.configs) as file:
+		cfgs = yaml.safe_load(file)
+
+	configs = [c['config'] for c in cfgs['configs']]
+	btvarss = [c['btvars'] for c in cfgs['configs']]
+	build(configs, btvarss, args)
+
+
+def build(configs, btvarss, args):
+	"""
+	Concurrently builds a list of configs. Intended to be called as the
+	common handler for the build and buildall commands.
+	"""
+	clivars = {'jobs': args.jobs}
+	configs = config.load_resolveb_all(configs, args.overlay, clivars, btvarss)
+	graph = config.build_graph(configs, args.verbose)
+
+	if args.dry_run:
+		script = ugraph.make_script(graph)
+		print(script)
+	else:
+		if args.verbose:
+			workspace.dump()
+
+		# Run under a runtime environment, which may just run commands
+		# natively on the host or may execute commands in a container,
+		# depending on what the user specified.
+		with runtime.Runtime(args.runtime, args.image) as rt:
+			def add_volume(path, levels_up=0):
+				while levels_up:
+					path = os.path.dirname(path)
+					levels_up -= 1
+				os.makedirs(path, exist_ok=True)
+				rt.add_volume(path)
+
+			add_volume(workspace.build)
+			add_volume(workspace.package)
+			for c in workspace.configs():
+				add_volume(c)
+
+			for conf in configs:
+				for comp in conf['build'].values():
+					add_volume(comp['sourcedir'], 1)
+					add_volume(comp['builddir'])
+
+				for btvar in conf['buildex']['btvars'].values():
+					if btvar['type'] == 'path':
+						rt.add_volume(btvar['value'])
+
+			rt.start()
+
+			ugraph.execute(graph,
+				       args.tasks,
+				       args.verbose,
+				       not args.no_color)
+
+		for c in configs:
+			# Dump the config.
+			cfg_name = os.path.join(workspace.package,
+						f'{c["name"]}.yaml')
+			with open(cfg_name, 'w') as cfg:
+				config.dump(c, cfg)
+
+			# Dump the script to build the config.
+			graph = config.build_graph([c], args.verbose)
+			script = ugraph.make_script(graph)
+			build_name = os.path.join(workspace.package,
+						  c['name'],
+						  'build.sh')
+			with open(build_name, 'w') as build:
+				build.write(script)
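For reference, a file accepted by buildall's positional argument might look as follows (config names are borrowed from test.py below; the btvar name is hypothetical). dispatch() above splits it into the parallel configs/btvarss lists that build() consumes:

	import yaml

	doc = yaml.safe_load("""
	configs:
	  - config: ns-preload.yaml
	    btvars: {}
	  - config: ns-edk2.yaml
	    btvars:
	      GUEST_KERNEL: /path/to/Image
	""")
	configs = [c['config'] for c in doc['configs']]
	btvarss = [c['btvars'] for c in doc['configs']]
	assert configs == ['ns-preload.yaml', 'ns-edk2.yaml']
	assert btvarss[1] == {'GUEST_KERNEL': '/path/to/Image'}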
diff --git a/shrinkwrap/commands/inspect.py b/shrinkwrap/commands/inspect.py
index 78ba16a1e6e8c1af295cc226ec699023959a9dd3..caca6d4aa1160ecbe9d75437f34f1afc70c06177 100644
--- a/shrinkwrap/commands/inspect.py
+++ b/shrinkwrap/commands/inspect.py
@@ -51,7 +51,7 @@ def dispatch(args):
 	execute the subcommand, with the arguments the user passed on the
 	command line. The arguments comply with those requested in add_parser().
 	"""
-	configs = config.load_resolveb_all(args.configs)
+	configs = config.load_all(args.configs)
 
 	width = 80
 	indent = 21
@@ -82,8 +82,17 @@ def dispatch(args):
 					indent=indent,
 					paraspace=1))
 		buf.write('\n')
-		rtvars = {k: v['value'] for k,v in c['run']['rtvars'].items()}
-		buf.write(_dict_wrap('run-time variables',
+		btvars = {k: _var_value(v['value'])
+			  for k,v in c['buildex']['btvars'].items()}
+		buf.write(_dict_wrap('build-time vars',
+				     btvars,
+				     width=width,
+				     kindent=indent,
+				     vindent=vindent))
+		buf.write('\n')
+		rtvars = {k: _var_value(v['value'])
+			  for k,v in c['run']['rtvars'].items()}
+		buf.write(_dict_wrap('run-time vars',
 				     rtvars,
 				     width=width,
 				     kindent=indent,
@@ -95,6 +104,12 @@ def dispatch(args):
 	all = separator.join(descs)
 	print(all)
 
+def _var_value(value):
+	if value is None:
+		return '<unset>'
+	if value == '':
+		return '<empty>'
+	return str(value)
 
 def _text_wrap(tag, text, width=80, indent=0, paraspace=1, end='\n'):
 	text = str(text)
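The _var_value() helper above lets inspect render unset and empty-string defaults distinctly; the exact placeholder strings ('<unset>', '<empty>') are reconstructed in this edit and should be treated as an assumption rather than shrinkwrap's verbatim output. Its contract in miniature:

	def _var_value(value):
		if value is None:
			return '<unset>'    # assumed placeholder: no default declared
		if value == '':
			return '<empty>'    # assumed placeholder: empty-string default
		return str(value)

	assert _var_value(None) == '<unset>'
	assert _var_value('') == '<empty>'
	assert _var_value(8) == '8'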
diff --git a/shrinkwrap/commands/process.py b/shrinkwrap/commands/process.py
index 58631e7bc62882cc1dba04fc38403476ead9639c..046829af52d06e4d6e9e8460740cc8812111dd2b 100644
--- a/shrinkwrap/commands/process.py
+++ b/shrinkwrap/commands/process.py
@@ -3,7 +3,7 @@
 
 import os
 import shrinkwrap.utils.config as config
-import shrinkwrap.utils.rtvars as rtvars
+import shrinkwrap.utils.vars as vars
 
 
 cmd_name = os.path.splitext(os.path.basename(__file__))[0]
@@ -48,6 +48,15 @@ def add_parser(parser, formatter):
 		"run" sections are used. Can be specified multiple times;
 		left-most overlay is the first overlay applied.""")
 
+	cmdp.add_argument('-b', '--btvar',
+		metavar='key=value', required=False, default=[],
+		action='append',
+		help="""Override value for a single build-time variable defined
+		by the config. Specify the option multiple times for multiple
+		variables. Overrides for variables that have a default
+		specified by the config are optional. Only used if action
+		is "resolveb" or "resolver".""")
+
 	cmdp.add_argument('-r', '--rtvar',
 		metavar='key=value', required=False, default=[],
 		action='append',
@@ -79,12 +88,13 @@ def dispatch(args):
 	if args.action == 'merge':
 		print(config.dumps(merged))
 	else:
-		resolveb = config.resolveb(merged)
+		btvars = vars.parse(args.btvar, type='bt')
+		resolveb = config.resolveb(merged, btvars)
 
 		if args.action == 'resolveb':
 			print(config.dumps(resolveb))
 		else:
-			rtvars_dict = rtvars.parse(args.rtvar)
+			rtvars_dict = vars.parse(args.rtvar, type='rt')
 			resolver = config.resolver(resolveb, rtvars_dict)
 
 			if args.action == 'resolver':
diff --git a/shrinkwrap/commands/run.py b/shrinkwrap/commands/run.py
index d81307b95d04f6c1e96fa8ba492fc4c20fa831b6..3357dc82c07e89096269e244e4c1cf6569d744d2 100644
--- a/shrinkwrap/commands/run.py
+++ b/shrinkwrap/commands/run.py
@@ -7,7 +7,7 @@ import tempfile
 import shrinkwrap.utils.config as config
 import shrinkwrap.utils.logger as logger
 import shrinkwrap.utils.process as process
-import shrinkwrap.utils.rtvars as rtvars
+import shrinkwrap.utils.vars as vars
 import shrinkwrap.utils.runtime as runtime
 import shrinkwrap.utils.workspace as workspace
@@ -79,7 +79,7 @@ def dispatch(args):
 	filename = os.path.join(workspace.package, args.config)
 	resolveb = config.load(filename, overlays)
 
-	rtvars_dict = rtvars.parse(args.rtvar)
+	rtvars_dict = vars.parse(args.rtvar, type='rt')
 	resolver = config.resolver(resolveb, rtvars_dict)
 
 	cmds = _pretty_print_sh(resolver['run'])
diff --git a/shrinkwrap/shrinkwrap.py b/shrinkwrap/shrinkwrap.py
index 60fc8eba7dbdeb4edec83cf421a745e1603436fd..1b32e624d707291946182eb55d671d3db0a655e4 100755
--- a/shrinkwrap/shrinkwrap.py
+++ b/shrinkwrap/shrinkwrap.py
@@ -17,6 +17,7 @@
 
 from shrinkwrap import __version__
 from shrinkwrap.commands import build
+from shrinkwrap.commands import buildall
 from shrinkwrap.commands import clean
 from shrinkwrap.commands import inspect
 from shrinkwrap.commands import process
@@ -91,6 +92,7 @@ def main():
 	# Register all the commands.
 	cmds = {}
 	cmds[build.add_parser(subparsers, formatter)] = build
+	cmds[buildall.add_parser(subparsers, formatter)] = buildall
 	cmds[clean.add_parser(subparsers, formatter)] = clean
 	cmds[inspect.add_parser(subparsers, formatter)] = inspect
 	cmds[process.add_parser(subparsers, formatter)] = process
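The process command's staging, which the hunks above extend with btvars, reduces to the flow below. The two resolve steps are stubs standing in for config.resolveb and config.resolver; only the call order and the new btvars argument mirror process.dispatch():

	def parse(pairs, type):
		return dict(p.split('=', 1) for p in pairs)

	def process(merged, action, btvar_args=(), rtvar_args=()):
		if action == 'merge':
			return merged                                            # layers merged only
		resolveb = {**merged, 'btvars': parse(btvar_args, type='bt')}    # stub for config.resolveb
		if action == 'resolveb':
			return resolveb                                          # build-time macros fixed
		return {**resolveb, 'rtvars': parse(rtvar_args, type='rt')}      # stub for config.resolver

	print(process({'name': 'demo'}, 'resolver', ['K=/x'], ['CMDLINE=console=ttyAMA0']))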
+ """ + if 'btvars' not in buildex: + buildex['btvars'] = {} + + def _run_normalize(run): """ Fills in any missing lists or dictionaries with empty ones. @@ -117,7 +128,11 @@ def _config_normalize(config): if 'build' not in config: config['build'] = {} + if 'buildex' not in config: + config['buildex'] = {} + _build_normalize(config['build']) + _buildex_normalize(config['buildex']) if 'artifacts' not in config: config['artifacts'] = {} @@ -143,7 +158,7 @@ def _component_sort(component): Sort the component so that the keys are in a canonical order. This improves readability by humans. """ - lut = ['repo', 'sourcedir', 'builddir', 'toolchain', 'params', + lut = ['repo', 'sourcedir', 'builddir', 'toolchain', 'stderrfilt', 'params', 'prebuild', 'build', 'postbuild', 'clean', 'artifacts'] lut = {k: i for i, k in enumerate(lut)} return dict(sorted(component.items(), key=lambda x: lut[x[0]])) @@ -178,7 +193,7 @@ def _config_sort(config): config['run'] = _run_sort(config['run']) lut = ['name', 'fullname', 'description', 'concrete', 'layers', - 'graph', 'build', 'artifacts', 'run'] + 'graph', 'build', 'buildex', 'artifacts', 'run'] lut = {k: i for i, k in enumerate(lut)} return dict(sorted(config.items(), key=lambda x: lut[x[0]])) @@ -218,7 +233,7 @@ def _config_merge(base, new): return config -def _string_tokenize(string): +def _string_tokenize(string, escape=True): """ Returns ordered list of tokens, where each token has a 'type' and 'value'. If 'type' is 'literal', 'value' is the literal string. If @@ -252,9 +267,10 @@ def _string_tokenize(string): raise Exception(f"Macro at col {lit_end}" \ f" in '{string}' is invalid.") if m['escape'] is not None: + assert(m['escape'] == '$') tokens.append({ 'type': 'literal', - 'value': m['escape'], + 'value': '$' if escape else '$$', }) if m['type'] is not None: tokens.append({ @@ -275,18 +291,19 @@ def _string_tokenize(string): return tokens -def _string_substitute(string, lut, partial=False): +def _string_substitute(string, lut, final=True): """ Takes a string containg macros and returns a string with the macros - substituted for the values found in the lut. If partial is True, any + substituted for the values found in the lut. If final is False, any macro that does not have a value in the lut will be left as a macro in - the returned string. If partial is False, any macro that does not have a - value in the lut will cause an interrupt. + the returned string. If final is True, any macro that does not have a + value in the lut will cause an exception. Final also controls unescaping + on $. If False, $$ is left as is, otherwise they are replaced with $. 
""" calls = [] frags = [] frag = '' - tokens = _string_tokenize(string) + tokens = _string_tokenize(string, final) for t in tokens: if t['type'] == 'literal': @@ -301,12 +318,9 @@ def _string_substitute(string, lut, partial=False): frag = '' else: frag += lu - except KeyError: - if partial: - frag += f"${{{m['type']}:{m['name']}}}" - else: - raise - + except Exception: + macro = f"${{{m['type']}:{m['name']}}}" + frag += macro else: assert(False) @@ -325,6 +339,11 @@ def _string_substitute(string, lut, partial=False): return final +def _string_has_macros(string): + tokens = _string_tokenize(string) + return any([True for t in tokens if t['type'] == 'macro']) + + def _mk_params(params, separator): pairs = [f'{k}' if v is None else f'{k}{separator}{v}' for k, v in params.items()] @@ -401,12 +420,15 @@ def dump(config, fileobj): version=(1, 2)) -def resolveb(config, clivars={}): +def resolveb(config, btvars={}, clivars={}): """ Resolves the build-time macros (params, artifacts, etc) and fixes up the config. Based on the artifact dependencies, the component build graph is determined and placed into the config along with the global artifact map. Expects a config that was previously loaded with load(). + btvars=None implies that it is OK not to resolve btvars whose default + value is None. type(btvars) == dict implies btvars values must all be + resolved. """ def _resolve_build_graph(config): def _exporters_update(exporters, name, component): @@ -421,21 +443,9 @@ def resolveb(config, clivars={}): def _importers_update(importers, name, component): artifacts = set() - macros = [] - - for s in component['params'].values(): - tokens = _string_tokenize(str(s)) - macros += [t['value'] for t in tokens if t['type'] == 'macro'] - for m in macros: - if m['type'] != 'artifact': - raise Exception(f"'{name}' uses macro of type '{m['type']}'. Components must only use 'artifact' macros.") - if m['name'] is None: - raise Exception(f"'{name}' uses unnamed 'artifact' macro. 'artifact' macros must be named.") - artifacts.add(m['name']) - - for scope in ['prebuild', 'build', 'postbuild', 'clean']: - for s in component[scope]: + def _find_artifacts(strings): + for s in strings: for t in _string_tokenize(str(s)): if t['type'] != 'macro': continue @@ -446,6 +456,13 @@ def resolveb(config, clivars={}): raise Exception(f"'{name}' uses unnamed 'artifact' macro. 
@@ -458,64 +475,77 @@ def resolveb(config, clivars={}):
 		for depender, deps in artifacts_imp.items():
 			graph[depender] = []
 			for dep in deps:
+				if dep not in artifacts_exp:
+					raise Exception(f"Imported artifact '{dep}' not exported by any component.")
 				dependee = artifacts_exp[dep]
-				graph[depender].append(dependee)
+				if depender != dependee:
+					graph[depender].append(dependee)
 
 		return graph
 
 	def _resolve_artifact_map(config):
-
-		artifact_map = {}
-
+		def _combine(config):
+			artifact_map = {}
+			for desc in config['build'].values():
+				artifact_map.update(desc['artifacts'].items())
+			return {'artifact': artifact_map}
+
+		def _combine_full(config):
+			artifact_map = {}
+			for desc in config['build'].values():
+				locs = {key: {
+					'src': val,
+					'dst': os.path.join(config['name'], os.path.basename(val)),
+				} for key, val in desc['artifacts'].items()}
+				artifact_map.update(locs)
+			return artifact_map
+
+		# ${artifact:*} macros could refer to other ${artifact:*}
+		# macros, so iteratively substitute the maximum number of times,
+		# which would be once per entry in the pathological case.
+
+		artifact_lut = _combine(config)
+		artifact_nr = len(artifact_lut['artifact'])
+
+		while artifact_nr > 0:
+			artifact_nr -= 1
+
+			for desc in config['build'].values():
+				for k, v in desc['artifacts'].items():
+					desc['artifacts'][k] = _string_substitute(v, artifact_lut, False)
+
+			if artifact_nr > 0:
+				artifact_lut = _combine(config)
+
+		return _combine_full(config)
+
+	def _substitute_macros(config, lut, final):
 		for desc in config['build'].values():
-			lut = {
-				'param': {
-					'sourcedir': desc['sourcedir'],
-					'builddir': desc['builddir'],
-					'configdir': lambda x: workspace.config(x, False),
-				},
-			}
-
-			for key, val in desc['artifacts'].items():
-				desc['artifacts'][key] = _string_substitute(val, lut)
-
-			locs = {key: {
-				'src': val,
-				'dst': os.path.join(config['name'], os.path.basename(val)),
-			} for key, val in desc['artifacts'].items()}
+			lut['param']['sourcedir'] = desc['sourcedir']
+			lut['param']['builddir'] = desc['builddir']
 
-			artifact_map.update(locs)
-
-		return artifact_map
-
-	def _substitute_macros(config, artifacts, clivars):
-		for desc in config['build'].values():
-			lut = {
-				'artifact': artifacts,
-				'param': {
-					**clivars,
-					'sourcedir': desc['sourcedir'],
-					'builddir': desc['builddir'],
-					'configdir': lambda x: workspace.config(x, False),
-				},
-			}
-
-			for k in desc['params']:
-				v = desc['params'][k]
+			for k, v in desc['params'].items():
 				if v:
-					desc['params'][k] = _string_substitute(str(v), lut)
+					desc['params'][k] = _string_substitute(str(v), lut, final)
 
 			lut['param']['join_equal'] = _mk_params(desc['params'], '=')
 			lut['param']['join_space'] = _mk_params(desc['params'], ' ')
 
 			for i, s in enumerate(desc['prebuild']):
-				desc['prebuild'][i] = _string_substitute(s, lut)
+				desc['prebuild'][i] = _string_substitute(s, lut, final)
 			for i, s in enumerate(desc['build']):
-				desc['build'][i] = _string_substitute(s, lut)
+				desc['build'][i] = _string_substitute(s, lut, final)
 			for i, s in enumerate(desc['postbuild']):
-				desc['postbuild'][i] = _string_substitute(s, lut)
+				desc['postbuild'][i] = _string_substitute(s, lut, final)
 			for i, s in enumerate(desc['clean']):
-				desc['clean'][i] = _string_substitute(s, lut)
+				desc['clean'][i] = _string_substitute(s, lut, final)
+
+			for k, v in desc['artifacts'].items():
+				desc['artifacts'][k] = _string_substitute(v, lut, final)
+
+		for k, v in config['buildex']['btvars'].items():
+			if v['value'] is not None:
+				v['value'] = _string_substitute(str(v['value']), lut, final)
 
 	# Compute the source and build directories for each component. If they
 	# are already present, then don't override. This allows users to supply
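The while loop above converges because each pass can resolve at least one more artifact in a reference chain, so n entries need at most n passes. The same shape in miniature, with Python's str.format standing in for substitution (the {NAME} notation is a stand-in for ${artifact:NAME}, not shrinkwrap syntax):

	def resolve_artifacts(artifacts):
		for _ in range(len(artifacts)):             # pathological case: a full chain
			artifacts = {k: v.format(**artifacts) if '{' in v else v
				     for k, v in artifacts.items()}
		return artifacts

	chain = {'A': '/out/a', 'B': '{A}/b', 'C': '{B}/c'}
	assert resolve_artifacts(chain) == {'A': '/out/a', 'B': '/out/a/b', 'C': '/out/a/b/c'}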
@@ -531,11 +561,55 @@ def resolveb(config, btvars={}, clivars={}):
 				'build',
 				comp_dir)
 
+	macro_lut = {
+		'param': {
+			**uclivars.get(**clivars),
+			'configdir': lambda x: workspace.config(x, False),
+		},
+	}
+
+	# Override the btvars with any values supplied by the user and check
+	# that all btvars are defined.
+	final_btvars = config['buildex']['btvars']
+
+	for k, v in final_btvars.items():
+		if btvars is not None:
+			if k in btvars:
+				v['value'] = btvars[k]
+			if v['value'] is None:
+				raise Exception(f'{k} build-time variable ' \
+						'not set by user and no ' \
+						'default available.')
+
+		if v['type'] == 'path' and \
+		   v['value'] and \
+		   not _string_has_macros(v['value']):
+			v['value'] = os.path.expanduser(v['value'])
+			v['value'] = os.path.abspath(v['value'])
+
+	macro_lut['btvar'] = {k: v['value'] for k, v in final_btvars.items()}
+
+	# Do a first partial substitution, to resolve all macros except
+	# ${artifact:*}. These macros must remain in place in order to resolve
+	# the build graph. But it's possible that btvars resolve to ${artifact:*},
+	# so we need to do the first pass prior to resolving the build graph.
+	# btvars are external to the components, so they can't be used directly
+	# to build the graph.
+	_substitute_macros(config, macro_lut, False)
+
+	# Now resolve the build graph, which finds ${artifact:*} users.
 	graph = _resolve_build_graph(config)
+
+	# At this point we should only have ${artifact:*} macros remaining to
+	# resolve. But there may be some cases where ${artifact:*} macros
+	# resolve to other ${artifact:*} macros. So we need to iteratively
+	# resolve the artifact_map.
 	artifact_map = _resolve_artifact_map(config)
 	artifact_src_map = {k: v['src'] for k, v in artifact_map.items()}
-	clivars = uclivars.get(**clivars)
-	_substitute_macros(config, artifact_src_map, clivars)
+	macro_lut['artifact'] = artifact_src_map
+
+	# Final check to ensure everything is resolved and to fix escaped $.
+	_substitute_macros(config, macro_lut, True)
 
 	config['graph'] = graph
 	config['artifacts'] = artifact_map
@@ -622,11 +696,11 @@ def resolver(config, rtvars={}, clivars={}):
 	return _config_sort(config)
 
 
-def load_resolveb_all(names, overlaynames=[], clivars={}):
+def load_all(names, overlaynames=[]):
 	"""
 	Takes a list of config names and returns a corresponding list of
-	resolved configs. If the input list is None or empty, all standard
-	configs are loaded and resolved.
+	loaded configs. If the input list is None or empty, all standard
+	configs are loaded.
""" explicit = names is not None and len(names) != 0 configs = [] @@ -650,8 +724,7 @@ def load_resolveb_all(names, overlaynames=[], clivars={}): try: file = filename(name) merged = load(file, overlays, name) - resolved = resolveb(merged, clivars) - configs.append(resolved) + configs.append(merged) except Exception: if explicit: raise @@ -659,17 +732,41 @@ def load_resolveb_all(names, overlaynames=[], clivars={}): return configs +def load_resolveb_all(names, overlaynames=[], clivars={}, btvarss=None): + """ + Takes a list of config names and returns a corresponding list of + resolved configs. If the input list is None or empty, all standard + configs are loaded and resolved. + """ + configs_m = load_all(names, overlaynames) + + if btvarss is None: + btvarss = [None] * len(configs_m) + + assert(len(configs_m) == len(btvarss)) + + configs_r = [] + + for merged, btvars in zip(configs_m, btvarss): + resolved = resolveb(merged, btvars, clivars) + configs_r.append(resolved) + + return configs_r + + class Script: def __init__(self, summary, config=None, component=None, preamble=None, - final=False): + final=False, + stderrfilt=None): self.summary = summary self.config = config self.component = component self.final = final + self.stderrfilt = stderrfilt self._cmds = '' self._sealed = False self._preamble = preamble @@ -809,7 +906,7 @@ def build_graph(configs, echo): g.seal() graph[g] = [gl2] - b = Script('Building', config["name"], name, preamble=pre) + b = Script('Building', config["name"], name, preamble=pre, stderrfilt=component['stderrfilt']) if len(component['prebuild']) + \ len(component['build']) + \ len(component['postbuild']) > 0: @@ -835,7 +932,7 @@ def build_graph(configs, echo): for artifact in config['artifacts'].values(): src = artifact['src'] dst = os.path.join(workspace.package, artifact['dst']) - a.append(f'cp {src} {dst}') + a.append(f'cp -r {src} {dst}') a.seal() graph[a] = [gl2] + [s for s in build_scripts.values()] diff --git a/shrinkwrap/utils/graph.py b/shrinkwrap/utils/graph.py index b80f4fcbc1e2e8f70e3a7734f8eae068711759f5..84e2f0de268194c55add69803d21f2147a1069f7 100644 --- a/shrinkwrap/utils/graph.py +++ b/shrinkwrap/utils/graph.py @@ -131,12 +131,19 @@ def execute(graph, tasks, verbose=False, colorize=True): _run_script(pm, data, frag) active += 1 + def _should_log(proc, data, streamid): + if streamid == process.STDERR and \ + (not proc.data[2].stderrfilt or \ + 'warning' in data or 'error' in data): + return True + return False + def _log(pm, proc, data, streamid): if verbose: log.log(pm, proc, data, streamid) else: proc.data[1].append(data) - if streamid == process.STDERR: + if _should_log(proc, data, streamid): log.log(pm, proc, data, streamid) lc.skip_overdraw_once() diff --git a/shrinkwrap/utils/rtvars.py b/shrinkwrap/utils/vars.py similarity index 57% rename from shrinkwrap/utils/rtvars.py rename to shrinkwrap/utils/vars.py index 1f07be3debd740f09e21797337579400f6eb2c52..96ffc8db552e5ea8bf1bf3e61a89fe6c03323b4d 100644 --- a/shrinkwrap/utils/rtvars.py +++ b/shrinkwrap/utils/vars.py @@ -1,14 +1,14 @@ # Copyright (c) 2022, Arm Limited. 
# SPDX-License-Identifier: MIT -def parse(args): - rtvars = {} +def parse(args, type): + vars = {} for pair in args: try: key, value = pair.split('=', maxsplit=1) - rtvars[key] = value + vars[key] = value except ValueError: - raise Exception(f'Invalid rtvar {pair}') + raise Exception(f'Invalid {type}var {pair}') - return rtvars + return vars diff --git a/test/test.py b/test/test.py index b9c6a777fc060c5b009f78756e050d7c076ce45d..fb261635c087ca0a29f7fee21e9500e4790ef40c 100755 --- a/test/test.py +++ b/test/test.py @@ -7,6 +7,8 @@ import argparse import json import os import subprocess +import tempfile +import yaml RUNTIME = None @@ -20,9 +22,21 @@ ROOTFS = os.path.join(ASSETS, 'rootfs.ext4') CONFIGS = [ - ('ns-preload.yaml', {}), - ('ns-edk2.yaml', {}), - ('ns-edk2.yaml', {'CMDLINE': '\"console=ttyAMA0 earlycon=pl011,0x1c090000 root=/dev/vda ip=dhcp acpi=force\"'}), + { + 'config': 'ns-preload.yaml', + 'btvars': {}, + 'rtvars': {}, + }, + { + 'config': 'ns-edk2.yaml', + 'btvars': {}, + 'rtvars': {}, + }, + { + 'config': 'ns-edk2.yaml', + 'btvars': {}, + 'rtvars': {'CMDLINE': '\"console=ttyAMA0 earlycon=pl011,0x1c090000 root=/dev/vda ip=dhcp acpi=force\"'}, + }, ] @@ -99,25 +113,47 @@ def run(cmd, timeout=None, expect=0): raise WrongExit(ret) -def build_configs(configs, overlay=None): +def build_configs(configs, overlay=None, btvarss=None): result = { 'type': 'build', 'status': 'fail', 'error': None, 'configs': configs, 'overlay': overlay, + 'btvarss': btvarss, } rt = f'-R {RUNTIME} -I {IMAGE}' overlay = f'-o {overlay}' if overlay else '' - args = f'{" ".join(configs)} {overlay}' - - try: - run(f'shrinkwrap {rt} clean {args} -d', None) - run(f'shrinkwrap {rt} build {args}', None) - result['status'] = 'pass' - except Exception as e: - result['error'] = str(e) + cleanargs = f'{" ".join(configs)} {overlay}' + + if btvarss is None: + btvarss = [{}] * len(configs) + + assert(len(configs) == len(btvarss)) + + cfgs = [] + for c, b in zip(configs, btvarss): + cfgs.append({'config': c, 'btvars': b}) + + with tempfile.TemporaryDirectory() as tmpdir: + tmpfilename = os.path.join(tmpdir, 'configs.yaml') + with open(tmpfilename, 'w') as tmpfile: + yaml.safe_dump({'configs': cfgs}, + tmpfile, + explicit_start=True, + sort_keys=False, + version=(1, 2)) + with open(tmpfilename, 'r') as tmpfile: + print(tmpfile.read()) + buildargs = f'{tmpfilename} {overlay}' + + try: + run(f'shrinkwrap {rt} clean {cleanargs} -d', None) + run(f'shrinkwrap {rt} buildall {buildargs}', None) + result['status'] = 'pass' + except Exception as e: + result['error'] = str(e) results.append(result) @@ -168,8 +204,11 @@ def do_main(smoke_test): arches = [ARCHES[-1]] if smoke_test else ARCHES for arch in arches: - build_configs([c for c, r in CONFIGS], arch) - for config, rtvars in CONFIGS: + configs = [c['config'] for c in CONFIGS] + btvarss = [c['btvars'] for c in CONFIGS] + rtvarss = [c['rtvars'] for c in CONFIGS] + build_configs(configs, arch, btvarss=btvarss) + for config, rtvars in zip(configs, rtvarss): run_config_kern(config, KERNEL, ROOTFS, arch, rtvars=rtvars) for arch in arches:
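The temporary file test.py writes above can be reproduced standalone. safe_dump with these arguments emits a YAML 1.2 document with an explicit '---' start marker, matching what buildall's dispatch() expects to read back:

	import yaml

	cfgs = [{'config': 'ns-edk2.yaml', 'btvars': {}}]
	print(yaml.safe_dump({'configs': cfgs},
			     explicit_start=True,
			     sort_keys=False,
			     version=(1, 2)))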