From 0db0595e632678e7a995998855b39f19d28b1140 Mon Sep 17 00:00:00 2001 From: Drew Reed Date: Thu, 28 Jul 2022 13:25:38 +0000 Subject: [PATCH] ci: Get lava results of test runs To allow us to see when lava tests have failed to run correctly we can use the lava results to generate a junit file for each lava based test run by parsing the results through a new conversion script. To allow us to run tests when not selected by default a new FORCE_TESTS variable has been added to all manual pipeline runs to override the standard test selection logic. Moved up to a newer version of the pipeline templates to get support for lava test results retrieval from the lava server. Changelog: other Signed-off-by: Drew Reed --- .gitlab-ci.yml | 16 +++-- .gitlab/ci/lava-test.yml | 5 ++ .gitlab/ci/templates/image_build.yml.j2 | 15 ++--- .gitlab/ci/trigger-image-builds.yml | 6 +- .gitlab/scripts/lava-to-junit.py | 87 +++++++++++++++++++++++++ 5 files changed, 112 insertions(+), 17 deletions(-) create mode 100755 .gitlab/scripts/lava-to-junit.py diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 058140b..1589bff 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -19,6 +19,10 @@ variables: value: arm64 description: What build host architecture should be used (x86_64, arm64) + FORCE_TESTS: + value: none + description: Comma separated list of tests that must be run + (all, ptest) FREQUENCY: value: adhoc description: How frequently is this build run @@ -32,13 +36,13 @@ variables: # include jobs include: - project: $PIPELINE_TEMPLATE_PROJECT - ref: $PIPELINE_TEMPLATE_VERSION + ref: v0.0.4 file: - - common.yml - - docker-image-base.yml - - danger-review.yml - - changelog.yml - - auto-release.yml + - gitlab-ci/common_gitlab-ci.yml + - gitlab-ci/docker-image-base_gitlab-ci.yml + - gitlab-ci/danger-review_gitlab-ci.yml + - gitlab-ci/changelog_gitlab-ci.yml + - gitlab-ci/auto-release_gitlab-ci.yml - local: '.gitlab/ci/rules.yml' - local: '.gitlab/ci/docker-image-builds.yml' - local: 
'.gitlab/ci/trigger-image-builds.yml' diff --git a/.gitlab/ci/lava-test.yml b/.gitlab/ci/lava-test.yml index 144fc73..ca6903d 100644 --- a/.gitlab/ci/lava-test.yml +++ b/.gitlab/ci/lava-test.yml @@ -67,12 +67,16 @@ - if: '$LAVA_URL != ""' .ptest-tests: + extends: .submit-cassini-lava-job variables: LAVA_JOB: ptest.yml .ptest-tests-results: extends: .complete-lava-job after_script: + - python3 .gitlab/scripts/lava-to-junit.py + lava-result.yml + "TEST-${CI_JOB_NAME}-test-report.xml" - | jfrog config add artifactory-aws --interactive=false \ --artifactory-url=$ARTIFACTORY_AWS_URL --user=$ARTIFACTORY_USER \ @@ -101,6 +105,7 @@ paths: - ptest-runner-results/${LAUNCH_CI_JOB_ID}/** - TEST-*-test-report.xml + - lava-result.yml expire_in: 1 day reports: junit: diff --git a/.gitlab/ci/templates/image_build.yml.j2 b/.gitlab/ci/templates/image_build.yml.j2 index de56263..da2f887 100644 --- a/.gitlab/ci/templates/image_build.yml.j2 +++ b/.gitlab/ci/templates/image_build.yml.j2 @@ -55,25 +55,24 @@ Lava-Test-Prep-{{ CI_JOB_NAME_SLUG }}: variables: MACHINE: "{{ MACHINE }}" -{% if 'all' in RUN_TESTS or 'ptest' in RUN_TESTS.split(',') %} +{% set can_run = ('all' in RUN_TESTS or 'ptest' in RUN_TESTS.split(',')) %} +{% set force = ('all' in FORCE_TESTS or 'ptest' in FORCE_TESTS.split(',')) %} +{% set should_run = true %} +{% if can_run and (should_run or force) %} PTest-{{ CI_JOB_NAME_SLUG }}: - extends: - - .submit-cassini-lava-job - - .ptest-tests + extends: .ptest-tests needs: - Lava-Test-Prep-{{ CI_JOB_NAME_SLUG }} variables: MACHINE: "{{ MACHINE }}" PTest-{{ CI_JOB_NAME_SLUG }}-Complete: - extends: - - .ptest-tests-results + extends: .ptest-tests-results needs: - PTest-{{ CI_JOB_NAME_SLUG }} PTest-{{ CI_JOB_NAME_SLUG }}-Cancel: - extends: - - .cancel-lava-job + extends: .cancel-lava-job needs: - PTest-{{ CI_JOB_NAME_SLUG }} {% endif %} diff --git a/.gitlab/ci/trigger-image-builds.yml b/.gitlab/ci/trigger-image-builds.yml index e885941..9e828f0 100644 --- 
a/.gitlab/ci/trigger-image-builds.yml +++ b/.gitlab/ci/trigger-image-builds.yml @@ -33,10 +33,10 @@ merge_jobs: cat << EOF > ${CI_JOB_FILE} include: - project: $PIPELINE_TEMPLATE_PROJECT - ref: $PIPELINE_TEMPLATE_VERSION + ref: v0.0.4 file: - - common.yml - - lava-test-base.yml + - gitlab-ci/common_gitlab-ci.yml + - gitlab-ci/lava-test-base_gitlab-ci.yml - local: .gitlab/ci/yocto-build.yml - local: .gitlab/ci/lava-test.yml diff --git a/.gitlab/scripts/lava-to-junit.py b/.gitlab/scripts/lava-to-junit.py new file mode 100755 index 0000000..803c4c3 --- /dev/null +++ b/.gitlab/scripts/lava-to-junit.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 +# Copyright (c) 2022 Arm Limited or its affiliates. All rights reserved. +# +# SPDX-License-Identifier: MIT + +# Converts lava results files to junit format + +import argparse +import yaml + +from junit_xml import TestSuite, TestCase + + +def main(): + parser = argparse.ArgumentParser( + description='Converts lava test result yaml file into junit format') + + parser.add_argument("log_file", help="lava yaml test result file") + parser.add_argument("output_file", help="Output Junit log file") + parser.add_argument( + "-n", "--name", + action="store", + default="Lava test suite", + help="Test Suite Name to be used in the ouput file") + + args = parser.parse_args() + + parseData(args.log_file, args.output_file, args.name) + + +def parseData(log_file_path, export_file, name): + test_cases = [] + + with open(log_file_path, "r") as file: + data = yaml.safe_load(file) + + for test in data: + test_cases.append(addResult(test)) + + ts = TestSuite(name, test_cases) + + with open(export_file, 'w') as f: + TestSuite.to_file(f, [ts]) + + +def addResult(test_case): + md = test_case['metadata'] + if md is not None: + duration = md.get("duration") + if duration is not None: + duration = float(duration) + + test_output = md.get('extra', None) + + if test_output is not None: + test_output = yaml.dump(test_output) + + test_name = test_case['name'] + 
+ test_extra = md.get('extra', []) + for extra in test_extra: + if 'label' in extra and test_name == "http-download": + test_name = extra['label'] + + if test_case['level'] != '': + test_name += "." + test_case['level'] + else: + if 'uuid' in md: + test_name += "." + md['uuid'].split('_', 2)[1] + + tc = TestCase(test_name, + elapsed_sec=duration, + classname=test_case['name'], + timestamp=test_case['logged'], + stdout=test_output) + + test_result = test_case['result'] + if test_result == 'fail': + tc.add_failure_info(test_result) + elif test_result == 'skipped': + tc.add_skipped_info("skipped") + + return tc + + +if __name__ == '__main__': + main() -- GitLab