diff --git a/.gitlab/ci/collate-results.yml b/.gitlab/ci/collate-results.yml
index 90709b6173c23e03f06a462f04cd2c0ad6dca702..6ded770e625bf9c90b1347e381989a6382d3874a 100644
--- a/.gitlab/ci/collate-results.yml
+++ b/.gitlab/ci/collate-results.yml
@@ -9,18 +9,15 @@ Collate-Results:
   dependencies:
     - merge_jobs
     - trigger_jobs
-  before_script: []
+  before_script:
+    - pip install requests envparse junit_xml
   script:
-    - mkdir --parents jobs
-    - pip install requests envparse
-    - python .gitlab/scripts/child-job-downloader.py
-  # cat any coverage info here
-  coverage: '/Statements\s+:\s(\d+.?\d+)%/'
+    - python .gitlab/scripts/child-job-test-report.py
   artifacts:
     paths:
-      - jobs/**/*.xml
-    expire_in: 1 day
+      - TEST-ChildPipeline-*.xml
     reports:
       junit:
-        - ./jobs/**/TEST-*-test-report.xml
+        - TEST-ChildPipeline-*.xml
+    expire_in: 1 month
     when: always
diff --git a/.gitlab/ci/trigger-image-builds.yml b/.gitlab/ci/trigger-image-builds.yml
index a963ea2cb80948b5472bb3ba80bbc998280d4c8b..f3914b30446313602d01b86848256bd91296f63f 100644
--- a/.gitlab/ci/trigger-image-builds.yml
+++ b/.gitlab/ci/trigger-image-builds.yml
@@ -48,16 +48,7 @@ merge_jobs:
         done
       fi
     # Add global variables to env file to export in child pipelines
-    - IFS=$'\n' read -r -d ''
-      -a global_vars < <(yq e '.variables[] | key'
-      .gitlab-ci.yml && printf '\0')
-    - |
-      for var in "${global_vars[@]}"; do
-        echo "${var}=${!var}" >> global_variables.env
-      done
-    - echo "CREATE_JOB_ID=${CI_JOB_ID}" >> global_variables.env
-    - echo "CI_PIPELINE_ID=${CI_PIPELINE_ID}" >> global_variables.env
-    - echo CI_PIPELINE_ID = $CI_PIPELINE_ID
+    - echo "CREATE_JOB_ID=${CI_JOB_ID}" > global_variables.env
     - cat global_variables.env
     - if [ "${DEBUG_PIPELINE}" == "true" ]; then
         cat ${CI_JOB_FILE};
@@ -76,6 +67,9 @@ trigger_jobs:
       - artifact: yocto-builds.yml
         job: merge_jobs
     strategy: depend
+    forward:
+      pipeline_variables: true
+      yaml_variables: true
   variables:
     PARENT_PIPELINE_ID: $CI_PIPELINE_ID
     CASSINI_VERSION: "vNext"
diff --git a/.gitlab/ci/yocto-build.yml b/.gitlab/ci/yocto-build.yml
index a3459a50ffdf4bc44bd440c4721188f73507617a..920e37f67626c8dd005865b20d15a3edb3fd1a7e 100644
--- a/.gitlab/ci/yocto-build.yml
+++ b/.gitlab/ci/yocto-build.yml
@@ -87,8 +87,9 @@ Prune-Cache:
   # Can fail if build job are running in other pipelines
   allow_failure: true
   rules:
-    - if: '$KAS_CONFIGS == null &&
-          $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "nightly"'
+    - if: '$KAS_CONFIGS == null && $FREQUENCY == "nightly"'
+    - if: '$KAS_CONFIGS == null && $FREQUENCY == "adhoc"'
+      when: manual
   script:
    - du -sh $SSTATE_DIR
    - find $SSTATE_DIR -type f -atime +5 -delete
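For context on the Collate-Results and trigger changes above: the parent pipeline no longer downloads and unzips child-job artifacts, it reads each child pipeline's already-parsed test results through the GitLab API, and trigger variable forwarding replaces most of the hand-rolled global_variables.env plumbing. The sketch below is illustrative rather than part of the patch; it lists the bridge (trigger) jobs the new flow walks, assuming the same CI variables the job itself relies on are exported in your shell.

# Minimal sketch: list the bridge (trigger) jobs whose downstream pipelines
# Collate-Results will report on. Assumes CI_API_V4_URL, CI_PROJECT_ID,
# CI_PIPELINE_ID and CI_BOT_API_TOKEN are set, as they are in the CI job.
import os
import requests

api = os.environ['CI_API_V4_URL']
project_id = os.environ['CI_PROJECT_ID']
pipeline_id = os.environ['CI_PIPELINE_ID']
headers = {'PRIVATE-TOKEN': os.environ['CI_BOT_API_TOKEN']}

bridges = requests.get(
    f'{api}/projects/{project_id}/pipelines/{pipeline_id}/bridges',
    headers=headers)
bridges.raise_for_status()
for bridge in bridges.json():
    downstream = bridge.get('downstream_pipeline')
    print(bridge['name'], downstream['id'] if downstream else '(no pipeline)')
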
diff --git a/.gitlab/scripts/child-job-downloader.py b/.gitlab/scripts/child-job-downloader.py
deleted file mode 100644
index 1fc1586edaa7556224c129c2b5eba5cf5ae11787..0000000000000000000000000000000000000000
--- a/.gitlab/scripts/child-job-downloader.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Based on: https://gitlab.com/gitlab-gold/tpoffenbarger/intermingle/
-# dynamic-pipelines/-/blob/master/downloader.py
-# In open-source project: https://gitlab.com/gitlab-gold/tpoffenbarger/
-# intermingle/dynamic-pipelines
-#
-# Original file: Copyright (c) 2022 Tim Poffenbarger
-# Modifications: Copyright (c) 2022 Arm Limited and Contributors.
-# All rights reserved.
-#
-# SPDX-License-Identifier: MIT
-
-# Converts ptest results to junit format
-
-import json
-import sys
-
-import requests
-from envparse import Env, ConfigurationError
-import os
-from zipfile import ZipFile, BadZipFile
-
-
-env = Env()
-
-
-if not os.path.isfile('global_variables.env'):
-    print(
-        'You must save a global_variables.env file as an upstream artifact '
-        'containing the Child Pipeline Creator\'s Job ID as: CREATE_JOB_ID'
-    )
-    # exit(1)
-else:
-    env.read_envfile('global_variables.env')
-
-KEY_DESCRIPTIONS = {
-    'CREATE_JOB_ID': (
-        "The CREATE_JOB_ID key is missing and is used to identify "
-        "any job that ran after the job that triggered the job artifacts. "
-        "Please create a .env by running `'echo CREATE_JOB_ID=${CREATE_JOB_ID}"
-        " > .env'` in the script section of a job that precedes trigger: job "
-        "for your child pipelines. The `.env` file will need to be saved as "
-        "an artifact. "
-
-    ),
-    'CI_BOT_API_TOKEN': (
-        "Please save your Personal Access Token as a CI_BOT_API_TOKEN variable"
-        " in your CI/CD settings for the project, as this is needed to use the"
-        " API to find child pipeline jobs and download artifacts."
-    )
-}
-
-
-def get_response(url, headers):
-    response = requests.get(url, headers=headers)
-    if not response.ok:
-        generic_message = 'Invalid response from GitLab'
-        try:
-            generic_message = response.json().get('message', generic_message)
-        except json.decoder.JSONDecodeError:
-            pass
-        sys.stderr.write(generic_message + '\n')
-        exit(1)
-    return response.json()
-
-
-def download_file(dl_url, headers, job_id):
-    sys.stdout.write('Downloading artifacts for child job at:'
-                     + str(dl_url) + '\n')
-    sys.stdout.flush()
-    downloaded = requests.get(
-        dl_url,
-        headers=headers,
-        allow_redirects=True
-    )
-    if not downloaded.ok:
-        sys.stderr.write("Failed to download a file. Cancelling rest of job."
-                         + '\n')
-        exit(1)
-
-    path_to_zip = os.path.join('jobs', f'{job_id}.zip')
-    with open(path_to_zip, 'wb') as f:
-        f.write(downloaded.content)
-    try:
-        with ZipFile(path_to_zip, 'r') as zip_ref:
-            zip_ref.extractall('jobs')
-        os.remove(path_to_zip)
-    except BadZipFile:
-        sys.stderr.write('Cannot find file at:' + str(path_to_zip) + '\n')
-        sys.stderr.flush()
-
-
-def main():
-    """
-    download artifacts and place them in the jobs directory
-    """
-    v4_origin = env('CI_API_V4_URL')
-    project_id = env("CI_PROJECT_ID")
-    commit_sha = env("CI_COMMIT_SHA")
-    headers = {
-        'PRIVATE-TOKEN': env('CI_BOT_API_TOKEN')
-    }
-
-    page = 1
-    page_url = f'{v4_origin}/projects/{project_id}/jobs/?page={page}'
-    parent_pipeline_id = env('PARENT_PIPELINE_ID', default=None)
-    if parent_pipeline_id:
-        page_url = (
-            f'{v4_origin}/projects/{project_id}/pipelines/{parent_pipeline_id}'
-            f'/jobs/?page={page}'
-        )
-    json_response = get_response(page_url, headers)
-
-    while json_response:
-
-        create_job_id = env('CREATE_JOB_ID', cast=int, default=-1)
-
-        for job in json_response:
-            job_with_artifacts = [
-                x for x in job.get('artifacts', [])
-                if x.get('file_type', '') == 'archive'
-            ]
-
-            if not job_with_artifacts:
-                continue
-            if job['commit']['id'] != commit_sha:
-                continue
-            if not parent_pipeline_id and int(job['id']) <= create_job_id:
-                return
-
-            dl_url = (
-                f'{v4_origin}/projects/{project_id}/jobs/{job["id"]}'
-                f'/artifacts/'
-            )
-            download_file(dl_url, headers, job_id=job["id"])
-
-        page += 1
-        page_url = f'{v4_origin}/projects/{project_id}/jobs/?page={page}'
-        if parent_pipeline_id:
-            page_url = (
-                f'{v4_origin}/projects/{project_id}/pipelines/'
-                f'{parent_pipeline_id}/jobs/?page={page}'
-            )
-        json_response = get_response(page_url, headers)
-
-
-if __name__ == '__main__':
-    try:
-        main()
-    except ConfigurationError as e:
-        try:
-            sys.stderr.write(
-                KEY_DESCRIPTIONS[e.args[0].split("'")[1:-1][0]] + '\n'
-            )
-        except (IndexError, KeyError):
-            sys.stderr.write(str(e) + '\n')
-        exit(5)
diff --git a/.gitlab/scripts/child-job-test-report.py b/.gitlab/scripts/child-job-test-report.py
new file mode 100644
index 0000000000000000000000000000000000000000..01b390ffa90919aa62e550160fbacb0712ca5fba
--- /dev/null
+++ b/.gitlab/scripts/child-job-test-report.py
@@ -0,0 +1,96 @@
+# Copyright (c) 2022 Arm Limited or its affiliates. All rights reserved.
+#
+# SPDX-License-Identifier: MIT
+
+# Generates a JUnit report for each child pipeline
+
+import json
+import sys
+import requests
+
+from envparse import Env
+from junit_xml import TestSuite, TestCase
+
+
+def get_response(url, headers):
+    """
+    Call the GitLab API and return the JSON-formatted result
+    """
+    response = requests.get(url, headers=headers)
+    if not response.ok:
+        generic_message = 'Invalid response from GitLab'
+        try:
+            generic_message = response.json().get('message', generic_message)
+        except json.decoder.JSONDecodeError:
+            pass
+        sys.stderr.write(generic_message + '\n')
+        exit(1)
+    return response.json()
+
+
+def get_test_results(headers, project_url, pipeline_id):
+    """
+    Fetch a pipeline's test report
+    """
+    report_url = f'{project_url}/pipelines/{pipeline_id}/test_report'
+
+    return get_response(report_url, headers)
+
+
+def generate_junit_file(test_results, filename):
+    """
+    Generate a JUnit file from pipeline test results
+    """
+    tss = []
+    for test_suite in test_results['test_suites']:
+        tcs = []
+        for test_case in test_suite['test_cases']:
+            tc = TestCase(test_case['name'],
+                          elapsed_sec=test_case['execution_time'],
+                          classname=test_suite['name'] + "." +
+                          test_case['classname'],
+                          stdout=test_case['system_output'])
+
+            # GitLab reports a status of success, failed, skipped or error
+            if test_case['status'] in ('error', 'failed'):
+                tc.add_failure_info(test_case['status'])
+            elif test_case['status'] == 'skipped':
+                tc.add_skipped_info("skipped")
+
+            tcs.append(tc)
+
+        tss.append(TestSuite(test_suite['name'], tcs))
+
+    with open(filename, 'w') as f:
+        TestSuite.to_file(f, tss)
+
+
+def main():
+    """
+    Generate a JUnit report file for each child pipeline
+    """
+    env = Env()
+
+    v4_origin = env('CI_API_V4_URL')
+    project_id = env("CI_PROJECT_ID")
+    pipeline_id = env('CI_PIPELINE_ID')
+    headers = {
+        'PRIVATE-TOKEN': env('CI_BOT_API_TOKEN')
+    }
+
+    project_url = f'{v4_origin}/projects/{project_id}'
+    page_url = f'{project_url}/pipelines/{pipeline_id}/bridges/'
+    json_response = get_response(page_url, headers)
+
+    for bridge in json_response:
+        if bridge['downstream_pipeline']:
+            test_results = get_test_results(headers,
+                                            project_url,
+                                            bridge['downstream_pipeline']['id'])
+
+            generate_junit_file(test_results, "TEST-ChildPipeline-" +
+                                str(bridge['downstream_pipeline']['iid']) + ".xml")
+
+
+if __name__ == '__main__':
+    main()
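The script above can be exercised without a live pipeline by feeding generate_junit_file() a hand-written payload shaped like GitLab's test_report API response. The snippet below is a sketch, not part of the patch: the payload values are invented, only the keys the script actually reads are included, and it assumes the packages the Collate-Results before_script installs plus a working directory at the repository root.

# Load the script as a module (the dashed filename is not import-friendly)
# and run generate_junit_file() on an invented test_report-shaped payload.
import importlib.util

spec = importlib.util.spec_from_file_location(
    "child_job_test_report", ".gitlab/scripts/child-job-test-report.py")
report = importlib.util.module_from_spec(spec)
spec.loader.exec_module(report)

fake_results = {
    "test_suites": [{
        "name": "Lava test",
        "test_cases": [{
            "name": "boot",
            "classname": "cassini-image",
            "execution_time": 1.5,
            "system_output": "ok",
            "status": "success",
        }, {
            "name": "ptest-runner",
            "classname": "cassini-image",
            "execution_time": 42.0,
            "system_output": "1 test failed",
            "status": "failed",  # maps to a JUnit failure in the output
        }],
    }],
}
report.generate_junit_file(fake_results, "TEST-ChildPipeline-example.xml")
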
diff --git a/.gitlab/scripts/lava-to-junit.py b/.gitlab/scripts/lava-to-junit.py
index 803c4c3fbef2a3e543ad932e7129fc67fa258281..06080a206a30f1583fde5b23155486767bf24be6 100755
--- a/.gitlab/scripts/lava-to-junit.py
+++ b/.gitlab/scripts/lava-to-junit.py
@@ -20,7 +20,7 @@ def main():
     parser.add_argument(
         "-n", "--name",
         action="store",
-        default="Lava test suite",
+        default="Lava test",
         help="Test Suite Name to be used in the ouput file")

     args = parser.parse_args()
@@ -35,7 +35,7 @@ def parseData(log_file_path, export_file, name):
         data = yaml.safe_load(file)

     for test in data:
-        test_cases.append(addResult(test))
+        test_cases.append(addResult(name, test))

     ts = TestSuite(name, test_cases)

@@ -43,7 +43,7 @@ def parseData(log_file_path, export_file, name):
         TestSuite.to_file(f, [ts])


-def addResult(test_case):
+def addResult(name, test_case):
     md = test_case['metadata']
     if md is not None:
         duration = md.get("duration")
@@ -70,7 +70,7 @@ def addResult(test_case):
     tc = TestCase(test_name,
                   elapsed_sec=duration,
-                  classname=test_case['name'],
+                  classname=name,
                   timestamp=test_case['logged'],
                   stdout=test_output)
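The lava-to-junit.py hunks above thread the suite name through to each test case's classname, where previously the classname repeated the LAVA test case's own name. Below is a minimal sketch of the effect on the emitted JUnit XML, using the same junit_xml calls as the script; the test data is invented.

# Invented data showing where the suite name now lands in the JUnit output.
from junit_xml import TestCase, TestSuite

name = "Lava test"                 # the new --name default, passed to addResult()
tc = TestCase("linux-posix-pipe",  # hypothetical LAVA test case name
              classname=name,      # was test_case['name'] before this patch
              elapsed_sec=0.25)
print(TestSuite.to_xml_string([TestSuite(name, [tc])]))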