From da3a953b0bc4724c9514db5b74805b5abf2189fe Mon Sep 17 00:00:00 2001
From: "John E. Malmberg"
Date: Tue, 18 Feb 2025 15:15:27 -0600
Subject: [PATCH] SRE-2832 Build: Update for larger Jenkinsfiles

Update Jenkinsfiles to support SCRIPT_SPLITTING_TRANSFORMATION
per https://issues.jenkins.io/browse/JENKINS-56500

utils/githooks/README.md: Fix markuplint flagged issues.

utils/scripts/helpers/install_maldet.sh: Fix defaults to be more
implementation neutral.

utils/scripts/helpers/repo-helper-*.sh: Optionally install the CA needed
to validate local repository mirrors.

Dockerfiles modified to support installing the CA.

Dockerfile.code_scanning modified to allow specifying the Fedora
version used.

Signed-off-by: John E. Malmberg
---
 .dockerignore                                 |   1 +
 Jenkinsfile                                   | 202 +++++-------------
 ci/docker/Dockerfile.maldet.el.8              |   2 +
 ci/docker/Dockerfile.maldet.leap.15           |   2 +
 ci/rpm/build.sh                               |   4 +-
 utils/docker/Dockerfile.code_scanning         |  27 ++-
 utils/docker/Dockerfile.el.8                  |   4 +-
 utils/docker/Dockerfile.el.9                  |   4 +-
 utils/docker/Dockerfile.leap.15               |   4 +-
 utils/docker/Dockerfile.ubuntu                |  21 +-
 utils/githooks/README.md                      |  12 +-
 utils/rpms/packaging/Dockerfile.coverity      |   2 +-
 utils/rpms/packaging/Dockerfile.mockbuild     |  21 +-
 utils/rpms/packaging/Dockerfile.ubuntu.20.04  |  44 ++--
 .../rpms/packaging/Dockerfile.ubuntu.rolling  |   2 +-
 utils/rpms/packaging/rpm_chrootbuild          |  17 +-
 utils/scripts/helpers/install_maldet.sh       |   6 +-
 utils/scripts/helpers/repo-helper-el8.sh      |  17 +-
 utils/scripts/helpers/repo-helper-el9.sh      |  17 +-
 utils/scripts/helpers/repo-helper-fedora.sh   |  78 +++++++
 utils/scripts/helpers/repo-helper-leap15.sh   |  43 +++-
 utils/scripts/helpers/repo-helper-ubuntu.sh   |  87 ++++++++
 22 files changed, 387 insertions(+), 230 deletions(-)
 create mode 100644 utils/scripts/helpers/repo-helper-fedora.sh
 create mode 100644 utils/scripts/helpers/repo-helper-ubuntu.sh

diff --git a/.dockerignore b/.dockerignore
index 201d9c7e482..4ddeee36322 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -15,6 +15,7 @@
 !utils/setup_daos_server_helper.sh
 !utils/sl/setup_local.sh
 !utils/scripts
+!utils/rpms
 !utils/run_utest.py
 !utils/utest.yaml
 !utils/unit
diff --git a/Jenkinsfile b/Jenkinsfile
index 8f7cf82739f..f2bb8c36c3a 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -17,6 +17,7 @@
 // To use a test branch (i.e. PR) until it lands to master
 // I.e. for testing library changes
 //@Library(value='pipeline-lib@your_branch') _
+@Library(value='pipeline-lib@malmberg/sre-2832') _
 
 /* groovylint-disable-next-line CompileStatic */
 job_status_internal = [:]
@@ -69,11 +70,15 @@ Map nlt_test() {
 }
 
 // For master, this is just some wildly high number
-String next_version = '1000'
+String next_version() {
+    return '1000'
+}
 
 // Don't define this as a type or it loses it's global scope
 target_branch = env.CHANGE_TARGET ?
env.CHANGE_TARGET : env.BRANCH_NAME -String sanitized_JOB_NAME = JOB_NAME.toLowerCase().replaceAll('/', '-').replaceAll('%2f', '-') +String sanitized_JOB_NAME() { + return JOB_NAME.toLowerCase().replaceAll('/', '-').replaceAll('%2f', '-') +} // bail out of branch builds that are not on a whitelist if (!env.CHANGE_ID && @@ -154,7 +159,6 @@ pipeline { agent { label 'lightweight' } environment { - BULLSEYE = credentials('bullseye_license_key') GITHUB_USER = credentials('daos-jenkins-review-posting') SSH_KEY_ARGS = '-ici_key' CLUSH_ARGS = "-o$SSH_KEY_ARGS" @@ -404,10 +408,11 @@ pipeline { } agent { dockerfile { - filename 'Dockerfile.code_scanning' - dir 'utils/docker' + filename 'utils/docker/Dockerfile.code_scanning' label 'docker_runner' - additionalBuildArgs dockerBuildArgs(add_repos: false) + additionalBuildArgs dockerBuildArgs(add_repos: false) + + ' --build-arg FVERSION=37' + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" } } steps { @@ -443,14 +448,13 @@ pipeline { } agent { dockerfile { - filename 'packaging/Dockerfile.mockbuild' - dir 'utils/rpms' + filename 'utils/rpms/packaging/Dockerfile.mockbuild' label 'docker_runner' args '--group-add mock' + ' --cap-add=SYS_ADMIN' + ' -v /scratch:/scratch' - additionalBuildArgs dockerBuildArgs() - + additionalBuildArgs dockerBuildArgs() + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" } } steps { @@ -483,13 +487,13 @@ pipeline { } agent { dockerfile { - filename 'packaging/Dockerfile.mockbuild' - dir 'utils/rpms' + filename 'utils/rpms/packaging/Dockerfile.mockbuild' label 'docker_runner' args '--group-add mock' + ' --cap-add=SYS_ADMIN' + ' -v /scratch:/scratch' - additionalBuildArgs dockerBuildArgs() + additionalBuildArgs dockerBuildArgs() + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" } } steps { @@ -522,13 +526,13 @@ pipeline { } agent { dockerfile { - filename 'packaging/Dockerfile.mockbuild' - dir 'utils/rpms' + filename 'utils/rpms/packaging/Dockerfile.mockbuild' label 'docker_runner' args '--group-add mock' + ' --cap-add=SYS_ADMIN' + ' -v /scratch:/scratch' - additionalBuildArgs dockerBuildArgs() + additionalBuildArgs dockerBuildArgs() + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" } } steps { @@ -561,11 +565,11 @@ pipeline { } agent { dockerfile { - filename 'packaging/Dockerfile.ubuntu.20.04' - dir 'utils/rpms' + filename 'utils/rpms/packaging/Dockerfile.ubuntu.20.04' label 'docker_runner' args '--cap-add=SYS_ADMIN' - additionalBuildArgs dockerBuildArgs() + additionalBuildArgs dockerBuildArgs() + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" } } steps { @@ -602,8 +606,9 @@ pipeline { additionalBuildArgs dockerBuildArgs(repo_type: 'stable', deps_build: true, parallel_build: true) + - " -t ${sanitized_JOB_NAME}-el8 " + - ' --build-arg REPOS="' + prRepos() + '"' + " -t ${sanitized_JOB_NAME()}-el8 " + + ' --build-arg REPOS="' + prRepos() + '"' + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" } } steps { @@ -628,46 +633,6 @@ pipeline { } } } - stage('Build on EL 8 Bullseye') { - when { - beforeAgent true - expression { !skipStage() } - } - agent { - dockerfile { - filename 'utils/docker/Dockerfile.el.8' - label 'docker_runner' - additionalBuildArgs dockerBuildArgs(repo_type: 'stable', - deps_build: true, - parallel_build: true) + - " -t ${sanitized_JOB_NAME}-el8 " + - ' --build-arg BULLSEYE=' + env.BULLSEYE + - ' --build-arg REPOS="' + prRepos() + '"' - } - } - steps { - job_step_update( - sconsBuild(parallel_build: true, - stash_files: 
'ci/test_files_to_stash.txt', - build_deps: 'yes', - stash_opt: true, - scons_args: sconsFaultsArgs() + - ' PREFIX=/opt/daos TARGET_TYPE=release')) - } - post { - unsuccessful { - sh label: 'Save failed Bullseye logs', - script: '''if [ -f config.log ]; then - mv config.log config.log-el8-covc - fi''' - archiveArtifacts artifacts: 'config.log-el8-covc', - allowEmptyArchive: true - } - cleanup { - job_status_update() - } - } - } stage('Build on Leap 15.5 with Intel-C and TARGET_PREFIX') { when { beforeAgent true @@ -680,8 +645,9 @@ pipeline { additionalBuildArgs dockerBuildArgs(repo_type: 'stable', parallel_build: true, deps_build: true) + - " -t ${sanitized_JOB_NAME}-leap15" + - ' --build-arg COMPILER=icc' + " -t ${sanitized_JOB_NAME()}-leap15" + + ' --build-arg COMPILER=icc' + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" } } steps { @@ -795,34 +761,6 @@ pipeline { } } } - stage('Unit Test Bullseye on EL 8.8') { - when { - beforeAgent true - expression { !skipStage() } - } - agent { - label cachedCommitPragma(pragma: 'VM1-label', def_val: params.CI_UNIT_VM1_LABEL) - } - steps { - job_step_update( - unitTest(timeout_time: 60, - unstash_opt: true, - ignore_failure: true, - inst_repos: prRepos(), - inst_rpms: unitPackages())) - } - post { - always { - // This is only set while dealing with issues - // caused by code coverage instrumentation affecting - // test results, and while code coverage is being - // added. - unitTestPost ignore_failure: true, - artifacts: ['covc_test_logs/', 'covc_vm_test/**'] - job_status_update() - } - } - } // stage('Unit test Bullseye on EL 8.8') stage('Unit Test with memcheck on EL 8.8') { when { beforeAgent true @@ -893,7 +831,7 @@ pipeline { job_step_update( functionalTest( inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version, 'tests-internal'), + inst_rpms: functionalPackages(1, next_version(), 'tests-internal'), test_function: 'runTestFunctionalV2')) } post { @@ -915,7 +853,7 @@ pipeline { job_step_update( functionalTest( inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version, 'tests-internal'), + inst_rpms: functionalPackages(1, next_version(), 'tests-internal'), test_function: 'runTestFunctionalV2')) } post { @@ -937,7 +875,7 @@ pipeline { job_step_update( functionalTest( inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version, 'tests-internal'), + inst_rpms: functionalPackages(1, next_version(), 'tests-internal'), test_function: 'runTestFunctionalV2')) } post { @@ -959,7 +897,7 @@ pipeline { job_step_update( functionalTest( inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version, 'tests-internal'), + inst_rpms: functionalPackages(1, next_version(), 'tests-internal'), test_function: 'runTestFunctionalV2', image_version: 'leap15.6')) } @@ -982,7 +920,7 @@ pipeline { job_step_update( functionalTest( inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version, 'tests-internal'), + inst_rpms: functionalPackages(1, next_version(), 'tests-internal'), test_function: 'runTestFunctionalV2')) } post { @@ -1003,7 +941,8 @@ pipeline { label 'docker_runner' additionalBuildArgs dockerBuildArgs(repo_type: 'stable', parallel_build: true, - deps_build: true) + deps_build: true) + + " --build-arg DAOS_LAB_CA_FILE_URL=${DAOS_LAB_CA_FILE_URL}" args '--tmpfs /mnt/daos_0' } } @@ -1056,7 +995,7 @@ pipeline { steps { job_step_update( testRpm(inst_repos: daosRepos(), - daos_pkg_version: daosPackagesVersion(next_version)) + daos_pkg_version: daosPackagesVersion(next_version())) ) } 
post { @@ -1083,7 +1022,7 @@ pipeline { name: 'Test RPMs on Leap 15.5', pragma_suffix: '', label: params.CI_UNIT_VM1_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: '', default_tags: 'test_daos_management', nvme: 'auto', @@ -1097,7 +1036,7 @@ pipeline { test_tag: 'test_daos_management', ftest_arg: '--yaml_extension single_host', inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version, 'tests-internal'), + inst_rpms: functionalPackages(1, next_version(), 'tests-internal'), test_function: 'runTestFunctionalV2')) } post { @@ -1108,7 +1047,7 @@ pipeline { } */ job_step_update( testRpm(inst_repos: daosRepos(), - daos_pkg_version: daosPackagesVersion(next_version)) + daos_pkg_version: daosPackagesVersion(next_version())) ) } post { @@ -1131,7 +1070,7 @@ pipeline { job_step_update( storagePrepTest( inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version, 'tests-internal'))) + inst_rpms: functionalPackages(1, next_version(), 'tests-internal'))) } post { cleanup { @@ -1152,7 +1091,7 @@ pipeline { pragma_suffix: '-hw-medium', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_MEDIUM_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw,medium,-provider', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', nvme: 'auto', @@ -1165,7 +1104,7 @@ pipeline { pragma_suffix: '-hw-medium-md-on-ssd', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_MEDIUM_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw,medium,-provider', default_tags: startedByTimer() ? 'pr,md_on_ssd daily_regression,md_on_ssd' : 'pr,md_on_ssd', @@ -1179,7 +1118,7 @@ pipeline { pragma_suffix: '-hw-medium-vmd', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_MEDIUM_VMD_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw_vmd,medium', /* groovylint-disable-next-line UnnecessaryGetter */ default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', @@ -1193,7 +1132,7 @@ pipeline { pragma_suffix: '-hw-medium-verbs-provider', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_MEDIUM_VERBS_PROVIDER_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw,medium,provider', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', default_nvme: 'auto', @@ -1207,7 +1146,7 @@ pipeline { pragma_suffix: '-hw-medium-verbs-provider-md-on-ssd', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_MEDIUM_VERBS_PROVIDER_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw,medium,provider', default_tags: startedByTimer() ? 'pr,md_on_ssd daily_regression,md_on_ssd' : 'pr,md_on_ssd', @@ -1222,7 +1161,7 @@ pipeline { pragma_suffix: '-hw-medium-ucx-provider', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_MEDIUM_UCX_PROVIDER_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw,medium,provider', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', default_nvme: 'auto', @@ -1236,7 +1175,7 @@ pipeline { pragma_suffix: '-hw-large', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_LARGE_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw,large', default_tags: startedByTimer() ? 
'pr daily_regression' : 'pr', default_nvme: 'auto', @@ -1249,7 +1188,7 @@ pipeline { pragma_suffix: '-hw-large-md-on-ssd', base_branch: 'master', label: params.FUNCTIONAL_HARDWARE_LARGE_LABEL, - next_version: next_version, + next_version: next_version(), stage_tags: 'hw,large', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', default_nvme: 'auto_md_on_ssd', @@ -1261,45 +1200,10 @@ pipeline { } } } // stage('Test Hardware') - stage('Test Report') { - parallel { - stage('Bullseye Report on EL 8') { - when { - beforeAgent true - expression { !skipStage() } - } - agent { - dockerfile { - filename 'utils/docker/Dockerfile.el.8' - label 'docker_runner' - additionalBuildArgs dockerBuildArgs(repo_type: 'stable') + - " -t ${sanitized_JOB_NAME}-el8 " + - ' --build-arg BULLSEYE=' + env.BULLSEYE + - ' --build-arg REPOS="' + prRepos() + '"' - } - } - steps { - // The coverage_healthy is primarily set here - // while the code coverage feature is being implemented. - job_step_update( - cloverReportPublish( - coverage_stashes: ['el8-covc-unit-cov', - 'func-vm-cov', - 'func-hw-medium-cov', - 'func-hw-large-cov'], - coverage_healthy: [methodCoverage: 0, - conditionalCoverage: 0, - statementCoverage: 0], - ignore_failure: true)) - } - post { - cleanup { - job_status_update() - } - } - } // stage('Bullseye Report on EL 8') - } // parallel - } // stage ('Test Report') + // stage('Test Report') { + // parallel { + // } // parallel + //} // stage ('Test Report') } // stages post { always { diff --git a/ci/docker/Dockerfile.maldet.el.8 b/ci/docker/Dockerfile.maldet.el.8 index 8575f549515..dd333d49aa0 100755 --- a/ci/docker/Dockerfile.maldet.el.8 +++ b/ci/docker/Dockerfile.maldet.el.8 @@ -1,4 +1,5 @@ # Copyright 2018-2022 Intel Corporation +# Copyright 2025 Hewlett Packard Enterprise Development LP # All rights reserved. # # 'recipe' for Docker to build an environment scan DAOS for malware. @@ -17,6 +18,7 @@ ARG CB0 ARG REPO_FILE_URL ARG JENKINS_URL ARG REPOS +ARG DAOS_LAB_CA_FILE_URL # script to translate lookup distro names to commonly used names COPY ./utils/scripts/helpers/distro_info.sh /tmp/distro_info.sh # script to install OS updates basic tools and maldet diff --git a/ci/docker/Dockerfile.maldet.leap.15 b/ci/docker/Dockerfile.maldet.leap.15 index dae53a61ad6..a083c2e4fcd 100755 --- a/ci/docker/Dockerfile.maldet.leap.15 +++ b/ci/docker/Dockerfile.maldet.leap.15 @@ -1,4 +1,5 @@ # Copyright 2018-2022 Intel Corporation +# Copyright 2025 Hewlett Packard Enterprise Development LP # All rights reserved. # # 'recipe' for Docker to build an environment scan DAOS for malware. @@ -17,6 +18,7 @@ ARG CB0 ARG REPO_FILE_URL ARG JENKINS_URL ARG REPOS +ARG DAOS_LAB_CA_FILE_URL # script to translate lookup distro names to commonly used names COPY ./utils/scripts/helpers/distro_info.sh /tmp/distro_info.sh # script to install OS updates basic tools and maldet diff --git a/ci/rpm/build.sh b/ci/rpm/build.sh index 1aa300f1554..a21e4b37d5d 100755 --- a/ci/rpm/build.sh +++ b/ci/rpm/build.sh @@ -21,7 +21,7 @@ fi mydir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" ci_envs="$mydir/../parse_ci_envs.sh" if [ -e "${ci_envs}" ]; then - # shellcheck source=parse_ci_envs.sh + # shellcheck source=parse_ci_envs.sh disable=SC1091 source "${ci_envs}" fi @@ -52,6 +52,8 @@ if ! 
mkdir -p "artifacts/${TARGET}/"; then exit 1 fi +export + # shellcheck disable=SC2086 DEBEMAIL="$DAOS_EMAIL" DEBFULLNAME="$DAOS_FULLNAME" \ TOPDIR=$PWD make CHROOT_NAME="${CHROOT_NAME}" ${JOB_REPOS} \ diff --git a/utils/docker/Dockerfile.code_scanning b/utils/docker/Dockerfile.code_scanning index c2339fedb94..a46acf6df55 100644 --- a/utils/docker/Dockerfile.code_scanning +++ b/utils/docker/Dockerfile.code_scanning @@ -4,15 +4,34 @@ # 'recipe' for Docker for code scanning. # -# Pull base image -FROM fedora:latest +ARG FVERSION=latest +FROM fedora:$FVERSION +# Needed for later use of FVERSION +ARG FVERSION LABEL maintainer="daos@daos.groups.io" # Intermittent cache-bust. Used to reduce load on the actual CACHEBUST later. ARG CB0 +# Use local repo server if present +ARG REPO_FILE_URL +ARG DAOS_LAB_CA_FILE_URL +# script to install OS updates basic tools and daos dependencies +# COPY ./utils/scripts/install-fedora.sh /tmp/install.sh +# script to setup local repo if available +COPY ./utils/scripts/helpers/repo-helper-fedora.sh /tmp/repo-helper.sh + +RUN chmod +x /tmp/repo-helper.sh && \ + /tmp/repo-helper.sh && \ + rm -f /tmp/repo-helper.sh + # Install Python Bandit scanner. -RUN dnf -y upgrade && dnf -y install bandit && dnf clean all +# The unset commands are currently needed for the combination of running +# with a local repository, yet needing a proxy to reach outside repositories. +# This needs to be moved to a shell script like above in the future to +# properly only remove the proxy variables only when they need to be removed +RUN unset HTTPS_PROXY && unset https_proxy && \ + dnf -y upgrade && dnf -y install bandit && dnf clean all ARG CB1 -RUN dnf -y upgrade && dnf clean all +RUN unset HTTPS_PROXY && unset https_proxy && dnf -y upgrade && dnf clean all diff --git a/utils/docker/Dockerfile.el.8 b/utils/docker/Dockerfile.el.8 index 9e511941a89..46fc2755311 100644 --- a/utils/docker/Dockerfile.el.8 +++ b/utils/docker/Dockerfile.el.8 @@ -1,4 +1,5 @@ # Copyright 2018-2024 Intel Corporation +# Copyright 2025 Hewlett Packard Enterprise Development LP # All rights reserved. # # 'recipe' for Docker to build an image of EL 8 based @@ -8,7 +9,7 @@ # Pull base image ARG POINT_RELEASE= ARG BASE_DISTRO=rockylinux/rockylinux:8$POINT_RELEASE -FROM $BASE_DISTRO as basic +FROM $BASE_DISTRO AS basic LABEL maintainer="daos@daos.groups.io" # Needed for later use of BASE_DISTRO ARG BASE_DISTRO @@ -19,6 +20,7 @@ ARG CB0 ARG REPO_FILE_URL ARG JENKINS_URL ARG REPOS +ARG DAOS_LAB_CA_FILE_URL # script to install OS updates basic tools and daos dependencies COPY ./utils/scripts/install-el8.sh /tmp/install.sh # script to setup local repo if available diff --git a/utils/docker/Dockerfile.el.9 b/utils/docker/Dockerfile.el.9 index afd7594a360..99bca7a9957 100644 --- a/utils/docker/Dockerfile.el.9 +++ b/utils/docker/Dockerfile.el.9 @@ -1,4 +1,5 @@ # Copyright 2022-2024 Intel Corporation +# Copyright 2025 Hewlett Packard Enterprise Development LP # All rights reserved. 
# # 'recipe' for Docker to build an image of EL 9 based @@ -8,7 +9,7 @@ # Pull base image ARG POINT_RELEASE= ARG BASE_DISTRO=almalinux:9$POINT_RELEASE -FROM $BASE_DISTRO as basic +FROM $BASE_DISTRO AS basic LABEL maintainer="daos@daos.groups.io" # Needed for later use of BASE_DISTRO ARG BASE_DISTRO @@ -19,6 +20,7 @@ ARG CB0 ARG REPO_FILE_URL ARG JENKINS_URL ARG REPOS +ARG DAOS_LAB_CA_FILE_URL # script to install OS updates basic tools and daos dependencies COPY ./utils/scripts/install-el9.sh /tmp/install.sh # script to setup local repo if available diff --git a/utils/docker/Dockerfile.leap.15 b/utils/docker/Dockerfile.leap.15 index b8326e9c840..a6fe3271bfd 100644 --- a/utils/docker/Dockerfile.leap.15 +++ b/utils/docker/Dockerfile.leap.15 @@ -1,4 +1,5 @@ # Copyright 2018-2024 Intel Corporation +# Copyright 2025 Hewlett Packard Enterprise Development LP # All rights reserved. # # 'recipe' for Docker to build an image of Leap based @@ -8,7 +9,7 @@ # Pull base image ARG POINT_RELEASE= ARG BASE_DISTRO=registry.opensuse.org/opensuse/leap-dnf:15$POINT_RELEASE -FROM $BASE_DISTRO as basic +FROM $BASE_DISTRO AS basic LABEL maintainer="daos@daos.groups.io" # Needed for later use of BASE_DISTRO ARG BASE_DISTRO @@ -19,6 +20,7 @@ ARG CB0 ARG REPO_FILE_URL ARG JENKINS_URL ARG REPOS +ARG DAOS_LAB_CA_FILE_URL # script to install OS updates basic tools and daos dependencies COPY ./utils/scripts/install-leap15.sh /tmp/install.sh # script to setup local repo if available diff --git a/utils/docker/Dockerfile.ubuntu b/utils/docker/Dockerfile.ubuntu index 4f6c138db02..6abeba1e35c 100644 --- a/utils/docker/Dockerfile.ubuntu +++ b/utils/docker/Dockerfile.ubuntu @@ -1,4 +1,5 @@ # Copyright 2018-2024 Intel Corporation +# Copyright 2025 Hewlett Packard Enterprise Development LP # All rights reserved. # # 'recipe' for Docker to build an image of Ubuntu-based environment for building the DAOS project. @@ -10,6 +11,8 @@ ARG BASE_DISTRO=ubuntu:22.04 FROM $BASE_DISTRO LABEL maintainer="daos@daos.groups.io" +# Needed for later use of BASE_DISTRO +ARG BASE_DISTRO # Intermittent cache-bust. Used to reduce load on the actual CB1 later. ARG CB0 @@ -17,17 +20,13 @@ ARG CB0 ENV DEBIAN_FRONTEND=noninteractive # Install basic tools -COPY ./utils/scripts/install-ubuntu.sh /tmp/install.sh -RUN echo "APT::Get::Assume-Yes \"true\";" > /etc/apt/apt.conf.d/no-prompt -RUN echo "APT::Install-Recommends \"false\";" > /etc/apt/apt.conf.d/no-recommends -RUN apt-get update && \ - apt-get upgrade && \ - apt-get install gpg-agent software-properties-common && \ - add-apt-repository ppa:longsleep/golang-backports && \ - apt-get update && \ - chmod +x /tmp/install.sh && \ - /tmp/install.sh && \ - apt-get clean all +ARG DAOS_LAB_CA_FILE_URL +# script to setup local repo if available +COPY ./utils/scripts/helpers/repo-helper-ubuntu.sh /tmp/repo-helper.sh + +RUN chmod +x /tmp/repo-helper.sh && \ + /tmp/repo-helper.sh && \ + rm -f /tmp/repo-helper.sh RUN locale-gen en_US.UTF-8 diff --git a/utils/githooks/README.md b/utils/githooks/README.md index f5e726e99af..4fcf02448cd 100644 --- a/utils/githooks/README.md +++ b/utils/githooks/README.md @@ -1,7 +1,7 @@ # About DAOS Git hooks Githooks are a [well documented](https://git-scm.com/docs/githooks) feature -of git that enable various local exectubles to be run during various stages of +of git that enable various local executables to be run during various stages of the git workflow. 
The DAOS repo contains several built-in githooks that are intended @@ -18,6 +18,7 @@ Recommended: Configure your `core.hookspath`. Any new githooks added to the repository will automatically run, but possibly require additional software to produce the desired effect. Additionally, as the branch changes, the githooks change with it. + ```sh git config core.hookspath utils/githooks ``` @@ -33,10 +34,13 @@ effectively a noop if such is not installed. Requirements come from a combination of `pip` and system packages and can usually be installed through standard means. To install `pip` packages specified in [utils/cq/requirements.txt](../../utils/cq/requirements.txt) it is recommended to setup a virtual environment and install with pip. If you already have a [virtual environment for building](../../docs/QSG/build_from_scratch.md#python-packages) you can simply install the requirements: + ```sh python3 -m pip install -r utils/cq/requirements.txt ``` + Install system packages with your package manager - for example: + ```sh sudo dnf install git-clang-format -y ``` @@ -52,7 +56,7 @@ If the check is unable to parse the version output, it will fail. Try running 3. `flake8` 4. `isort` 5. `yamllint` -6. `gofmt` +6. `gofmt` or `golang-go` 7. `codespell` #### Optional tools @@ -79,11 +83,13 @@ any one of the required tools is missing. allowing the user to inspect the changes and retry the commit. - See [.clang-format](../../.clang-format) for configuration - In some cases unwanted formatting changes are made. To disable formatting, for example: - ``` + + ```c /* clang-format off */ ... /* clang-format on */ ``` + 6. gofmt - Automatically formats for modified GO files 7. isort - Linter for python imports on modified python files 8. flake - Linter for python files diff --git a/utils/rpms/packaging/Dockerfile.coverity b/utils/rpms/packaging/Dockerfile.coverity index 7eed2c35d58..3ba809bfdc6 100755 --- a/utils/rpms/packaging/Dockerfile.coverity +++ b/utils/rpms/packaging/Dockerfile.coverity @@ -6,7 +6,7 @@ # Pull base image FROM fedora:latest -MAINTAINER daos-stack +LABEL maintainer="daos-stack " # use same UID as host and default value of 1000 if not specified ARG UID=1000 diff --git a/utils/rpms/packaging/Dockerfile.mockbuild b/utils/rpms/packaging/Dockerfile.mockbuild index 76a6e941849..baf32623482 100644 --- a/utils/rpms/packaging/Dockerfile.mockbuild +++ b/utils/rpms/packaging/Dockerfile.mockbuild @@ -1,5 +1,6 @@ # # Copyright 2018-2024 Intel Corporation +# Copyright 2025 Hewlett Packard Enterprise Development LP # # 'recipe' for Docker to build an RPM # @@ -13,15 +14,15 @@ LABEL maintainer="daos@daos.groups.io" # Use local repo server if present ARG REPO_FILE_URL -RUN if [ -n "$REPO_FILE_URL" ]; then \ - cd /etc/yum.repos.d/ && \ - curl -k -f -o daos_ci-fedora-artifactory.repo.tmp \ - "$REPO_FILE_URL"daos_ci-fedora-artifactory.repo && \ - for file in *.repo; do \ - true > $file; \ - done; \ - mv daos_ci-fedora-artifactory.repo{.tmp,}; \ - fi +ARG DAOS_LAB_CA_FILE_URL +# script to install OS updates basic tools and daos dependencies +# COPY ./utils/scripts/install-fedora.sh /tmp/install.sh +# script to setup local repo if available +COPY ./utils/scripts/helpers/repo-helper-fedora.sh /tmp/repo-helper.sh + +RUN chmod +x /tmp/repo-helper.sh && \ + /tmp/repo-helper.sh && \ + rm -f /tmp/repo-helper.sh # Install basic tools RUN dnf -y install mock make \ @@ -50,7 +51,7 @@ RUN dnf -y upgrade && \ # RPM doesn't wipe out our patch # Ditto for the patch to zero and display ccache stats # 
https://github.com/rpm-software-management/mock/pull/1299 -ARG PACKAGINGDIR=packaging +ARG PACKAGINGDIR=./utils/rpms/packaging COPY ${PACKAGINGDIR}/*.patch ./ RUN (cd $(python3 -c 'import site; print(site.getsitepackages()[-1])') && \ if ! grep -e --ignore-unused-rpmlintrc rpmlint/cli.py; then \ diff --git a/utils/rpms/packaging/Dockerfile.ubuntu.20.04 b/utils/rpms/packaging/Dockerfile.ubuntu.20.04 index 4d0546448cc..6a9fa6ca865 100644 --- a/utils/rpms/packaging/Dockerfile.ubuntu.20.04 +++ b/utils/rpms/packaging/Dockerfile.ubuntu.20.04 @@ -4,38 +4,24 @@ # 'recipe' for Docker to build an Debian package # # Pull base image -FROM ubuntu:20.04 +ARG BASE_DISTRO=ubuntu:20.04 +FROM $BASE_DISTRO LABEL org.opencontainers.image.authors="daos@daos.groups.io" +# Needed for later use of BASE_DISTRO +ARG BASE_DISTRO + +ARG REPO_FILE_URL +ARG DAOS_LAB_CA_FILE_URL +# script to setup local repo if available +COPY ./utils/scripts/helpers/repo-helper-ubuntu.sh /tmp/repo-helper.sh + +RUN chmod +x /tmp/repo-helper.sh && \ + /tmp/repo-helper.sh \ + rm -f /tmp/repo-helper.sh RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ curl gpg -ARG REPO_FILE_URL -RUN if [ -n "$REPO_FILE_URL" ]; then \ - cd /etc/apt/sources.list.d && \ - curl -f -o daos_ci-ubuntu20.04-artifactory.list.tmp \ - "$REPO_FILE_URL"daos_ci-ubuntu20.04-artifactory.list && \ - true > ../sources.list && \ - mv daos_ci-ubuntu20.04-artifactory.list.tmp \ - daos_ci-ubuntu20.04-artifactory.list; \ - url="${REPO_FILE_URL%/*/}/hpe-ilorest-ubuntu-bionic-proxy/"; \ - else \ - url="https://downloads.linux.hpe.com/SDR/repo/ilorest/"; \ - fi; \ - cd -; \ - mkdir -p /usr/local/share/keyrings/; \ - curl -f -O "$url"GPG-KEY-hprest; \ - gpg --no-default-keyring --keyring ./temp-keyring.gpg \ - --import GPG-KEY-hprest; \ - gpg --no-default-keyring --keyring ./temp-keyring.gpg --export \ - --output /usr/local/share/keyrings/hpe-sdr-public.gpg; \ - rm ./temp-keyring.gpg; \ - curl -f -O "$REPO_FILE_URL"esad_repo.key; \ - gpg --no-default-keyring --keyring ./temp-keyring.gpg \ - --import esad_repo.key; \ - gpg --no-default-keyring --keyring ./temp-keyring.gpg --export \ - --output /usr/local/share/keyrings/daos-stack-public.gpg - # Install basic tools RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ autoconf bash ca-certificates curl debhelper dh-make \ @@ -45,10 +31,10 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ cmake valgrind rpmdevtools # use same UID as host and default value of 1000 if not specified -ARG UID=1000 +ARG UID 1000 # Add build user (to keep chrootbuild happy) -ENV USER build +ENV USER=build RUN useradd -u $UID -ms /bin/bash $USER # need to run the build command as root, as it needs to chroot diff --git a/utils/rpms/packaging/Dockerfile.ubuntu.rolling b/utils/rpms/packaging/Dockerfile.ubuntu.rolling index 02aca458445..ec6acd3774b 100644 --- a/utils/rpms/packaging/Dockerfile.ubuntu.rolling +++ b/utils/rpms/packaging/Dockerfile.ubuntu.rolling @@ -5,7 +5,7 @@ # # Pull base image FROM ubuntu:rolling -Maintainer daos-stack +LABEL maintainer="daos-stack " # use same UID as host and default value of 1000 if not specified ARG UID=1000 diff --git a/utils/rpms/packaging/rpm_chrootbuild b/utils/rpms/packaging/rpm_chrootbuild index d6443b7357e..34e059ddff9 100755 --- a/utils/rpms/packaging/rpm_chrootbuild +++ b/utils/rpms/packaging/rpm_chrootbuild @@ -2,6 +2,8 @@ set -uex +: "${HTTPS_PROXY:=}" + cp /etc/mock/"$CHROOT_NAME".cfg mock.cfg # Enable mock ccache plugin @@ -10,6 +12,13 @@ 
config_opts['plugin_conf']['ccache_enable'] = True config_opts['plugin_conf']['ccache_opts']['dir'] = "%(cache_topdir)s/%(root)s/ccache/" EOF +if [ -n "$HTTPS_PROXY" ];then + yum_proxy="http://${HTTPS_PROXY##*//}" + echo "config_opts['https_proxy']=$yum_proxy" >> mock.cfg +fi + +cat mock.cfg + if [[ $CHROOT_NAME == *epel-8-x86_64 ]]; then cat <> mock.cfg @@ -60,7 +69,11 @@ if [ -n "${ARTIFACTORY_URL:-}" ] && "$LOCAL_REPOS"; then REPO_FILE_URL="file://$(readlink -e "$REPO_FILES_PR")/" fi fi - curl -sSf "$REPO_FILE_URL"daos_ci-"${CHROOT_NAME%-*}".repo >> mock.cfg + curl_proxy=() + # if [ -n "$HTTPS_PROXY" ]; then + # curl_proxy=(-x "${HTTPS_PROXY##*//}") + # fi + curl "${curl_proxy[@]}" -sSf "$REPO_FILE_URL"daos_ci-"${CHROOT_NAME%-*}".repo >> mock.cfg repo_adds+=("--enablerepo *-artifactory") fi fi @@ -127,7 +140,7 @@ if ! eval time mock -r mock.cfg ${repo_dels[*]} ${repo_adds[*]} --no-clean \ fi # Save the ccache -if [ -d /scratch/ ]; then +if [ -d /scratch/mock/ ]; then mkdir -p "$bs_dir"/ if ! flock "$bs_dir" -c "tar -czf $bs_dir/ccache-$CHROOT_NAME-$PACKAGE.tar.gz /var/cache/mock/${CHROOT_NAME}/ccache"; then echo "Failed to save ccache. Plowing onward." diff --git a/utils/scripts/helpers/install_maldet.sh b/utils/scripts/helpers/install_maldet.sh index 5f9666d348d..9cc3701d77b 100755 --- a/utils/scripts/helpers/install_maldet.sh +++ b/utils/scripts/helpers/install_maldet.sh @@ -9,7 +9,7 @@ set -uex mydir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" -# shellcheck source=utils/scripts/helpers/distro_info.sh +# shellcheck source=utils/scripts/helpers/distro_info.sh disable=SC1091 source "$mydir/distro_info.sh" # We need sudo for running the scan and git for backward @@ -33,7 +33,7 @@ mkdir -p /etc/clamd.d printf "LogSyslog yes\n" >> /etc/clamd.d/scan.conf lmd_tarball='maldetect-current.tar.gz' -: "${REPO_FILE_URL:=https://artifactory.dc.hpdd.intel.com/artifactory/repo-files/}" +: "${REPO_FILE_URL:=https://artifactory/artifactory/repo-files/}" lmd_base_url="$(dirname "$REPO_FILE_URL")" lmd_base="${lmd_base_url#*://}" lmd_url="${lmd_base_url}/maldetect/downloads/${lmd_tarball}" @@ -56,7 +56,7 @@ popd /usr/local/sbin/maldet --update-sigs printf "ScriptedUpdates no\n" >> /etc/freshclam.conf -: "${JOB_URL:=https://build.hpdd.intel.com/job/clamav_daily_update/}" +: "${JOB_URL:=https://build/job/clamav_daily_update/}" printf "PrivateMirror %s" \ "${JOB_URL}lastSuccessfulBuild/artifact/download/clam" \ >> /etc/freshclam.conf diff --git a/utils/scripts/helpers/repo-helper-el8.sh b/utils/scripts/helpers/repo-helper-el8.sh index 987e9914bbb..7ab532957ef 100755 --- a/utils/scripts/helpers/repo-helper-el8.sh +++ b/utils/scripts/helpers/repo-helper-el8.sh @@ -10,6 +10,7 @@ set -uex : "${BASE_DISTRO:=rockylinux/rockylinux:$MAJOR_VER}" : "${JENKINS_URL:=}" : "${REPOS:=}" +: "${DAOS_LAB_CA_FILE_URL:=}" # shellcheck disable=SC2120 disable_repos () { @@ -38,6 +39,15 @@ install_curl() { : } +# Use local repo server if present +install_optional_ca() { + ca_storage="/etc/pki/ca-trust/source/anchors/" + if [ -n "$DAOS_LAB_CA_FILE_URL" ]; then + curl -k --noproxy '*' -sSf -o "${ca_storage}lab_ca_file.pem" "$DAOS_LAB_CA_FILE_URL" + update-ca-trust + fi +} + # installs/upgrades of epel-release add repos # Disable mirrorlist check when using local repos. 
DISTRO="rocky" @@ -53,12 +63,17 @@ MAJOR_VER="${BASE_DISTRO##*:}" MAJOR_VER="${MAJOR_VER%%.*}" if [ -n "$REPO_FILE_URL" ]; then install_curl + install_optional_ca mkdir -p /etc/yum.repos.d pushd /etc/yum.repos.d/ - curl -k -f -o daos_ci-el"$MAJOR_VER"-artifactory.repo \ + curl -k --noproxy '*' -sSf -o daos_ci-el"$MAJOR_VER"-artifactory.repo \ "$REPO_FILE_URL"daos_ci-el"$MAJOR_VER"-artifactory.repo disable_repos /etc/yum.repos.d/ popd + # These may have been created in the Dockerfile must be removed + # when using a local repository. + unset HTTPS_PROXY + unset https_proxy fi dnf -y --disablerepo \*epel\* install dnf-plugins-core dnf -y config-manager --save --setopt=assumeyes=True diff --git a/utils/scripts/helpers/repo-helper-el9.sh b/utils/scripts/helpers/repo-helper-el9.sh index fe1959589ef..82c4f764ca9 100644 --- a/utils/scripts/helpers/repo-helper-el9.sh +++ b/utils/scripts/helpers/repo-helper-el9.sh @@ -10,6 +10,7 @@ set -uex : "${BASE_DISTRO:=rockylinux/rockylinux:$MAJOR_VER}" : "${JENKINS_URL:=}" : "${REPOS:=}" +: "${DAOS_LAB_CA_FILE_URL:=}" # shellcheck disable=SC2120 disable_repos () { @@ -38,6 +39,15 @@ install_curl() { : } +# Use local repo server if present +install_optional_ca() { + ca_storage="/etc/pki/ca-trust/source/anchors/" + if [ -n "$DAOS_LAB_CA_FILE_URL" ]; then + curl -k --noproxy '*' -sSf -o "${ca_storage}lab_ca_file.pem" "$DAOS_LAB_CA_FILE_URL" + update-ca-trust + fi +} + # installs/upgrades of epel-release add repos # Disable mirrorlist check when using local repos. DISTRO="rocky" @@ -53,12 +63,17 @@ MAJOR_VER="${BASE_DISTRO##*:}" MAJOR_VER="${MAJOR_VER%%.*}" if [ -n "$REPO_FILE_URL" ]; then install_curl + install_optional_ca mkdir -p /etc/yum.repos.d pushd /etc/yum.repos.d/ - curl -k -f -o daos_ci-el"$MAJOR_VER"-artifactory.repo \ + curl -k --noproxy '*' -sSf -o daos_ci-el"$MAJOR_VER"-artifactory.repo \ "$REPO_FILE_URL"daos_ci-el"$MAJOR_VER"-artifactory.repo disable_repos /etc/yum.repos.d/ popd + # These may have been created in the Dockerfile must be removed + # when using a local repository. + unset HTTPS_PROXY + unset https_proxy fi dnf -y --disablerepo \*epel\* install dnf-plugins-core dnf -y config-manager --save --setopt=assumeyes=True diff --git a/utils/scripts/helpers/repo-helper-fedora.sh b/utils/scripts/helpers/repo-helper-fedora.sh new file mode 100644 index 00000000000..62a84c4fe46 --- /dev/null +++ b/utils/scripts/helpers/repo-helper-fedora.sh @@ -0,0 +1,78 @@ +#!/bin/bash +set -uex + +# This script is used by dockerfiles to optionally use +# a local repository instead of a distro provided repository. +# It will also optionally allow running a /tmp/install script +# for custom packages if present. 
+
+: "${REPO_FILE_URL:=}"
+: "${DAOS_LAB_CA_FILE_URL:=}"
+: "${FVERSION:=latest}"
+: "${archive:=}"
+if [ "$FVERSION" != "latest" ]; then
+    archive="-archive"
+fi
+
+# shellcheck disable=SC2120
+disable_repos () {
+    local repos_dir="$1"
+    shift
+    local save_repos
+    IFS=" " read -r -a save_repos <<< "${*:-} daos_ci-fedora${archive}-artifactory"
+    if [ -n "$REPO_FILE_URL" ]; then
+        pushd "$repos_dir"
+        local repo
+        for repo in "${save_repos[@]}"; do
+            mv "$repo".repo{,.tmp}
+        done
+        for file in *.repo; do
+            true > "$file"
+        done
+        for repo in "${save_repos[@]}"; do
+            mv "$repo".repo{.tmp,}
+        done
+        popd
+    fi
+}
+
+# Use local repo server if present
+install_curl() {
+    :
+}
+
+# Use local repo server if present
+install_optional_ca() {
+    ca_storage="/etc/pki/ca-trust/source/anchors/"
+    if [ -n "$DAOS_LAB_CA_FILE_URL" ]; then
+        curl -k --noproxy '*' -sSf -o "${ca_storage}lab_ca_file.pem" "$DAOS_LAB_CA_FILE_URL"
+        update-ca-trust
+    fi
+}
+
+# Use local repo server if present
+# if a local repo server is present and the distro repo server can not
+# be reached, have to bootstrap in an environment to get curl installed
+# to then install the pre-built repo file.
+
+if [ -n "$REPO_FILE_URL" ]; then
+    install_curl
+    install_optional_ca
+    mkdir -p /etc/yum.repos.d
+    pushd /etc/yum.repos.d/
+    curl -k --noproxy '*' -sSf -o "daos_ci-fedora${archive}-artifactory.repo" \
+        "${REPO_FILE_URL}daos_ci-fedora${archive}-artifactory.repo"
+    disable_repos /etc/yum.repos.d/
+    popd
+    # These may have been created in the Dockerfile and must be removed
+    # when using a local repository.
+    unset HTTPS_PROXY
+    unset https_proxy
+fi
+dnf -y install dnf-plugins-core
+# This does not work in fedora/41 anymore -- needs investigation
+# dnf -y config-manager --save --setopt=assumeyes=True
+# dnf config-manager --save --setopt=install_weak_deps=False
+dnf clean all
+
+disable_repos /etc/yum.repos.d/
diff --git a/utils/scripts/helpers/repo-helper-leap15.sh b/utils/scripts/helpers/repo-helper-leap15.sh
index 1a772435f3b..b2279884a2e 100755
--- a/utils/scripts/helpers/repo-helper-leap15.sh
+++ b/utils/scripts/helpers/repo-helper-leap15.sh
@@ -37,22 +37,29 @@ disable_repos () {
 
 install_curl() {
     if command -v curl; then
+        echo "found curl!"
         return
     fi
 
-    if command -v dnf; then
-        dnf -y install curl
+    if command -v wget; then
+        echo "found wget!"
         return
     fi
 
-    if command -v zypper; then
-        zypper mr --all --disable
-        zypper addrepo \
-            "${REPO_FILE_URL%/*/}/opensuse-proxy/distribution/leap/${BASE_DISTRO##*:}/repo/oss/" \
-            temp_opensuse_oss_proxy
-        zypper --non-interactive install curl
-        zypper removerepo temp_opensuse_oss_proxy
-    fi
+    # If we did not already have curl, we won't be able to easily install it.
+    #if command -v dnf; then
+    #    dnf -y install curl
+    #    return
+    #fi
+
+    #if command -v zypper; then
+    #    zypper mr --all --disable
+    #    zypper addrepo \
+    #        "${REPO_FILE_URL%/*/}/opensuse-proxy/distribution/leap/${BASE_DISTRO##*:}/repo/oss/" \
+    #        temp_opensuse_oss_proxy
+    #    zypper --non-interactive install curl
+    #    zypper removerepo temp_opensuse_oss_proxy
+    #fi
 }
 
 install_dnf() {
@@ -72,6 +79,15 @@ install_dnf() {
     zypper removerepo temp_opensuse_oss_proxy
 }
 
+# Use local repo server if present
+install_optional_ca() {
+    ca_storage="/etc/pki/trust/anchors/"
+    if [ -n "$DAOS_LAB_CA_FILE_URL" ]; then
+        curl -k --noproxy '*' -sSf -o "${ca_storage}lab_ca_file.pem" "$DAOS_LAB_CA_FILE_URL"
+        update-ca-certificates
+    fi
+}
+
 # Use local repo server if present
 # if a local repo server is present and the distro repo server can not
 # be reached, have to bootstrap in an environment to get curl installed
@@ -86,12 +102,17 @@ else
 fi
 if [ -n "$REPO_FILE_URL" ]; then
     install_curl
+    install_optional_ca
     mkdir -p "$repos_dir"
    pushd "$repos_dir"
-    curl -k -f -o daos_ci-leap"$MAJOR_VER"-artifactory.repo \
+    curl -k --noproxy '*' -sSf -o daos_ci-leap"$MAJOR_VER"-artifactory.repo \
         "$REPO_FILE_URL"daos_ci-leap"$MAJOR_VER"-artifactory.repo
     disable_repos "$repos_dir"
     popd
+    # These may have been created in the Dockerfile and must be removed
+    # when using a local repository.
+    unset HTTPS_PROXY
+    unset https_proxy
     install_dnf
 else
     if ! command -v dnf; then
diff --git a/utils/scripts/helpers/repo-helper-ubuntu.sh b/utils/scripts/helpers/repo-helper-ubuntu.sh
new file mode 100644
index 00000000000..b92e25f6d18
--- /dev/null
+++ b/utils/scripts/helpers/repo-helper-ubuntu.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+set -uex
+
+# This script is used by dockerfiles to optionally use
+# a local repository instead of a distro provided repository.
+# It will also optionally allow running a /tmp/install script
+# for custom packages if present.
+
+: "${REPO_FILE_URL:=}"
+: "${HTTPS_PROXY:=}"
+: "${DAOS_LAB_CA_FILE_URL:=}"
+
+disable_repos () {
+    mv /etc/apt/sources.list.d/ubuntu.sources \
+        /etc/apt/sources.list.d/ubuntu.sources.disabled
+}
+
+# Use local repo server if present
+install_curl() {
+
+    if command -v curl; then
+        echo "found curl!"
+        return
+    else
+        apt-get update
+        apt-get install curl ca-certificates
+    fi
+
+    if command -v wget; then
+        echo "found wget!"
+        return
+    fi
+    # If we don't find one of these, we are basically sunk for using
+    # a local repository mirror.
+}
+
+# Use local repo server if present
+install_optional_ca() {
+    ca_storage="/usr/local/share/ca-certificates/"
+    if [ -n "$DAOS_LAB_CA_FILE_URL" ]; then
+        curl -k --noproxy '*' -sSf -o "${ca_storage}lab_ca_file.crt" "$DAOS_LAB_CA_FILE_URL"
+        update-ca-certificates
+    fi
+}
+
+echo "APT::Get::Assume-Yes \"true\";" > /etc/apt/apt.conf.d/no-prompt
+echo "APT::Install-Recommends \"false\";" > /etc/apt/apt.conf.d/no-recommends
+if [ -n "$HTTPS_PROXY" ]; then
+    apt_proxy="http://${HTTPS_PROXY##*//}"
+    echo "Acquire::http::Proxy \"$apt_proxy\";" > \
+        /etc/apt/apt.conf.d/local_proxy
+    if [ -n "$REPO_FILE_URL" ]; then
+        direct="${REPO_FILE_URL##*//}"
+        direct="${direct%%/*}"
+        echo "Acquire::http::Proxy { $direct DIRECT; };" >> \
+            /etc/apt/apt.conf.d/local_proxy
+    fi
+fi
+
+# Use local repo server if present
+# if a local repo server is present and the distro repo server can not
+# be reached, have to bootstrap in an environment to get curl installed
+# to then install the pre-built repo file.
+DISTRO_VERSION="${BASE_DISTRO##*:}"
+if [ -n "$REPO_FILE_URL" ]; then
+    install_curl
+    install_optional_ca
+    pushd /etc/apt/sources.list.d/
+    curl -k --noproxy '*' -sSf -o daos_ci-ubuntu"$DISTRO_VERSION"-artifactory.repo \
+        "$REPO_FILE_URL"daos_ci-ubuntu"$DISTRO_VERSION"-artifactory.repo
+    disable_repos
+    mkdir -p /usr/local/share/keyrings/
+    curl --noproxy '*' -sSf -O "$REPO_FILE_URL"esad_repo.key
+    gpg --no-default-keyring --keyring ./temp-keyring.gpg \
+        --import esad_repo.key
+    gpg --no-default-keyring --keyring ./temp-keyring.gpg --export \
+        --output /usr/local/share/keyrings/daos-stack-public.gpg
+    popd
+fi
+
+apt-get update
+apt-get upgrade
+apt-get install gpg-agent software-properties-common
+add-apt-repository ppa:longsleep/golang-backports
+apt-get update
+chmod +x /tmp/install.sh
+/tmp/install.sh
+apt-get clean all
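
A quick usage sketch of the new build arguments wired through the Dockerfiles and repo-helper scripts above. This is illustrative only: the URLs and image tag are placeholders for a site-local mirror and CA certificate, and in CI the equivalent values come from `dockerBuildArgs()` in the Jenkinsfile.

```sh
# Build one of the images by hand, exercising the optional local-repo and CA
# plumbing added in this patch. Both arguments may be omitted, in which case
# the repo-helper-*.sh scripts leave the distro repositories untouched and
# skip the CA install.
docker build . \
    -f utils/docker/Dockerfile.el.8 \
    --build-arg REPO_FILE_URL="https://repo.example.com/repo-files/" \
    --build-arg DAOS_LAB_CA_FILE_URL="https://repo.example.com/lab_ca_file.pem" \
    -t daos-el8-build
```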