diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 388720e3995ea..1f1fcf2f9d4a1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -146,17 +146,17 @@ variables: # To use images from datadog-agent-buildimages dev branches, set the corresponding # SUFFIX variable to _test_only DATADOG_AGENT_BUILDIMAGES_SUFFIX: "" - DATADOG_AGENT_BUILDIMAGES: v16690913-5148788 + DATADOG_AGENT_BUILDIMAGES: v16742019-a2675dc DATADOG_AGENT_WINBUILDIMAGES_SUFFIX: "" - DATADOG_AGENT_WINBUILDIMAGES: v16690913-5148788 + DATADOG_AGENT_WINBUILDIMAGES: v16742019-a2675dc DATADOG_AGENT_ARMBUILDIMAGES_SUFFIX: "" - DATADOG_AGENT_ARMBUILDIMAGES: v16690913-5148788 + DATADOG_AGENT_ARMBUILDIMAGES: v16742019-a2675dc DATADOG_AGENT_SYSPROBE_BUILDIMAGES_SUFFIX: "" - DATADOG_AGENT_SYSPROBE_BUILDIMAGES: v16026304-782441d + DATADOG_AGENT_SYSPROBE_BUILDIMAGES: v16742019-a2675dc DATADOG_AGENT_NIKOS_BUILDIMAGES_SUFFIX: "" - DATADOG_AGENT_NIKOS_BUILDIMAGES: v16026304-782441d + DATADOG_AGENT_NIKOS_BUILDIMAGES: v16742019-a2675dc DATADOG_AGENT_BTF_GEN_BUILDIMAGES_SUFFIX: "" - DATADOG_AGENT_BTF_GEN_BUILDIMAGES: v16026304-782441d + DATADOG_AGENT_BTF_GEN_BUILDIMAGES: v16742019-a2675dc DATADOG_AGENT_BUILDERS: v9930706-ef9d493 DATADOG_AGENT_EMBEDDED_PATH: /opt/datadog-agent/embedded diff --git a/.gitlab/binary_build/cluster_agent.yml b/.gitlab/binary_build/cluster_agent.yml index 113ae06679094..3c29fdce8e0ee 100644 --- a/.gitlab/binary_build/cluster_agent.yml +++ b/.gitlab/binary_build/cluster_agent.yml @@ -19,8 +19,8 @@ cluster_agent-build_amd64: variables: ARCH: amd64 before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 cluster_agent-build_arm64: extends: .cluster_agent-build_common @@ -32,5 +32,5 @@ cluster_agent-build_arm64: variables: ARCH: arm64 before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 diff --git a/.gitlab/binary_build/cluster_agent_cloudfoundry.yml b/.gitlab/binary_build/cluster_agent_cloudfoundry.yml index 1046527ad405e..3f98c6413c0ab 100644 --- a/.gitlab/binary_build/cluster_agent_cloudfoundry.yml +++ b/.gitlab/binary_build/cluster_agent_cloudfoundry.yml @@ -13,8 +13,8 @@ cluster_agent_cloudfoundry-build_amd64: variables: ARCH: amd64 before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: - inv check-go-version - inv -e cluster-agent-cloudfoundry.build diff --git a/.gitlab/binary_build/linux.yml b/.gitlab/binary_build/linux.yml index 3061b5d36b088..a5e6d0d142b99 100644 --- a/.gitlab/binary_build/linux.yml +++ b/.gitlab/binary_build/linux.yml @@ -5,13 +5,13 @@ build_dogstatsd_static-binary_x64: rules: !reference [.on_a7] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["lint_deb-x64", "tests_deb-x64-py3", "go_deps"] variables: ARCH: amd64 before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: - inv check-go-version - inv -e dogstatsd.build --static --major-version 7 @@ -22,13 +22,13 @@ build_dogstatsd_static-binary_arm64: rules: !reference [.on_a7] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_arm64$DATADOG_AGENT_ARMBUILDIMAGES_SUFFIX:$DATADOG_AGENT_ARMBUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: 
["arch:arm64"] needs: ["lint_deb-arm64", "tests_deb-arm64-py3", "go_deps"] variables: ARCH: arm64 before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: - inv check-go-version - inv -e dogstatsd.build --static --major-version 7 @@ -39,11 +39,11 @@ build_dogstatsd-binary_x64: rules: !reference [.on_a7] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["lint_deb-x64", "tests_deb-x64-py3", "go_deps"] before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: - inv check-go-version - inv -e dogstatsd.build --major-version 7 @@ -54,13 +54,13 @@ build_dogstatsd-binary_arm64: !reference [.on_all_builds_a7] stage: binary_build image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_arm64$DATADOG_AGENT_ARMBUILDIMAGES_SUFFIX:$DATADOG_AGENT_ARMBUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] needs: ["lint_deb-arm64", "tests_deb-arm64-py3", "go_deps"] variables: ARCH: arm64 before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: - inv check-go-version - inv -e dogstatsd.build --major-version 7 @@ -72,11 +72,12 @@ build_iot_agent-binary_x64: rules: !reference [.on_a7] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["lint_deb-x64", "tests_deb-x64-py3", "go_deps"] before_script: - !reference [.retrieve_linux_go_deps] script: + - source /root/.bashrc - inv check-go-version - inv -e agent.build --flavor iot --major-version 7 - $S3_CP_CMD $CI_PROJECT_DIR/$AGENT_BINARIES_DIR/agent $S3_ARTIFACTS_URI/iot/agent @@ -86,12 +87,13 @@ build_iot_agent-binary_arm64: !reference [.on_all_builds_a7] stage: binary_build image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] needs: ["lint_deb-arm64", "tests_deb-arm64-py3", "go_deps"] variables: ARCH: arm64 before_script: - !reference [.retrieve_linux_go_deps] script: + - source /root/.bashrc - inv check-go-version - inv -e agent.build --flavor iot --major-version 7 diff --git a/.gitlab/binary_build/serverless.yml b/.gitlab/binary_build/serverless.yml index 0196a1e4bc62b..61ad89c721589 100644 --- a/.gitlab/binary_build/serverless.yml +++ b/.gitlab/binary_build/serverless.yml @@ -2,6 +2,7 @@ .build_serverless_common: stage: binary_build before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] script: - inv check-go-version @@ -12,7 +13,7 @@ build_serverless-deb_x64: variables: BINARY_NAME: datadog-agent-x64 image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps"] artifacts: expire_in: 1 day @@ -24,5 +25,5 @@ build_serverless-deb_arm64: variables: BINARY_NAME: datadog-agent-arm64 image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: 
["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] needs: ["go_deps"] diff --git a/.gitlab/deploy_6/container.yml b/.gitlab/deploy_6/container.yml index b1037e6a81657..7abf39d52d3ba 100644 --- a/.gitlab/deploy_6/container.yml +++ b/.gitlab/deploy_6/container.yml @@ -14,6 +14,7 @@ stage: deploy6 dependencies: [] before_script: + - source /root/.bashrc - if [[ "$VERSION" == "" ]]; then export VERSION="$(inv agent.version --major-version 6 --url-safe)"; fi - export IMG_SOURCES="${SRC_AGENT}:v${CI_PIPELINE_ID}-${CI_COMMIT_SHORT_SHA}-6${JMX}-amd64,${SRC_AGENT}:v${CI_PIPELINE_ID}-${CI_COMMIT_SHORT_SHA}-6${JMX}-arm64" - export IMG_DESTINATIONS="${AGENT_REPOSITORY}:${VERSION}${JMX}" diff --git a/.gitlab/deploy_7/container.yml b/.gitlab/deploy_7/container.yml index 45516c6772a14..df3db2ace79f1 100644 --- a/.gitlab/deploy_7/container.yml +++ b/.gitlab/deploy_7/container.yml @@ -14,6 +14,7 @@ stage: deploy7 dependencies: [] before_script: + - source /root/.bashrc - if [[ "$VERSION" == "" ]]; then export VERSION="$(inv agent.version --major-version 7 --url-safe)"; fi - export IMG_BASE_SRC="${SRC_AGENT}:v${CI_PIPELINE_ID}-${CI_COMMIT_SHORT_SHA}" - export IMG_LINUX_SOURCES="${IMG_BASE_SRC}-7${JMX}-amd64,${IMG_BASE_SRC}-7${JMX}-arm64" @@ -55,6 +56,7 @@ deploy_containers-dogstatsd: !reference [.on_deploy_a7_manual_auto_on_rc] dependencies: [] before_script: + - source /root/.bashrc - export VERSION="$(inv agent.version --major-version 7 --url-safe)" - export IMG_SOURCES="${SRC_DSD}:v${CI_PIPELINE_ID}-${CI_COMMIT_SHORT_SHA}-amd64,${SRC_DSD}:v${CI_PIPELINE_ID}-${CI_COMMIT_SHORT_SHA}-arm64" - export IMG_DESTINATIONS="${DSD_REPOSITORY}:${VERSION}" diff --git a/.gitlab/deploy_dca.yml b/.gitlab/deploy_dca.yml index dc433def2a662..1e4db98735f20 100644 --- a/.gitlab/deploy_dca.yml +++ b/.gitlab/deploy_dca.yml @@ -14,6 +14,7 @@ stage: deploy_dca dependencies: [] before_script: + - source /root/.bashrc - if [[ "$VERSION" == "" ]]; then export VERSION="$(inv agent.version --major-version 7 --url-safe)"; fi - if [[ "$CLUSTER_AGENT_REPOSITORY" == "" ]]; then export CLUSTER_AGENT_REPOSITORY="cluster-agent"; fi - export IMG_BASE_SRC="${SRC_DCA}:v${CI_PIPELINE_ID}-${CI_COMMIT_SHORT_SHA}" diff --git a/.gitlab/deps_fetch.yml b/.gitlab/deps_fetch.yml index 9dfbeefe0956a..5fb0ebc885f73 100644 --- a/.gitlab/deps_fetch.yml +++ b/.gitlab/deps_fetch.yml @@ -14,9 +14,10 @@ go_deps: stage: deps_fetch image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: [] script: + - source /root/.bashrc - inv -e deps --verbose - cd $GOPATH/pkg/mod/ && tar czf $CI_PROJECT_DIR/modcache.tar.gz . artifacts: @@ -28,9 +29,10 @@ go_deps: go_tools_deps: stage: deps_fetch image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: [] script: + - source /root/.bashrc - inv -e download-tools - cd $GOPATH/pkg/mod/ && tar czf $CI_PROJECT_DIR/modcache_tools.tar.gz . 
artifacts: diff --git a/.gitlab/docker_common/publish_job_templates.yml b/.gitlab/docker_common/publish_job_templates.yml index f6eb8e19425c2..144e9ce5416aa 100644 --- a/.gitlab/docker_common/publish_job_templates.yml +++ b/.gitlab/docker_common/publish_job_templates.yml @@ -12,6 +12,7 @@ IMG_VARIABLES: "" IMG_SIGNING: "" script: # We can't use the 'trigger' keyword on manual jobs, otherwise they can't be run if the pipeline fails and is retried + - source /root/.bashrc - export GITLAB_TOKEN=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.gitlab_pipelines_scheduler_token --with-decryption --query "Parameter.Value" --out text) - ECR_RELEASE_SUFFIX="${CI_COMMIT_TAG+-release}" - IMG_VARIABLES="$(sed -E "s#(${SRC_AGENT}|${SRC_DSD}|${SRC_DCA})#\1${ECR_RELEASE_SUFFIX}#g" <<<"$IMG_VARIABLES")" diff --git a/.gitlab/integration_test.yml b/.gitlab/integration_test.yml index d58299e7f8990..c2740355dbf96 100644 --- a/.gitlab/integration_test.yml +++ b/.gitlab/integration_test.yml @@ -10,7 +10,7 @@ dogstatsd_x64_size_test: tags: ["runner:main"] needs: ["build_dogstatsd_static-binary_x64"] before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - mkdir -p $STATIC_BINARIES_DIR - $S3_CP_CMD $S3_ARTIFACTS_URI/static/dogstatsd.amd64 $STATIC_BINARIES_DIR/dogstatsd script: diff --git a/.gitlab/internal_image_deploy.yml b/.gitlab/internal_image_deploy.yml index 2ea78e96f088b..8732e081f66ac 100644 --- a/.gitlab/internal_image_deploy.yml +++ b/.gitlab/internal_image_deploy.yml @@ -22,6 +22,7 @@ docker_trigger_internal: TMPL_SRC_REPO: ci/datadog-agent/agent RELEASE_STAGING: "true" script: + - source /root/.bashrc - export GITLAB_TOKEN=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.gitlab_pipelines_scheduler_token --with-decryption --query "Parameter.Value" --out text) - if [ "$BUCKET_BRANCH" = "beta" ] || [ "$BUCKET_BRANCH" = "stable" ]; then TMPL_SRC_REPO="${TMPL_SRC_REPO}-release"; fi - if [ "$BUCKET_BRANCH" = "nightly" ]; then RELEASE_TAG="${RELEASE_TAG}-${CI_COMMIT_SHORT_SHA}"; fi @@ -49,6 +50,7 @@ docker_trigger_cluster_agent_internal: RELEASE_STAGING: "true" RELEASE_PROD: "true" script: + - source /root/.bashrc - export GITLAB_TOKEN=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.gitlab_pipelines_scheduler_token --with-decryption --query "Parameter.Value" --out text) - if [ "$BUCKET_BRANCH" = "beta" ] || [ "$BUCKET_BRANCH" = "stable" ]; then TMPL_SRC_REPO="${TMPL_SRC_REPO}-release"; fi - if [ "$BUCKET_BRANCH" = "nightly" ]; then RELEASE_TAG="${RELEASE_TAG}-${CI_COMMIT_SHORT_SHA}"; fi diff --git a/.gitlab/internal_kubernetes_deploy.yml b/.gitlab/internal_kubernetes_deploy.yml index b5002a111f19e..dd630696f5876 100644 --- a/.gitlab/internal_kubernetes_deploy.yml +++ b/.gitlab/internal_kubernetes_deploy.yml @@ -27,5 +27,6 @@ internal_kubernetes_deploy_experimental: WORKFLOW: "agents" FILTER: "cluster.env == 'experimental' and cluster.shortName == 'snowver'" script: + - source /root/.bashrc - export GITLAB_TOKEN=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.gitlab_pipelines_scheduler_token --with-decryption --query "Parameter.Value" --out text) - inv pipeline.trigger-child-pipeline --project-name "DataDog/k8s-datadog-agent-ops" --git-ref "main" --variables "OPTION_AUTOMATIC_ROLLOUT,WORKFLOW,OPTION_PRE_SCRIPT,FILTER,SKIP_PLAN_CHECK" diff --git a/.gitlab/notify.yml b/.gitlab/notify.yml index 422e7525c870a..2d84c4ef958db 100644 --- a/.gitlab/notify.yml +++ b/.gitlab/notify.yml @@ -48,7 +48,7 @@ 
send_pipeline_stats: when: always dependencies: [] script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - set +x - export GITLAB_TOKEN=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.gitlab_read_api_token --with-decryption --query "Parameter.Value" --out text) - export DD_API_KEY=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.datadog_api_key_org2 --with-decryption --query "Parameter.Value" --out text) diff --git a/.gitlab/package_build/deb.yml b/.gitlab/package_build/deb.yml index 120abc482690c..701c545981951 100644 --- a/.gitlab/package_build/deb.yml +++ b/.gitlab/package_build/deb.yml @@ -8,10 +8,11 @@ .agent_build_common_deb: script: - - echo "About to build for $RELEASE_VERSION" + - source /root/.bashrc - !reference [.setup_ruby_mirror_linux] - !reference [.setup_python_mirror_linux] - !reference [.retrieve_linux_go_deps] + - echo "About to build for $RELEASE_VERSION" # remove artifacts from previous pipelines that may come from the cache - rm -rf $OMNIBUS_PACKAGE_DIR/* # Artifacts and cache must live within project directory but we run omnibus in a neutral directory. @@ -77,7 +78,6 @@ agent_deb-x64-a6: DESTINATION_DEB: 'datadog-agent_6_amd64.deb' DESTINATION_DBG_DEB: 'datadog-agent-dbg_6_amd64.deb' before_script: - - source /root/.bashrc && conda activate ddpy3 - export RELEASE_VERSION=$RELEASE_VERSION_6 agent_deb-x64-a7: @@ -96,7 +96,6 @@ agent_deb-x64-a7: DESTINATION_DEB: 'datadog-agent_7_amd64.deb' DESTINATION_DBG_DEB: 'datadog-agent-dbg_7_amd64.deb' before_script: - - source /root/.bashrc && conda activate ddpy3 - export RELEASE_VERSION=$RELEASE_VERSION_7 agent_deb-arm64-a6: @@ -114,7 +113,6 @@ agent_deb-arm64-a6: DESTINATION_DEB: 'datadog-agent_6_arm64.deb' DESTINATION_DBG_DEB: 'datadog-agent-dbg_6_arm64.deb' before_script: - - source /root/.bashrc && conda activate ddpy3 - export RELEASE_VERSION=$RELEASE_VERSION_6 agent_deb-arm64-a7: @@ -132,11 +130,11 @@ agent_deb-arm64-a7: DESTINATION_DEB: 'datadog-agent_7_arm64.deb' DESTINATION_DBG_DEB: 'datadog-agent-dbg_7_arm64.deb' before_script: - - source /root/.bashrc && conda activate ddpy3 - export RELEASE_VERSION=$RELEASE_VERSION_7 .iot_agent_build_common_deb: script: + - source /root/.bashrc - !reference [.setup_ruby_mirror_linux] - !reference [.setup_python_mirror_linux] - !reference [.retrieve_linux_go_deps] @@ -206,8 +204,8 @@ dogstatsd_deb-x64: variables: AWS_CONTAINER_CREDENTIALS_RELATIVE_URI: /credentials before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: # remove artifacts from previous pipelines that may come from the cache - rm -rf $OMNIBUS_PACKAGE_DIR/* @@ -233,8 +231,8 @@ dogstatsd_deb-arm64: tags: ["runner:docker-arm", "platform:arm64"] needs: ["go_mod_tidy_check", "build_dogstatsd-binary_arm64", "go_deps"] before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: # remove artifacts from previous pipelines that may come from the cache - rm -rf $OMNIBUS_PACKAGE_DIR/* diff --git a/.gitlab/package_build/dmg.yml b/.gitlab/package_build/dmg.yml index 0a62c850004c9..74c5a02b6dff8 100644 --- a/.gitlab/package_build/dmg.yml +++ b/.gitlab/package_build/dmg.yml @@ -33,4 +33,5 @@ agent_dmg-x64-a7: AGENT_MAJOR_VERSION: 7 PYTHON_RUNTIMES: '3' before_script: + - source /root/.bashrc - export RELEASE_VERSION=$RELEASE_VERSION_7 diff --git a/.gitlab/package_build/rpm.yml b/.gitlab/package_build/rpm.yml index 
1913089da9349..4a80e234d2efc 100644 --- a/.gitlab/package_build/rpm.yml +++ b/.gitlab/package_build/rpm.yml @@ -72,7 +72,7 @@ agent_rpm-x64-a6: PYTHON_RUNTIMES: '2,3' PACKAGE_ARCH: amd64 before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - export RELEASE_VERSION=$RELEASE_VERSION_6 # build Agent package for rpm-x64 @@ -90,7 +90,7 @@ agent_rpm-x64-a7: PYTHON_RUNTIMES: '3' PACKAGE_ARCH: amd64 before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - export RELEASE_VERSION=$RELEASE_VERSION_7 # build Agent package for rpm-arm64 @@ -107,7 +107,7 @@ agent_rpm-arm64-a6: PYTHON_RUNTIMES: '2,3' PACKAGE_ARCH: arm64 before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - export RELEASE_VERSION=$RELEASE_VERSION_6 # build Agent package for rpm-arm64 @@ -124,12 +124,13 @@ agent_rpm-arm64-a7: PYTHON_RUNTIMES: '3' PACKAGE_ARCH: arm64 before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - export RELEASE_VERSION=$RELEASE_VERSION_7 .iot_agent_build_common_rpm: script: - echo "About to build iot agent for $RELEASE_VERSION_7" + - source /root/.bashrc - !reference [.setup_ruby_mirror_linux] - !reference [.setup_python_mirror_linux] - !reference [.retrieve_linux_go_deps] @@ -195,8 +196,8 @@ dogstatsd_rpm-x64: variables: AWS_CONTAINER_CREDENTIALS_RELATIVE_URI: /credentials before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: # remove artifacts from previous pipelines that may come from the cache - rm -rf $OMNIBUS_PACKAGE_DIR/* diff --git a/.gitlab/package_build/suse_rpm.yml b/.gitlab/package_build/suse_rpm.yml index 1648ccf420410..859c354bc991b 100644 --- a/.gitlab/package_build/suse_rpm.yml +++ b/.gitlab/package_build/suse_rpm.yml @@ -74,7 +74,7 @@ agent_suse-x64-a6: PYTHON_RUNTIMES: '2,3' PACKAGE_ARCH: amd64 before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - export RELEASE_VERSION=$RELEASE_VERSION_6 # build Agent package for suse-x64 @@ -92,7 +92,7 @@ agent_suse-x64-a7: PYTHON_RUNTIMES: '3' PACKAGE_ARCH: amd64 before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - export RELEASE_VERSION=$RELEASE_VERSION_7 iot_agent_suse-x64: @@ -103,7 +103,7 @@ iot_agent_suse-x64: tags: ["runner:main"] needs: ["go_mod_tidy_check", "go_deps"] before_script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc script: - echo "About to build iot agent for $RELEASE_VERSION_7" - !reference [.setup_ruby_mirror_linux] @@ -142,8 +142,8 @@ dogstatsd_suse-x64: variables: AWS_CONTAINER_CREDENTIALS_RELATIVE_URI: /credentials before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] - - source /root/.bashrc && conda activate ddpy3 script: # remove artifacts from previous pipelines that may come from the cache - rm -rf $OMNIBUS_PACKAGE_DIR_SUSE/* diff --git a/.gitlab/pkg_metrics.yml b/.gitlab/pkg_metrics.yml index d25ac0f58e385..466756668c7fe 100644 --- a/.gitlab/pkg_metrics.yml +++ b/.gitlab/pkg_metrics.yml @@ -51,7 +51,7 @@ send_pkg_size-a6: - ls -l $OMNIBUS_PACKAGE_DIR - ls -l $OMNIBUS_PACKAGE_DIR_SUSE script: - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - mkdir -p /tmp/deb/amd64-agent - mkdir -p /tmp/deb/arm64-agent - mkdir -p /tmp/rpm/amd64-agent @@ -119,9 +119,10 @@ send_pkg_size-a7: - ls -l $OMNIBUS_PACKAGE_DIR - ls -l $OMNIBUS_PACKAGE_DIR_SUSE script: + - source 
/root/.bashrc - !reference [.add_metric_func, script] - - source /root/.bashrc && conda activate ddpy3 + - source /root/.bashrc - mkdir -p /tmp/amd64-deb/agent /tmp/amd64-deb/dogstatsd /tmp/amd64-deb/iot-agent /tmp/amd64-deb/heroku-agent - mkdir -p /tmp/arm64-deb/agent /tmp/arm64-deb/dogstatsd /tmp/arm64-deb/iot-agent - mkdir -p /tmp/amd64-rpm/agent /tmp/amd64-rpm/dogstatsd /tmp/amd64-rpm/iot-agent @@ -190,11 +191,13 @@ send_pkg_size-a7: image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES tags: ["runner:main"] script: + - source /root/.bashrc - !reference [.add_metric_func, script] - ls -l $OMNIBUS_PACKAGE_DIR - if [[ "${ARCH}" == "amd64" ]]; then ls -l $OMNIBUS_PACKAGE_DIR_SUSE; fi - - source /root/.bashrc && conda activate ddpy3 + + - source /root/.bashrc - export failures=0 - export last_stable=$(inv release.get-release-json-value "last_stable::${MAJOR_VERSION}") # Get stable packages from S3 buckets, send new package sizes & compare stable and new package sizes diff --git a/.gitlab/setup/setup.yml b/.gitlab/setup/setup.yml index 5ab044f7541f2..471b0aa69699b 100644 --- a/.gitlab/setup/setup.yml +++ b/.gitlab/setup/setup.yml @@ -2,8 +2,9 @@ setup_agent_version: stage: setup image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] script: + - source /root/.bashrc - inv -e agent.version --version-cached - $S3_CP_CMD $CI_PROJECT_DIR/agent-version.cache $S3_ARTIFACTS_URI/agent-version.cache diff --git a/.gitlab/source_test/golang_deps_generate.yml b/.gitlab/source_test/golang_deps_generate.yml index b906fefd6cd4c..db182cbdcf25d 100644 --- a/.gitlab/source_test/golang_deps_generate.yml +++ b/.gitlab/source_test/golang_deps_generate.yml @@ -6,9 +6,10 @@ golang_deps_generate: !reference [.on_main_or_release_branch] stage: source_test image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps"] before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] script: - inv agent.build-dep-tree diff --git a/.gitlab/source_test/linux.yml b/.gitlab/source_test/linux.yml index e06e950690290..61006a3e20c95 100644 --- a/.gitlab/source_test/linux.yml +++ b/.gitlab/source_test/linux.yml @@ -2,8 +2,8 @@ .rtloader_tests: stage: source_test before_script: - - !reference [.retrieve_linux_go_deps] - source /root/.bashrc && conda activate $CONDA_ENV + - !reference [.retrieve_linux_go_deps] - inv -e rtloader.make --install-prefix=$CI_PROJECT_DIR/dev --python-runtimes "$PYTHON_RUNTIMES" - inv -e rtloader.install - inv -e rtloader.format --raise-if-changed @@ -38,14 +38,14 @@ - inv -e rtloader.make --install-prefix=$CI_PROJECT_DIR/dev --python-runtimes "3" - inv -e rtloader.install - inv -e install-tools - - inv -e lint-go $FLAVORS $EXTRA_OPTS + - inv -e lint-go --cpus 4 $FLAVORS $EXTRA_OPTS tests_deb-x64-py2: extends: .rtloader_tests rules: !reference [.on_a6] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps"] variables: PYTHON_RUNTIMES: '2' @@ -56,7 +56,7 @@ tests_deb-x64-py3: - .rtloader_tests - .linux_tests image: 
486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: PYTHON_RUNTIMES: '3' @@ -66,7 +66,7 @@ lint_deb-x64: extends: - .linux_lint image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] tests_flavor_iot_deb-x64: @@ -74,7 +74,7 @@ tests_flavor_iot_deb-x64: - .rtloader_tests - .linux_tests image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: PYTHON_RUNTIMES: '3' @@ -85,7 +85,7 @@ lint_flavor_iot_deb-x64: extends: - .linux_lint image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: FLAVORS: '--flavors iot' @@ -95,7 +95,7 @@ tests_flavor_dogstatsd_deb-x64: - .rtloader_tests - .linux_tests image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: PYTHON_RUNTIMES: '3' @@ -106,7 +106,7 @@ lint_flavor_dogstatsd_deb-x64: extends: - .linux_lint image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: FLAVORS: '--flavors dogstatsd' @@ -116,7 +116,7 @@ tests_flavor_heroku_deb-x64: - .rtloader_tests - .linux_tests image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: PYTHON_RUNTIMES: '3' @@ -127,7 +127,7 @@ lint_flavor_heroku_deb-x64: extends: - .linux_lint image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: FLAVORS: '--flavors heroku' @@ -140,7 +140,7 @@ tests_rpm-x64-py2: rules: !reference [.on_a6] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/rpm_x64_testing$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps"] variables: PYTHON_RUNTIMES: '2' @@ -152,7 +152,7 @@ tests_rpm-x64-py3: - .rtloader_tests - .linux_tests image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/rpm_x64_testing$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps", "go_tools_deps"] variables: PYTHON_RUNTIMES: '3' @@ -163,7 +163,7 @@ lint_rpm-x64: extends: - .linux_lint image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/rpm_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: 
["go_deps", "go_tools_deps"] variables: EXTRA_OPTS: '--build-exclude=systemd' @@ -173,7 +173,7 @@ tests_deb-arm64-py2: rules: !reference [.on_a6] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] needs: ["go_deps"] variables: PYTHON_RUNTIMES: '2' @@ -185,7 +185,7 @@ tests_deb-arm64-py3: - .linux_tests needs: ["go_deps", "go_tools_deps"] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] variables: PYTHON_RUNTIMES: '3' CONDA_ENV: ddpy3 @@ -195,14 +195,14 @@ lint_deb-arm64: - .linux_lint needs: ["go_deps", "go_tools_deps"] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] tests_rpm-arm64-py2: extends: .rtloader_tests rules: !reference [.on_a6] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/rpm_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] needs: ["go_deps"] variables: PYTHON_RUNTIMES: '2' @@ -213,7 +213,7 @@ tests_rpm-arm64-py3: - .rtloader_tests - .linux_tests image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/rpm_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] needs: ["go_deps", "go_tools_deps"] variables: PYTHON_RUNTIMES: '3' @@ -223,16 +223,17 @@ lint_rpm-arm64: extends: - .linux_lint image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/rpm_arm64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:docker-arm", "platform:arm64"] + tags: ["arch:arm64"] needs: ["go_deps", "go_tools_deps"] # Check consistency of go.mod file with project imports go_mod_tidy_check: stage: source_test image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] needs: ["go_deps"] before_script: + - source /root/.bashrc - !reference [.retrieve_linux_go_deps] script: - inv -e check-mod-tidy diff --git a/.gitlab/source_test/macos.yml b/.gitlab/source_test/macos.yml index 0507dff8ad5e3..3d2c3908ddc24 100644 --- a/.gitlab/source_test/macos.yml +++ b/.gitlab/source_test/macos.yml @@ -4,10 +4,11 @@ tests_macos: rules: !reference [.on_a6] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] variables: PYTHON_RUNTIMES: '3' script: + - source /root/.bashrc - export GITHUB_KEY_B64=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.macos_github_key_b64 --with-decryption --query "Parameter.Value" --out text) - export GITHUB_APP_ID=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.macos_github_app_id --with-decryption --query "Parameter.Value" --out text) - export GITHUB_INSTALLATION_ID=$(aws ssm get-parameter --region us-east-1 --name ci.datadog-agent.macos_github_installation_id --with-decryption --query "Parameter.Value" 
--out text) diff --git a/.gitlab/source_test/slack.yml b/.gitlab/source_test/slack.yml index 79ba633dd21bc..26c2f351d9fa7 100644 --- a/.gitlab/source_test/slack.yml +++ b/.gitlab/source_test/slack.yml @@ -3,8 +3,9 @@ slack_teams_channels_check: stage: source_test image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES - tags: ["runner:main"] + tags: ["arch:amd64"] script: + - source /root/.bashrc - python3 -m pip install codeowners - inv -e pipeline.check-notify-teams diff --git a/.gitlab/trigger_release.yml b/.gitlab/trigger_release.yml index c0e0f8cb0a38d..787b860571518 100644 --- a/.gitlab/trigger_release.yml +++ b/.gitlab/trigger_release.yml @@ -18,6 +18,7 @@ script: # agent-release-management creates pipeline for both Agent 6 and Agent 7 # when triggered with major version 7 + - source /root/.bashrc - export RELEASE_VERSION=$(inv agent.version --major-version 7 --url-safe --omnibus-format)-1 - inv pipeline.trigger-child-pipeline --no-follow --project-name "DataDog/agent-release-management" --git-ref "main" --variables "ACTION,AUTO_RELEASE,BUILD_PIPELINE_ID,RELEASE_PRODUCT,RELEASE_VERSION,TARGET_REPO,TARGET_REPO_BRANCH" dependencies: [] diff --git a/pkg/clusteragent/admission/controllers/webhook/controller_v1_test.go b/pkg/clusteragent/admission/controllers/webhook/controller_v1_test.go index 83354bb5f9629..6effd3a5307ee 100644 --- a/pkg/clusteragent/admission/controllers/webhook/controller_v1_test.go +++ b/pkg/clusteragent/admission/controllers/webhook/controller_v1_test.go @@ -184,7 +184,7 @@ func TestGenerateTemplatesV1(t *testing.T) { Name: name, ClientConfig: admiv1.WebhookClientConfig{ Service: &admiv1.ServiceReference{ - Namespace: "default", + Namespace: "nsfoo", Name: "datadog-admission-controller", Port: &port, Path: &path, @@ -484,6 +484,9 @@ func TestGenerateTemplatesV1(t *testing.T) { }, }, } + + mockConfig.Set("kube_resources_namespace", "nsfoo") + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setupConfig() @@ -499,6 +502,7 @@ func TestGenerateTemplatesV1(t *testing.T) { } func TestGetWebhookSkeletonV1(t *testing.T) { + mockConfig := config.Mock(t) defaultReinvocationPolicy := admiv1.IfNeededReinvocationPolicy failurePolicy := admiv1.Ignore matchPolicy := admiv1.Exact @@ -514,7 +518,7 @@ func TestGetWebhookSkeletonV1(t *testing.T) { Name: "datadog.webhook.foo", ClientConfig: admiv1.WebhookClientConfig{ Service: &admiv1.ServiceReference{ - Namespace: "default", + Namespace: "nsfoo", Name: "datadog-admission-controller", Port: &port, Path: &path, @@ -582,11 +586,14 @@ func TestGetWebhookSkeletonV1(t *testing.T) { want: webhook(&customTimeout, objectSelector, nil), }, } + + mockConfig.Set("kube_resources_namespace", "nsfoo") + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if tt.timeout != nil { - config.Datadog.Set("admission_controller.timeout_seconds", *tt.timeout) - defer config.Datadog.SetDefault("admission_controller.timeout_seconds", defaultTimeout) + mockConfig.Set("admission_controller.timeout_seconds", *tt.timeout) + defer mockConfig.SetDefault("admission_controller.timeout_seconds", defaultTimeout) } c := &ControllerV1{} diff --git a/pkg/clusteragent/admission/controllers/webhook/controller_v1beta1_test.go b/pkg/clusteragent/admission/controllers/webhook/controller_v1beta1_test.go index 15de5affd4353..30bbc76127319 100644 --- a/pkg/clusteragent/admission/controllers/webhook/controller_v1beta1_test.go +++ 
b/pkg/clusteragent/admission/controllers/webhook/controller_v1beta1_test.go @@ -184,7 +184,7 @@ func TestGenerateTemplatesV1beta1(t *testing.T) { Name: name, ClientConfig: admiv1beta1.WebhookClientConfig{ Service: &admiv1beta1.ServiceReference{ - Namespace: "default", + Namespace: "nsfoo", Name: "datadog-admission-controller", Port: &port, Path: &path, @@ -485,6 +485,9 @@ func TestGenerateTemplatesV1beta1(t *testing.T) { }, }, } + + mockConfig.Set("kube_resources_namespace", "nsfoo") + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setupConfig() @@ -500,6 +503,7 @@ func TestGenerateTemplatesV1beta1(t *testing.T) { } func TestGetWebhookSkeletonV1beta1(t *testing.T) { + mockConfig := config.Mock(t) failurePolicy := admiv1beta1.Ignore matchPolicy := admiv1beta1.Exact sideEffects := admiv1beta1.SideEffectClassNone @@ -515,7 +519,7 @@ func TestGetWebhookSkeletonV1beta1(t *testing.T) { Name: "datadog.webhook.foo", ClientConfig: admiv1beta1.WebhookClientConfig{ Service: &admiv1beta1.ServiceReference{ - Namespace: "default", + Namespace: "nsfoo", Name: "datadog-admission-controller", Port: &port, Path: &path, @@ -583,11 +587,14 @@ func TestGetWebhookSkeletonV1beta1(t *testing.T) { want: webhook(&customTimeout, objectSelector, nil), }, } + + mockConfig.Set("kube_resources_namespace", "nsfoo") + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if tt.timeout != nil { - config.Datadog.Set("admission_controller.timeout_seconds", *tt.timeout) - defer config.Datadog.SetDefault("admission_controller.timeout_seconds", defaultTimeout) + mockConfig.Set("admission_controller.timeout_seconds", *tt.timeout) + defer mockConfig.SetDefault("admission_controller.timeout_seconds", defaultTimeout) } c := &ControllerV1beta1{} diff --git a/pkg/clusteragent/admission/mutate/config_test.go b/pkg/clusteragent/admission/mutate/config_test.go index 81ab07ad0c694..c7b542c3a792b 100644 --- a/pkg/clusteragent/admission/mutate/config_test.go +++ b/pkg/clusteragent/admission/mutate/config_test.go @@ -11,6 +11,7 @@ import ( "testing" "github.com/DataDog/datadog-agent/pkg/config" + "github.com/DataDog/datadog-agent/pkg/util/kubernetes/apiserver/common" "github.com/DataDog/datadog-agent/pkg/util/pointer" "github.com/stretchr/testify/assert" @@ -125,7 +126,7 @@ func TestInjectService(t *testing.T) { pod = withLabels(pod, map[string]string{"admission.datadoghq.com/enabled": "true", "admission.datadoghq.com/config.mode": "service"}) err := injectConfig(pod, "", nil) assert.Nil(t, err) - assert.Contains(t, pod.Spec.Containers[0].Env, fakeEnvWithValue("DD_AGENT_HOST", "datadog.default.svc.cluster.local")) + assert.Contains(t, pod.Spec.Containers[0].Env, fakeEnvWithValue("DD_AGENT_HOST", "datadog."+common.GetMyNamespace()+".svc.cluster.local")) } func TestInjectSocket(t *testing.T) { diff --git a/pkg/gohai/filesystem/filesystem_nix_test.go b/pkg/gohai/filesystem/filesystem_nix_test.go index 9e233ab38a73c..f182cbc232347 100644 --- a/pkg/gohai/filesystem/filesystem_nix_test.go +++ b/pkg/gohai/filesystem/filesystem_nix_test.go @@ -9,6 +9,8 @@ package filesystem import ( + "os" + "runtime" "testing" "time" @@ -134,6 +136,10 @@ func TestFaileDfWithData(t *testing.T) { } func TestGetFileSystemInfo(t *testing.T) { + if os.Getenv("CI") != "" && runtime.GOOS == "linux" && runtime.GOARCH == "arm64" { + t.Skip("Test disabled on arm64 Linux CI runners, as df doesn't work") + } + out, err := getFileSystemInfo() require.NoError(t, err) outArray := out.([]interface{}) diff --git a/pkg/gohai/gohai_test.go 
b/pkg/gohai/gohai_test.go index 155ec96aa583c..c31cd1e48b24f 100644 --- a/pkg/gohai/gohai_test.go +++ b/pkg/gohai/gohai_test.go @@ -8,6 +8,7 @@ package main import ( "encoding/json" "net" + "os" "runtime" "testing" @@ -89,6 +90,10 @@ type gohaiPayload struct { } func TestGohaiSerialization(t *testing.T) { + if os.Getenv("CI") != "" && runtime.GOOS == "linux" && runtime.GOARCH == "arm64" { + t.Skip("Test disabled on arm64 Linux CI runners, as df doesn't work") + } + gohai, err := Collect() assert.NoError(t, err) diff --git a/pkg/metadata/host/host_tags_test.go b/pkg/metadata/host/host_tags_test.go index 1d49ed4279d5c..bc0e11ca33f7f 100644 --- a/pkg/metadata/host/host_tags_test.go +++ b/pkg/metadata/host/host_tags_test.go @@ -23,6 +23,9 @@ func init() { func TestGetHostTags(t *testing.T) { ctx := context.Background() mockConfig := config.Mock(t) + mockConfig.Set("autoconfig_from_environment", false) + defer mockConfig.Set("autoconfig_from_environment", true) + mockConfig.Set("tags", []string{"tag1:value1", "tag2", "tag3"}) defer mockConfig.Set("tags", nil) @@ -33,6 +36,11 @@ func TestGetHostTags(t *testing.T) { func TestGetEmptyHostTags(t *testing.T) { ctx := context.Background() + + mockConfig := config.Mock(t) + mockConfig.Set("autoconfig_from_environment", false) + defer mockConfig.Set("autoconfig_from_environment", true) + // getHostTags should never return a nil value under System even when there are no host tags hostTags := GetHostTags(ctx, false) assert.NotNil(t, hostTags.System) @@ -42,6 +50,9 @@ func TestGetEmptyHostTags(t *testing.T) { func TestGetHostTagsWithSplits(t *testing.T) { ctx := context.Background() mockConfig := config.Mock(t) + mockConfig.Set("autoconfig_from_environment", false) + defer mockConfig.Set("autoconfig_from_environment", true) + mockConfig.Set("tag_value_split_separator", map[string]string{"kafka_partition": ","}) mockConfig.Set("tags", []string{"tag1:value1", "tag2", "tag3", "kafka_partition:0,1,2"}) defer mockConfig.Set("tags", nil) @@ -54,6 +65,9 @@ func TestGetHostTagsWithSplits(t *testing.T) { func TestGetHostTagsWithoutSplits(t *testing.T) { ctx := context.Background() mockConfig := config.Mock(t) + mockConfig.Set("autoconfig_from_environment", false) + defer mockConfig.Set("autoconfig_from_environment", true) + mockConfig.Set("tag_value_split_separator", map[string]string{"kafka_partition": ";"}) mockConfig.Set("tags", []string{"tag1:value1", "tag2", "tag3", "kafka_partition:0,1,2"}) defer mockConfig.Set("tags", nil) @@ -66,6 +80,9 @@ func TestGetHostTagsWithoutSplits(t *testing.T) { func TestGetHostTagsWithEnv(t *testing.T) { ctx := context.Background() mockConfig := config.Mock(t) + mockConfig.Set("autoconfig_from_environment", false) + defer mockConfig.Set("autoconfig_from_environment", true) + mockConfig.Set("tags", []string{"tag1:value1", "tag2", "tag3", "env:prod"}) mockConfig.Set("env", "preprod") defer mockConfig.Set("tags", nil) @@ -91,6 +108,9 @@ func TestMarshalEmptyHostTags(t *testing.T) { func TestCombineExtraTags(t *testing.T) { ctx := context.Background() mockConfig := config.Mock(t) + mockConfig.Set("autoconfig_from_environment", false) + defer mockConfig.Set("autoconfig_from_environment", true) + mockConfig.Set("tags", []string{"tag1:value1", "tag2", "tag4"}) mockConfig.Set("extra_tags", []string{"tag1:value2", "tag3", "tag4"}) defer mockConfig.Set("tags", nil) @@ -104,6 +124,9 @@ func TestCombineExtraTags(t *testing.T) { func TestHostTagsCache(t *testing.T) { ctx := context.Background() mockConfig := config.Mock(t) + 
mockConfig.Set("autoconfig_from_environment", false) + defer mockConfig.Set("autoconfig_from_environment", true) + mockConfig.Set("collect_gce_tags", false) fooTags := []string{"foo1:value1"} diff --git a/pkg/metadata/internal/gohai/gohai_test.go b/pkg/metadata/internal/gohai/gohai_test.go index 3d1e3fa24bf78..08267875b5c44 100644 --- a/pkg/metadata/internal/gohai/gohai_test.go +++ b/pkg/metadata/internal/gohai/gohai_test.go @@ -6,12 +6,18 @@ package gohai import ( + "os" + "runtime" "testing" "github.com/stretchr/testify/assert" ) func TestGetPayload(t *testing.T) { + if os.Getenv("CI") != "" && runtime.GOOS == "linux" && runtime.GOARCH == "arm64" { + t.Skip("Test disabled on arm64 Linux CI runners, as df doesn't work") + } + gohai := GetPayload() assert.NotNil(t, gohai.Gohai.CPU) @@ -22,6 +28,10 @@ func TestGetPayload(t *testing.T) { } func TestGetPayloadContainerized(t *testing.T) { + if os.Getenv("CI") != "" && runtime.GOOS == "linux" && runtime.GOARCH == "arm64" { + t.Skip("Test disabled on arm64 Linux CI runners, as df doesn't work") + } + t.Setenv("DOCKER_DD_AGENT", "true") detectDocker0() @@ -39,6 +49,10 @@ func TestGetPayloadContainerized(t *testing.T) { } func TestGetPayloadContainerizedWithDocker0(t *testing.T) { + if os.Getenv("CI") != "" && runtime.GOOS == "linux" && runtime.GOARCH == "arm64" { + t.Skip("Test disabled on arm64 Linux CI runners, as df doesn't work") + } + t.Setenv("DOCKER_DD_AGENT", "true") detectDocker0() diff --git a/pkg/util/kubernetes/apiserver/common/common_test.go b/pkg/util/kubernetes/apiserver/common/common_test.go index 7c9433cabbcd8..ce464237341e6 100644 --- a/pkg/util/kubernetes/apiserver/common/common_test.go +++ b/pkg/util/kubernetes/apiserver/common/common_test.go @@ -24,7 +24,7 @@ func TestGetOrCreateClusterID(t *testing.T) { // kube-system doesn't exist GetOrCreateClusterID(client) - _, err := client.ConfigMaps("default").Get(context.TODO(), defaultClusterIDMap, metav1.GetOptions{}) + _, err := client.ConfigMaps(GetMyNamespace()).Get(context.TODO(), defaultClusterIDMap, metav1.GetOptions{}) assert.True(t, errors.IsNotFound(err)) // kube-system does exist @@ -39,7 +39,7 @@ func TestGetOrCreateClusterID(t *testing.T) { GetOrCreateClusterID(client) - cm, err := client.ConfigMaps("default").Get(context.TODO(), defaultClusterIDMap, metav1.GetOptions{}) + cm, err := client.ConfigMaps(GetMyNamespace()).Get(context.TODO(), defaultClusterIDMap, metav1.GetOptions{}) assert.Nil(t, err) id, found := cm.Data["id"] assert.True(t, found) diff --git a/pkg/util/kubernetes/apiserver/hpa_controller_test.go b/pkg/util/kubernetes/apiserver/hpa_controller_test.go index 039dd9398713a..82ae89beb9960 100644 --- a/pkg/util/kubernetes/apiserver/hpa_controller_test.go +++ b/pkg/util/kubernetes/apiserver/hpa_controller_test.go @@ -3,7 +3,7 @@ // This product includes software developed at Datadog (https://www.datadoghq.com/). // Copyright 2016-present Datadog, Inc. 
-//go:build kubeapiserver +//go:build !race && kubeapiserver package apiserver @@ -28,6 +28,7 @@ import ( "k8s.io/client-go/tools/record" "github.com/DataDog/datadog-agent/pkg/clusteragent/custommetrics" + "github.com/DataDog/datadog-agent/pkg/config" "github.com/DataDog/datadog-agent/pkg/errors" "github.com/DataDog/datadog-agent/pkg/util/kubernetes/autoscalers" "github.com/DataDog/datadog-agent/pkg/util/pointer" @@ -183,8 +184,11 @@ func makeAnnotations(metricName string, labels map[string]string) map[string]str // TestupdateExternalMetrics checks the reconciliation between the local cache and the global store logic func TestUpdate(t *testing.T) { + mockConfig := config.Mock(t) + mockConfig.Set("kube_resources_namespace", "nsfoo") + name := custommetrics.GetConfigmapName() - store, client := newFakeConfigMapStore(t, "default", name, nil) + store, client := newFakeConfigMapStore(t, "nsfoo", name, nil) d := &fakeDatadogClient{} p := &fakeProcessor{ @@ -214,13 +218,13 @@ func TestUpdate(t *testing.T) { // Start the DCA with already existing Data // Check if nothing in local store and Global Store is full we update the Global Store metrics correctly metricsToStore := map[string]custommetrics.ExternalMetricValue{ - "external_metric-horizontal-default-foo-metric1": { + "external_metric-horizontal-nsfoo-foo-metric1": { MetricName: "metric1", Labels: map[string]string{"foo": "bar"}, Ref: custommetrics.ObjectReference{ Type: "horizontal", Name: "foo", - Namespace: "default", + Namespace: "nsfoo", }, Value: 1.3, Valid: true, @@ -247,13 +251,13 @@ func TestUpdate(t *testing.T) { // Fresh start // Check if local store is not empty hctrl.toStore.m.Lock() - hctrl.toStore.data["external_metric-horizontal-default-foo-metric2"] = custommetrics.ExternalMetricValue{ + hctrl.toStore.data["external_metric-horizontal-nsfoo-foo-metric2"] = custommetrics.ExternalMetricValue{ MetricName: "metric2", Labels: map[string]string{"foo": "bar"}, Ref: custommetrics.ObjectReference{ Type: "horizontal", Name: "foo", - Namespace: "default", + Namespace: "nsfoo", }, } require.Len(t, hctrl.toStore.data, 1) @@ -268,13 +272,13 @@ func TestUpdate(t *testing.T) { // Check that if there is conflicting info from the local store and the Global Store that we merge correctly // Check conflict on metric name and labels hctrl.toStore.m.Lock() - hctrl.toStore.data["external_metric-horizontal-default-foo-metric2"] = custommetrics.ExternalMetricValue{ + hctrl.toStore.data["external_metric-horizontal-nsfoo-foo-metric2"] = custommetrics.ExternalMetricValue{ MetricName: "metric2", Labels: map[string]string{"foo": "baz"}, Ref: custommetrics.ObjectReference{ Type: "horizontal", Name: "foo", - Namespace: "default", + Namespace: "nsfoo", }, } require.Len(t, hctrl.toStore.data, 1) @@ -294,9 +298,12 @@ func TestUpdate(t *testing.T) { // TestAutoscalerController is an integration test of the AutoscalerController func TestAutoscalerController(t *testing.T) { + mockConfig := config.Mock(t) + mockConfig.Set("kube_resources_namespace", "nsfoo") + penTime := (int(time.Now().Unix()) - int(maxAge.Seconds()/2)) * 1000 name := custommetrics.GetConfigmapName() - store, client := newFakeConfigMapStore(t, "default", name, nil) + store, client := newFakeConfigMapStore(t, "nsfoo", name, nil) metricName := "foo" ddSeries := []datadog.Series{ { @@ -332,14 +339,14 @@ func TestAutoscalerController(t *testing.T) { mockedHPA := newFakeHorizontalPodAutoscaler( "hpa_1", - "default", + "nsfoo", "1", "foo", map[string]string{"foo": "bar"}, ) mockedHPA.Annotations = 
makeAnnotations("foo", map[string]string{"foo": "bar"}) - _, err := c.HorizontalPodAutoscalers("default").Create(context.TODO(), mockedHPA, metav1.CreateOptions{}) + _, err := c.HorizontalPodAutoscalers("nsfoo").Create(context.TODO(), mockedHPA, metav1.CreateOptions{}) require.NoError(t, err) timeout := time.NewTimer(5 * time.Second) @@ -453,14 +460,14 @@ func TestAutoscalerController(t *testing.T) { newMockedHPA := newFakeHorizontalPodAutoscaler( "hpa_2", - "default", + "nsfoo", "1", "foo", map[string]string{"foo": "bar"}, ) mockedHPA.Annotations = makeAnnotations("foo", map[string]string{"foo": "bar"}) - _, err = c.HorizontalPodAutoscalers("default").Create(context.TODO(), newMockedHPA, metav1.CreateOptions{}) + _, err = c.HorizontalPodAutoscalers("nsfoo").Create(context.TODO(), newMockedHPA, metav1.CreateOptions{}) require.NoError(t, err) select { case key := <-hctrl.autoscalers: @@ -470,7 +477,7 @@ func TestAutoscalerController(t *testing.T) { } // Verify that a Delete removes the Data from the Global Store and decreases metricsProcessdCount - err = c.HorizontalPodAutoscalers("default").Delete(context.TODO(), newMockedHPA.Name, metav1.DeleteOptions{}) + err = c.HorizontalPodAutoscalers("nsfoo").Delete(context.TODO(), newMockedHPA.Name, metav1.DeleteOptions{}) require.NoError(t, err) select { case <-ticker.C: @@ -479,7 +486,7 @@ func TestAutoscalerController(t *testing.T) { require.FailNow(t, "Timeout waiting for HPAs to update") } // Verify that a Delete removes the Data from the Global Store - err = c.HorizontalPodAutoscalers("default").Delete(context.TODO(), mockedHPA.Name, metav1.DeleteOptions{}) + err = c.HorizontalPodAutoscalers("nsfoo").Delete(context.TODO(), mockedHPA.Name, metav1.DeleteOptions{}) require.NoError(t, err) select { case <-ticker.C: diff --git a/pkg/util/kubernetes/apiserver/wpa_controller_test.go b/pkg/util/kubernetes/apiserver/wpa_controller_test.go index de26fbb888900..2e53539d9944d 100644 --- a/pkg/util/kubernetes/apiserver/wpa_controller_test.go +++ b/pkg/util/kubernetes/apiserver/wpa_controller_test.go @@ -3,7 +3,8 @@ // This product includes software developed at Datadog (https://www.datadoghq.com/). // Copyright 2016-present Datadog, Inc. 
-//go:build kubeapiserver +//go:build !race && kubeapiserver +// +build !race,kubeapiserver package apiserver @@ -40,6 +41,7 @@ import ( "github.com/DataDog/watermarkpodautoscaler/api/v1alpha1" "github.com/DataDog/datadog-agent/pkg/clusteragent/custommetrics" + "github.com/DataDog/datadog-agent/pkg/config" "github.com/DataDog/datadog-agent/pkg/errors" "github.com/DataDog/datadog-agent/pkg/util/kubernetes/autoscalers" "github.com/DataDog/datadog-agent/pkg/util/log" @@ -57,8 +59,11 @@ func init() { // TestupdateExternalMetrics checks the reconciliation between the local cache and the global store logic func TestUpdateWPA(t *testing.T) { + mockConfig := config.Mock(t) + mockConfig.Set("kube_resources_namespace", "nsfoo") + name := custommetrics.GetConfigmapName() - store, client := newFakeConfigMapStore(t, "default", name, nil) + store, client := newFakeConfigMapStore(t, "nsfoo", name, nil) d := &fakeDatadogClient{} p := &fakeProcessor{ @@ -88,13 +93,13 @@ func TestUpdateWPA(t *testing.T) { // Start the DCA with already existing Data // Check if nothing in local store and Global Store is full we update the Global Store metrics correctly metricsToStore := map[string]custommetrics.ExternalMetricValue{ - "external_metric-watermark-default-foo-metric1": { + "external_metric-watermark-nsfoo-foo-metric1": { MetricName: "metric1", Labels: map[string]string{"foo": "bar"}, Ref: custommetrics.ObjectReference{ Type: "watermark", Name: "foo", - Namespace: "default", + Namespace: "nsfoo", }, Value: 1.3, Valid: true, @@ -121,13 +126,13 @@ func TestUpdateWPA(t *testing.T) { // Fresh start // Check if local store is not empty hctrl.toStore.m.Lock() - hctrl.toStore.data["external_metric-watermark-default-foo-metric2"] = custommetrics.ExternalMetricValue{ + hctrl.toStore.data["external_metric-watermark-nsfoo-foo-metric2"] = custommetrics.ExternalMetricValue{ MetricName: "metric2", Labels: map[string]string{"foo": "bar"}, Ref: custommetrics.ObjectReference{ Type: "watermark", Name: "foo", - Namespace: "default", + Namespace: "nsfoo", }, } require.Len(t, hctrl.toStore.data, 1) @@ -142,13 +147,13 @@ func TestUpdateWPA(t *testing.T) { // Check that if there is conflicting info from the local store and the Global Store that we merge correctly // Check conflict on metric name and labels hctrl.toStore.m.Lock() - hctrl.toStore.data["external_metric-watermark-default-foo-metric2"] = custommetrics.ExternalMetricValue{ + hctrl.toStore.data["external_metric-watermark-nsfoo-foo-metric2"] = custommetrics.ExternalMetricValue{ MetricName: "metric2", Labels: map[string]string{"foo": "baz"}, Ref: custommetrics.ObjectReference{ Type: "watermark", Name: "foo", - Namespace: "default", + Namespace: "nsfoo", }, } require.Len(t, hctrl.toStore.data, 1) @@ -225,9 +230,12 @@ func TestWPAController(t *testing.T) { logFlush := configureLoggerForTest(t) defer logFlush() metricName := "foo" - namespace := "default" + namespace := "nsfoo" wpaName := "wpa_1" + mockConfig := config.Mock(t) + mockConfig.Set("kube_resources_namespace", "nsfoo") + penTime := (int(time.Now().Unix()) - int(maxAge.Seconds()/2)) * 1000 name := custommetrics.GetConfigmapName() store, client := newFakeConfigMapStore(t, namespace, name, nil) diff --git a/pkg/util/kubernetes/kubelet/kubelet_test.go b/pkg/util/kubernetes/kubelet/kubelet_test.go index ff90a1d15a83b..cf560c5ae5e70 100644 --- a/pkg/util/kubernetes/kubelet/kubelet_test.go +++ b/pkg/util/kubernetes/kubelet/kubelet_test.go @@ -559,6 +559,7 @@ func (suite *KubeletTestSuite) TestKubeletInitTokenHttps() { 
mockConfig.Set("kubelet_auth_token_path", "./testdata/fakeBearerToken") mockConfig.Set("kubelet_tls_verify", false) mockConfig.Set("kubernetes_kubelet_host", "127.0.0.1") + mockConfig.Set("kubelet_client_ca", "./testdata/ca.crt") ku := NewKubeUtil() err = ku.init() @@ -577,6 +578,7 @@ func (suite *KubeletTestSuite) TestKubeletInitTokenHttps() { map[string]string{ "url": fmt.Sprintf("https://127.0.0.1:%d", kubeletPort), "verify_tls": "false", + "ca_cert": "./testdata/ca.crt", "token": "fakeBearerToken", }, ku.GetRawConnectionInfo()) } @@ -597,7 +599,7 @@ func (suite *KubeletTestSuite) TestKubeletInitHttpsCerts() { mockConfig.Set("kubernetes_https_kubelet_port", kubeletPort) mockConfig.Set("kubernetes_http_kubelet_port", -1) - mockConfig.Set("kubelet_auth_token_path", "") + mockConfig.Set("kubelet_auth_token_path", "./testdata/fakeBearerToken") mockConfig.Set("kubelet_tls_verify", true) mockConfig.Set("kubelet_client_crt", k.testingCertificate) mockConfig.Set("kubelet_client_key", k.testingPrivateKey) @@ -610,16 +612,20 @@ func (suite *KubeletTestSuite) TestKubeletInitHttpsCerts() { <-k.Requests // Throwing away first GET assert.Equal(suite.T(), fmt.Sprintf("https://127.0.0.1:%d", kubeletPort), ku.kubeletClient.kubeletURL) - assert.False(suite.T(), ku.kubeletClient.client.Transport.(*http.Transport).TLSClientConfig.InsecureSkipVerify) + if transport, ok := ku.kubeletClient.client.Transport.(*http.Transport); ok { + assert.False(suite.T(), transport.TLSClientConfig.InsecureSkipVerify) + } b, code, err := ku.QueryKubelet(ctx, "/healthz") assert.Nil(suite.T(), err) assert.Equal(suite.T(), "ok", string(b)) assert.Equal(suite.T(), 200, code) r := <-k.Requests - assert.Equal(suite.T(), "", r.Header.Get(authorizationHeaderKey)) - clientCerts := ku.kubeletClient.client.Transport.(*http.Transport).TLSClientConfig.Certificates - require.Equal(suite.T(), 1, len(clientCerts)) - assert.Equal(suite.T(), clientCerts, s.TLS.Certificates) + assert.Equal(suite.T(), "Bearer fakeBearerToken", r.Header.Get(authorizationHeaderKey)) + if transport, ok := ku.kubeletClient.client.Transport.(*http.Transport); ok { + clientCerts := transport.TLSClientConfig.Certificates + require.Equal(suite.T(), 1, len(clientCerts)) + assert.Equal(suite.T(), clientCerts, s.TLS.Certificates) + } require.EqualValues(suite.T(), map[string]string{ @@ -628,6 +634,7 @@ func (suite *KubeletTestSuite) TestKubeletInitHttpsCerts() { "client_crt": k.testingCertificate, "client_key": k.testingPrivateKey, "ca_cert": k.testingCertificate, + "token": "fakeBearerToken", }, ku.GetRawConnectionInfo()) } diff --git a/pkg/util/kubernetes/kubelet/testdata/ca.crt b/pkg/util/kubernetes/kubelet/testdata/ca.crt new file mode 100644 index 0000000000000..f8ee20cedcfd6 --- /dev/null +++ b/pkg/util/kubernetes/kubelet/testdata/ca.crt @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIE+zCCAuOgAwIBAgIULxSv8YbxTVq2/XMtdHjOR6zjnRMwDQYJKoZIhvcNAQEL +BQAwDTELMAkGA1UEBhMCRlIwHhcNMjMwNDAzMTA0MjMyWhcNMzMwMzMxMTA0MjMy +WjANMQswCQYDVQQGEwJGUjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +AKZOZvYrSTYvladnE5tSxJQy4iePH75FX9TfzTA14vFR/hBj4800KRoPMZ0RuPnm +Xj8tgaRQ4cW8vMiQumCorQbH8uRVcJ6bIexfEKIcvsXvQPQTDoXoGWgvQE0+aVM8 +0EmWecrD9QoAyx0wVtsv/rn9l1d9TVTmcphIMCnXzZ6/4qXqW9uiMWcrSM9vd4Kk ++hfSv1naogjGQoU6aiR/F4admifGU2ubF8ajTjrweLP++uxk5kK4TSCGkBYbPqjR +ZM962cnJ+0PsHZ2qABDzWrltqzRIgtPubVO5AMpVMf0n8wF90ZHfeGcYiNi7FrCX +vwdsKB9QQ3w8b/l4Oa7P4gmHBosvBjLqpecwfmsdf6T3rxHfmVAafrLguwhyZEq9 +ZlWiIcdox47384A79hspFCB57xjHDjikWOwu7QZFWUWC7GemaTDt6pcVaNAcl7Pk 
+wADDs8tgsMdrCqg7cMovrYeRo0yRuljJmqYWd86W329X8l4/uiKzW7PMLFNsij6M +ujs6G9GNlNKNYx0Or0tf5jUVmoL+MA6sOc6qPk6e1m5oXQwYfT3kplSRI1KgBFRI +6Z+eVEdlz4yrFTidni7+pKL+9HzeRL2pyXO3KrhDa3svNvCym46Y0E37t2xOH6/Y +KO2QRrrnt/pf8jCjePx1Wn9RFd2LdBRTlOw4vUgQqUhHAgMBAAGjUzBRMB0GA1Ud +DgQWBBTgTQTss0gITdgLd55jerdV69tJdjAfBgNVHSMEGDAWgBTgTQTss0gITdgL +d55jerdV69tJdjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQCQ +P+JQsEVxD8cU1jrjF0s3QCut4t6rusXw1B8vWBZGCR0dokhILqhDcoutqge8kpBX +SFBSRfJvYMPu9ySlOJ3jPBQ/ekU6EbScowkX+gLSGnYUZ9muvxkYtWGBWxsBv7OC +Lw4hRA50XfszEGkY0lafYZoBqbvWKJboM7ssr5y2cDcHJ8c4XNbn36nqMRXbgfdi +bSleo9yki/Vu3LkzTOdsG9CBrZ0padmuvNCW5wafqzSpjA4JBM/lQDWWruAL1LvV +vyPuH0KYhJ88RVIYgQGRSD7pXLH+IG45Fuj5HUQMq8ayQEv2zNrYee0w4eG8XWd8 ++8DeyIpnutI6i9goFtbEXrA8SyUvkaI1Qc6kgHj+ofSYZRA+jbz7/u4FzV9iaeAM +nnSFnDp0hZiiNfJkoVXalljHHoEFZVO2A4vS3ojyc42snH7A/6u4kdBQIRGjluTU +mDQZf5mhZaLWjaCSLOsmc07aNPMNrSa6BTITnWUvA/1zdTxavnkym+T0BHB5SPL8 +q+a0ivVw/kr9HZYR7C/69qtUp+dgiUGlUu+ogEGBjgZsLLwli0PewVzACJAtmYDw +PtHo/Ho4mDzt2BE5pcRO2aMUERJX7SU+0zNyVdG48nVryD8L6PJvq6EyIFK5XiCI +nleTIrTrQpY1luBWe1sJ9QUJrBLmGoU572CXXhqc/w== +-----END CERTIFICATE----- diff --git a/tasks/test.py b/tasks/test.py index 141d962a52253..d0156e944e03a 100644 --- a/tasks/test.py +++ b/tasks/test.py @@ -272,7 +272,13 @@ def get_failure(self, flavor): def lint_flavor( - ctx, modules: List[GoModule], flavor: AgentFlavor, build_tags: List[str], arch: str, rtloader_root: bool + ctx, + modules: List[GoModule], + flavor: AgentFlavor, + build_tags: List[str], + arch: str, + rtloader_root: bool, + concurrency: int, ): """ Runs linters for given flavor, build tags, and modules. @@ -281,7 +287,12 @@ def lint_flavor( def command(module_results, module, module_result): with ctx.cd(module.full_path()): lint_results = run_golangci_lint( - ctx, targets=module.targets, rtloader_root=rtloader_root, build_tags=build_tags, arch=arch + ctx, + targets=module.targets, + rtloader_root=rtloader_root, + build_tags=build_tags, + arch=arch, + concurrency=concurrency, ) for lint_result in lint_results: module_result.lint_outputs.append(lint_result) @@ -457,7 +468,7 @@ def test( rtloader_root=None, python_home_2=None, python_home_3=None, - cpus=0, + cpus=None, major_version='7', python_runtimes='3', timeout=180, @@ -495,11 +506,26 @@ def test( # } modules_results_per_phase = defaultdict(dict) + # Sanitize environment variables + # We want to ignore all `DD_` variables, as they will interfere with the behavior + # of some unit tests + for env in os.environ.keys(): + if env.startswith("DD_"): + del os.environ[env] + # Run linters first if not skip_linters: modules_results_per_phase["lint"] = run_lint_go( - ctx, module, targets, flavors, build_include, build_exclude, rtloader_root, arch + ctx, + module, + targets, + flavors, + build_include, + build_exclude, + rtloader_root, + arch, + cpus, ) # Process input arguments @@ -515,8 +541,6 @@ def test( timeout = int(timeout) - # Lint - ldflags, gcflags, env = get_build_flags( ctx, rtloader_root=rtloader_root, @@ -646,6 +670,7 @@ def run_lint_go( build_exclude=None, rtloader_root=None, arch="x64", + cpus=None, ): modules, flavors = process_input_args(module, targets, flavors) @@ -662,7 +687,13 @@ def run_lint_go( for flavor, build_tags in linter_tags.items(): modules_lint_results_per_flavor[flavor] = lint_flavor( - ctx, modules=modules, flavor=flavor, build_tags=build_tags, arch=arch, rtloader_root=rtloader_root + ctx, + modules=modules, + flavor=flavor, + build_tags=build_tags, + arch=arch, + rtloader_root=rtloader_root, + concurrency=cpus, 
) return modules_lint_results_per_flavor @@ -678,6 +709,7 @@ def lint_go( build_exclude=None, rtloader_root=None, arch="x64", + cpus=None, ): """ Run go linters on the given module and targets. @@ -704,7 +736,15 @@ def lint_go( modules_results_per_phase = defaultdict(dict) modules_results_per_phase["lint"] = run_lint_go( - ctx, module, targets, flavors, build_include, build_exclude, rtloader_root, arch + ctx, + module, + targets, + flavors, + build_include, + build_exclude, + rtloader_root, + arch, + cpus, ) success = process_module_results(modules_results_per_phase)
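The gohai and metadata test hunks above all add the same guard: on the new arm64 Linux CI runners (the jobs retagged to "arch:arm64"), tests that depend on df are skipped. Below is a minimal sketch of that guard factored into a reusable helper; the package and helper names are illustrative and not part of the patch — only the condition and the skip message come from the diff.

package example // hypothetical package, for illustration only

import (
	"os"
	"runtime"
	"testing"
)

// skipIfNoDf mirrors the guard added in the gohai/metadata tests: the arm64
// Linux CI runners introduced by this change have no working `df`, so
// disk-related tests are skipped there.
func skipIfNoDf(t *testing.T) {
	t.Helper()
	if os.Getenv("CI") != "" && runtime.GOOS == "linux" && runtime.GOARCH == "arm64" {
		t.Skip("Test disabled on arm64 Linux CI runners, as df doesn't work")
	}
}

func TestDiskPayload(t *testing.T) {
	skipIfNoDf(t)
	// ... exercise code that shells out to df ...
}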
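The webhook, HPA, and WPA test hunks follow a second recurring pattern: instead of hard-coding the "default" namespace, the expected namespace is driven through the mock configuration by setting kube_resources_namespace to "nsfoo". A minimal sketch of that setup is below, assuming (as the updated assertions imply) that namespace-dependent helpers such as common.GetMyNamespace resolve their value from that key; the test name and assertion are illustrative, not part of the patch.

package apiserver_example // hypothetical package, for illustration only

import (
	"testing"

	"github.com/DataDog/datadog-agent/pkg/config"
	"github.com/DataDog/datadog-agent/pkg/util/kubernetes/apiserver/common"
)

func TestNamespaceComesFromConfig(t *testing.T) {
	// config.Mock(t) returns a test-scoped config, as used throughout the patch.
	mockConfig := config.Mock(t)
	mockConfig.Set("kube_resources_namespace", "nsfoo")

	// Assumption: the code under test resolves its namespace from the
	// kube_resources_namespace setting rather than a hard-coded "default".
	if ns := common.GetMyNamespace(); ns != "nsfoo" {
		t.Fatalf("expected namespace nsfoo, got %q", ns)
	}
}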