From 20f0e49635f229a38c61766096a775dad8fb832c Mon Sep 17 00:00:00 2001
From: Alex Lopez
Date: Mon, 20 Nov 2023 16:05:27 +0100
Subject: [PATCH] Remove use of nested blocks in integrations-py3 software
 definition

---
 .../datadog-agent-integrations-py3.rb         | 480 +++++++++---------
 1 file changed, 228 insertions(+), 252 deletions(-)

diff --git a/omnibus/config/software/datadog-agent-integrations-py3.rb b/omnibus/config/software/datadog-agent-integrations-py3.rb
index 91c692b8777d23..4268e5f02df5c1 100644
--- a/omnibus/config/software/datadog-agent-integrations-py3.rb
+++ b/omnibus/config/software/datadog-agent-integrations-py3.rb
@@ -8,6 +8,9 @@
 name 'datadog-agent-integrations-py3'
 
+license "BSD-3-Clause"
+license_file "./LICENSE"
+
 dependency 'datadog-agent'
 dependency 'datadog-agent-integrations-py3-dependencies'
 
@@ -89,9 +92,6 @@ agent_requirements_in = 'agent_requirements.in'
 build do
-  license "BSD-3-Clause"
-  license_file "./LICENSE"
-
   # The dir for confs
   if osx_target?
     conf_dir = "#{install_dir}/etc/conf.d"
@@ -114,121 +114,119 @@
   }
 
   # Install the checks along with their dependencies
-  block do
-    if windows_target?
-      wheel_build_dir = "#{windows_safe_path(project_dir)}\\.wheels"
-      build_deps_dir = "#{windows_safe_path(project_dir)}\\.build_deps"
-    else
-      wheel_build_dir = "#{project_dir}/.wheels"
-      build_deps_dir = "#{project_dir}/.build_deps"
-    end
-
-    #
-    # Prepare the build env, these dependencies are only needed to build and
-    # install the core integrations.
-    #
-    command "#{python} -m pip download --dest #{build_deps_dir} hatchling==0.25.1", :env => pre_build_env
-    command "#{python} -m pip download --dest #{build_deps_dir} setuptools==66.1.1", :env => pre_build_env # Version from ./setuptools3.rb
-    command "#{python} -m pip install wheel==0.38.4", :env => pre_build_env
-    command "#{python} -m pip install pip-tools==7.3.0", :env => pre_build_env
-    uninstall_buildtime_deps = ['rtloader', 'click', 'first', 'pip-tools']
-    nix_build_env = {
-      "PIP_FIND_LINKS" => "#{build_deps_dir}",
-      "PIP_CONFIG_FILE" => "#{pip_config_file}",
-      # Specify C99 standard explicitly to avoid issues while building some
-      # wheels (eg. ddtrace)
-      "CFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc",
-      "CXXFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc",
-      "LDFLAGS" => "-L#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib",
-      "LD_RUN_PATH" => "#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib",
-      "PATH" => "#{install_dir}/embedded/bin:#{ENV['PATH']}",
-    }
-
-    win_build_env = {
-      "PIP_FIND_LINKS" => "#{build_deps_dir}",
-      "PIP_CONFIG_FILE" => "#{pip_config_file}",
-    }
-
-    # On Linux & Windows, specify the C99 standard explicitly to avoid issues while building some
-    # wheels (eg. ddtrace).
-    # Not explicitly setting that option has caused us problems in the past on SUSE, where the ddtrace
-    # wheel has to be manually built, as the C code in ddtrace doesn't follow the C89 standard (the default value of std).
-    # Note: We don't set this on MacOS, as on MacOS we need to build a bunch of packages & C extensions that
-    # don't have precompiled MacOS wheels. When building C extensions, the CFLAGS variable is added to
-    # the command-line parameters, even when compiling C++ code, where -std=c99 is invalid.
-    # See: https://github.com/python/cpython/blob/v3.8.8/Lib/distutils/sysconfig.py#L227
-    if linux_target? || windows_target?
-      nix_build_env["CFLAGS"] += " -std=c99"
-    end
-
-    # We only have gcc 10.4.0 on linux for now
-    if linux_target?
-      nix_build_env["CC"] = "/opt/gcc-#{gcc_version}/bin/gcc"
-      nix_build_env["CXX"] = "/opt/gcc-#{gcc_version}/bin/g++"
-    end
-
-    # Some libraries (looking at you, aerospike-client-python) need EXT_CFLAGS instead of CFLAGS.
-    nix_specific_build_env = {
-      "aerospike" => nix_build_env.merge({"EXT_CFLAGS" => nix_build_env["CFLAGS"] + " -std=gnu99"}),
-      # Always build pyodbc from source to link to the embedded version of libodbc
-      "pyodbc" => nix_build_env.merge({"PIP_NO_BINARY" => "pyodbc"}),
-    }
-
-    win_specific_build_env = {}
-
-    # We need to explicitly specify RUSTFLAGS for libssl and libcrypto
-    # See https://github.com/pyca/cryptography/issues/8614#issuecomment-1489366475
-    if redhat_target? && !arm_target?
-      nix_specific_build_env["cryptography"] = nix_build_env.merge(
-        {
-          "RUSTFLAGS" => "-C link-arg=-Wl,-rpath,#{install_dir}/embedded/lib",
-          "OPENSSL_DIR" => "#{install_dir}/embedded/",
-          # We have a manually installed dependency (snowflake connector) that already installed cryptography (but without the flags)
-          # We force reinstall it from source to be sure we use the flag
-          "PIP_NO_CACHE_DIR" => "off",
-          "PIP_FORCE_REINSTALL" => "1",
-        }
-      )
-    end
-
-    #
-    # Prepare the requirements file containing ALL the dependencies needed by
-    # any integration. This will provide the "static Python environment" of the Agent.
-    # We don't use the .in file provided by the base check directly because we
-    # want to filter out things before installing.
-    #
-    if windows_target?
-      static_reqs_in_file = "#{windows_safe_path(project_dir)}\\datadog_checks_base\\datadog_checks\\base\\data\\#{agent_requirements_in}"
-      static_reqs_out_folder = "#{windows_safe_path(project_dir)}\\"
-      static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in
-      compiled_reqs_file_path = "#{windows_safe_path(install_dir)}\\#{agent_requirements_file}"
-    else
-      static_reqs_in_file = "#{project_dir}/datadog_checks_base/datadog_checks/base/data/#{agent_requirements_in}"
-      static_reqs_out_folder = "#{project_dir}/"
-      static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in
-      compiled_reqs_file_path = "#{install_dir}/#{agent_requirements_file}"
-    end
-
-    # Remove any excluded requirements from the static-environment req file
-    requirements = Array.new
-
-    # Creating a hash containing the requirements and requirements file path associated to every lib
-    requirements_custom = Hash.new()
-
-    specific_build_env = windows_target? ? win_specific_build_env : nix_specific_build_env
-    build_env = windows_target? ? win_build_env : nix_build_env
-    cwd_base = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_base" : "#{project_dir}/datadog_checks_base"
-    cwd_downloader = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_downloader" : "#{project_dir}/datadog_checks_downloader"
-
-    specific_build_env.each do |lib, env|
-      lib_compiled_req_file_path = (windows_target? ? "#{windows_safe_path(install_dir)}\\" : "#{install_dir}/") + "agent_#{lib}_requirements-py3.txt"
-      requirements_custom[lib] = {
-        "req_lines" => Array.new,
-        "req_file_path" => static_reqs_out_folder + lib + "-py3.in",
-        "compiled_req_file_path" => lib_compiled_req_file_path,
-      }
-    end
+  if windows_target?
+ wheel_build_dir = "#{windows_safe_path(project_dir)}\\.wheels" + build_deps_dir = "#{windows_safe_path(project_dir)}\\.build_deps" + else + wheel_build_dir = "#{project_dir}/.wheels" + build_deps_dir = "#{project_dir}/.build_deps" + end - # We only have gcc 10.4.0 on linux for now - if linux_target? - nix_build_env["CC"] = "/opt/gcc-#{gcc_version}/bin/gcc" - nix_build_env["CXX"] = "/opt/gcc-#{gcc_version}/bin/g++" - end + # + # Prepare the build env, these dependencies are only needed to build and + # install the core integrations. + # + command "#{python} -m pip download --dest #{build_deps_dir} hatchling==0.25.1", :env => pre_build_env + command "#{python} -m pip download --dest #{build_deps_dir} setuptools==66.1.1", :env => pre_build_env # Version from ./setuptools3.rb + command "#{python} -m pip install wheel==0.38.4", :env => pre_build_env + command "#{python} -m pip install pip-tools==7.3.0", :env => pre_build_env + uninstall_buildtime_deps = ['rtloader', 'click', 'first', 'pip-tools'] + nix_build_env = { + "PIP_FIND_LINKS" => "#{build_deps_dir}", + "PIP_CONFIG_FILE" => "#{pip_config_file}", + # Specify C99 standard explicitly to avoid issues while building some + # wheels (eg. ddtrace) + "CFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc", + "CXXFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc", + "LDFLAGS" => "-L#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib", + "LD_RUN_PATH" => "#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib", + "PATH" => "#{install_dir}/embedded/bin:#{ENV['PATH']}", + } - # Some libraries (looking at you, aerospike-client-python) need EXT_CFLAGS instead of CFLAGS. - nix_specific_build_env = { - "aerospike" => nix_build_env.merge({"EXT_CFLAGS" => nix_build_env["CFLAGS"] + " -std=gnu99"}), - # Always build pyodbc from source to link to the embedded version of libodbc - "pyodbc" => nix_build_env.merge({"PIP_NO_BINARY" => "pyodbc"}), - } + win_build_env = { + "PIP_FIND_LINKS" => "#{build_deps_dir}", + "PIP_CONFIG_FILE" => "#{pip_config_file}", + } - win_specific_build_env = {} - - # We need to explicitly specify RUSTFLAGS for libssl and libcrypto - # See https://github.com/pyca/cryptography/issues/8614#issuecomment-1489366475 - if redhat_target? && !arm_target? - nix_specific_build_env["cryptography"] = nix_build_env.merge( - { - "RUSTFLAGS" => "-C link-arg=-Wl,-rpath,#{install_dir}/embedded/lib", - "OPENSSL_DIR" => "#{install_dir}/embedded/", - # We have a manually installed dependency (snowflake connector) that already installed cryptography (but without the flags) - # We force reinstall it from source to be sure we use the flag - "PIP_NO_CACHE_DIR" => "off", - "PIP_FORCE_REINSTALL" => "1", - } - ) - end + # Some libraries (looking at you, aerospike-client-python) need EXT_CFLAGS instead of CFLAGS. + nix_specific_build_env = { + "aerospike" => nix_build_env.merge({"EXT_CFLAGS" => nix_build_env["CFLAGS"] + " -std=gnu99"}), + # Always build pyodbc from source to link to the embedded version of libodbc + "pyodbc" => nix_build_env.merge({"PIP_NO_BINARY" => "pyodbc"}), + } + win_specific_build_env = {} + + # On Linux & Windows, specify the C99 standard explicitly to avoid issues while building some + # wheels (eg. ddtrace). + # Not explicitly setting that option has caused us problems in the past on SUSE, where the ddtrace + # wheel has to be manually built, as the C code in ddtrace doesn't follow the C89 standard (the default value of std). 
+ # Note: We don't set this on MacOS, as on MacOS we need to build a bunch of packages & C extensions that + # don't have precompiled MacOS wheels. When building C extensions, the CFLAGS variable is added to + # the command-line parameters, even when compiling C++ code, where -std=c99 is invalid. + # See: https://github.com/python/cpython/blob/v3.8.8/Lib/distutils/sysconfig.py#L227 + if linux_target? || windows_target? + nix_build_env["CFLAGS"] += " -std=c99" + end - # - # Prepare the requirements file containing ALL the dependencies needed by - # any integration. This will provide the "static Python environment" of the Agent. - # We don't use the .in file provided by the base check directly because we - # want to filter out things before installing. - # - if windows_target? - static_reqs_in_file = "#{windows_safe_path(project_dir)}\\datadog_checks_base\\datadog_checks\\base\\data\\#{agent_requirements_in}" - static_reqs_out_folder = "#{windows_safe_path(project_dir)}\\" - static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in - compiled_reqs_file_path = "#{windows_safe_path(install_dir)}\\#{agent_requirements_file}" - else - static_reqs_in_file = "#{project_dir}/datadog_checks_base/datadog_checks/base/data/#{agent_requirements_in}" - static_reqs_out_folder = "#{project_dir}/" - static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in - compiled_reqs_file_path = "#{install_dir}/#{agent_requirements_file}" - end + # We only have gcc 10.4.0 on linux for now + if linux_target? + nix_build_env["CC"] = "/opt/gcc-#{gcc_version}/bin/gcc" + nix_build_env["CXX"] = "/opt/gcc-#{gcc_version}/bin/g++" + end - # Remove any excluded requirements from the static-environment req file - requirements = Array.new + # We need to explicitly specify RUSTFLAGS for libssl and libcrypto + # See https://github.com/pyca/cryptography/issues/8614#issuecomment-1489366475 + if redhat_target? && !arm_target? + nix_specific_build_env["cryptography"] = nix_build_env.merge( + { + "RUSTFLAGS" => "-C link-arg=-Wl,-rpath,#{install_dir}/embedded/lib", + "OPENSSL_DIR" => "#{install_dir}/embedded/", + # We have a manually installed dependency (snowflake connector) that already installed cryptography (but without the flags) + # We force reinstall it from source to be sure we use the flag + "PIP_NO_CACHE_DIR" => "off", + "PIP_FORCE_REINSTALL" => "1", + } + ) + end - # Creating a hash containing the requirements and requirements file path associated to every lib - requirements_custom = Hash.new() + # + # Prepare the requirements file containing ALL the dependencies needed by + # any integration. This will provide the "static Python environment" of the Agent. + # We don't use the .in file provided by the base check directly because we + # want to filter out things before installing. + # + if windows_target? 
+ static_reqs_in_file = "#{windows_safe_path(project_dir)}\\datadog_checks_base\\datadog_checks\\base\\data\\#{agent_requirements_in}" + static_reqs_out_folder = "#{windows_safe_path(project_dir)}\\" + static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in + compiled_reqs_file_path = "#{windows_safe_path(install_dir)}\\#{agent_requirements_file}" + else + static_reqs_in_file = "#{project_dir}/datadog_checks_base/datadog_checks/base/data/#{agent_requirements_in}" + static_reqs_out_folder = "#{project_dir}/" + static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in + compiled_reqs_file_path = "#{install_dir}/#{agent_requirements_file}" + end - specific_build_env = windows_target? ? win_specific_build_env : nix_specific_build_env - build_env = windows_target? ? win_build_env : nix_build_env - cwd_base = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_base" : "#{project_dir}/datadog_checks_base" - cwd_downloader = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_downloader" : "#{project_dir}/datadog_checks_downloader" + specific_build_env = windows_target? ? win_specific_build_env : nix_specific_build_env + build_env = windows_target? ? win_build_env : nix_build_env + cwd_base = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_base" : "#{project_dir}/datadog_checks_base" + cwd_downloader = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_downloader" : "#{project_dir}/datadog_checks_downloader" + + # Creating a hash containing the requirements and requirements file path associated to every lib + requirements_custom = Hash.new() + specific_build_env.each do |lib, env| + lib_compiled_req_file_path = (windows_target? ? "#{windows_safe_path(install_dir)}\\" : "#{install_dir}/") + "agent_#{lib}_requirements-py3.txt" + requirements_custom[lib] = { + "req_lines" => Array.new, + "req_file_path" => static_reqs_out_folder + lib + "-py3.in", + "compiled_req_file_path" => lib_compiled_req_file_path, + } + end - specific_build_env.each do |lib, env| - lib_compiled_req_file_path = (windows_target? ? "#{windows_safe_path(install_dir)}\\" : "#{install_dir}/") + "agent_#{lib}_requirements-py3.txt" - requirements_custom[lib] = { - "req_lines" => Array.new, - "req_file_path" => static_reqs_out_folder + lib + "-py3.in", - "compiled_req_file_path" => lib_compiled_req_file_path, - } - end + # Remove any excluded requirements from the static-environment req file + requirements = Array.new + block "Create filtered requirements" do File.open("#{static_reqs_in_file}", 'r+').readlines().each do |line| next if excluded_packages.any? { |package_regex| line.match(package_regex) } @@ -275,11 +273,11 @@ command "#{python} -m pip wheel . 
--no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => build_env, :cwd => cwd_downloader command "#{python} -m pip install datadog_checks_downloader --no-deps --no-index --find-links=#{wheel_build_dir}" command "#{python} -m piptools compile --generate-hashes --output-file #{compiled_reqs_file_path} #{static_reqs_out_file} " \ - "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => build_env + "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => build_env # Pip-compiling seperately each lib that needs a custom build installation specific_build_env.each do |lib, env| command "#{python} -m piptools compile --generate-hashes --output-file #{requirements_custom[lib]["compiled_req_file_path"]} #{requirements_custom[lib]["req_file_path"]} " \ - "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => env + "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => env end # @@ -295,7 +293,7 @@ # Then we install the rest (already installed libraries will be ignored) with the main flags command "#{python} -m pip install --no-deps --require-hashes -r #{compiled_reqs_file_path}", :env => build_env # Remove the file after use so it is not shipped - delete "#{compiled_reqs_file_path}" + delete "#{compiled_reqs_file_path}" # # Install Core integrations @@ -306,85 +304,85 @@ command "#{python} -m pip freeze > #{install_dir}/#{final_constraints_file}" if windows_target? - cached_wheels_dir = "#{windows_safe_path(wheel_build_dir)}\\.cached" + cached_wheels_dir = "#{windows_safe_path(wheel_build_dir)}\\.cached" else - cached_wheels_dir = "#{wheel_build_dir}/.cached" + cached_wheels_dir = "#{wheel_build_dir}/.cached" end + checks_to_install = Array.new - # Go through every integration package in `integrations-core`, build and install - Dir.glob("#{project_dir}/*").each do |check_dir| - check = check_dir.split('/').last + block "Collect integrations to install" do + # Go through every integration package in `integrations-core`, build and install + Dir.glob("#{project_dir}/*").each do |check_dir| + check = check_dir.split('/').last - # do not install excluded integrations - next if !File.directory?("#{check_dir}") || excluded_folders.include?(check) + # do not install excluded integrations + next if !File.directory?("#{check_dir}") || excluded_folders.include?(check) - # If there is no manifest file, then we should assume the folder does not - # contain a working check and move onto the next - manifest_file_path = "#{check_dir}/manifest.json" + # If there is no manifest file, then we should assume the folder does not + # contain a working check and move onto the next + manifest_file_path = "#{check_dir}/manifest.json" - # If there is no manifest file, then we should assume the folder does not - # contain a working check and move onto the next - File.exist?(manifest_file_path) || next + # If there is no manifest file, then we should assume the folder does not + # contain a working check and move onto the next + File.exist?(manifest_file_path) || next - manifest = JSON.parse(File.read(manifest_file_path)) - if manifest.key?("supported_os") - manifest["supported_os"].include?(os) || next - else - if os == "mac_os" - tag = "Supported OS::macOS" + manifest = JSON.parse(File.read(manifest_file_path)) + if manifest.key?("supported_os") + manifest["supported_os"].include?(os) || next else - tag = "Supported OS::#{os.capitalize}" + if os == "mac_os" + tag = "Supported OS::macOS" + else + tag = "Supported 
OS::#{os.capitalize}" + end + + manifest["tile"]["classifier_tags"].include?(tag) || next end - manifest["tile"]["classifier_tags"].include?(tag) || next - end + File.file?("#{check_dir}/setup.py") || File.file?("#{check_dir}/pyproject.toml") || next + # Check if it supports Python 3. + support = `inv agent.check-supports-python-version #{check_dir} 3` + if support == "False" + log.info(log_key) { "Skipping '#{check}' since it does not support Python 3." } + next + end - File.file?("#{check_dir}/setup.py") || File.file?("#{check_dir}/pyproject.toml") || next - # Check if it supports Python 3. - support = `inv agent.check-supports-python-version #{check_dir} 3` - if support == "False" - log.info(log_key) { "Skipping '#{check}' since it does not support Python 3." } - next + checks_to_install.push(check) end - - checks_to_install.push(check) end - tasks_dir_in = windows_safe_path(Dir.pwd) + installed_list = Array.new cache_bucket = ENV.fetch('INTEGRATION_WHEELS_CACHE_BUCKET', '') - cache_branch = `cd .. && inv release.get-release-json-value base_branch`.strip - # On windows, `aws` actually executes Ruby's AWS SDK, but we want the Python one - awscli = if windows_target? then '"c:\Program files\python39\scripts\aws"' else 'aws' end - if cache_bucket != '' - mkdir cached_wheels_dir - command "inv -e agent.get-integrations-from-cache " \ - "--python 3 --bucket #{cache_bucket} " \ - "--branch #{cache_branch || 'main'} " \ - "--integrations-dir #{windows_safe_path(project_dir)} " \ - "--target-dir #{cached_wheels_dir} " \ - "--integrations #{checks_to_install.join(',')} " \ - "--awscli #{awscli}", - :cwd => tasks_dir_in - - # install all wheels from cache in one pip invocation to speed things up - if windows_target? - command "#{python} -m pip install --no-deps --no-index " \ - " --find-links #{windows_safe_path(cached_wheels_dir)} -r #{windows_safe_path(cached_wheels_dir)}\\found.txt" - else - command "#{python} -m pip install --no-deps --no-index " \ - "--find-links #{cached_wheels_dir} -r #{cached_wheels_dir}/found.txt" + block "Install cached wheels" do + tasks_dir_in = windows_safe_path(Dir.pwd) + cache_branch = (shellout! "inv release.get-release-json-value base_branch", cwd: File.expand_path('..', tasks_dir_in)).stdout.strip + # On windows, `aws` actually executes Ruby's AWS SDK, but we want the Python one + awscli = if windows_target? then '"c:\Program files\python39\scripts\aws"' else 'aws' end + if cache_bucket != '' + mkdir cached_wheels_dir + shellout! "inv -e agent.get-integrations-from-cache " \ + "--python 3 --bucket #{cache_bucket} " \ + "--branch #{cache_branch || 'main'} " \ + "--integrations-dir #{windows_safe_path(project_dir)} " \ + "--target-dir #{cached_wheels_dir} " \ + "--integrations #{checks_to_install.join(',')} " \ + "--awscli #{awscli}", + :cwd => tasks_dir_in + + # install all wheels from cache in one pip invocation to speed things up + if windows_target? + shellout! "#{python} -m pip install --no-deps --no-index " \ + " --find-links #{windows_safe_path(cached_wheels_dir)} -r #{windows_safe_path(cached_wheels_dir)}\\found.txt" + else + shellout! 
"#{python} -m pip install --no-deps --no-index " \ + "--find-links #{cached_wheels_dir} -r #{cached_wheels_dir}/found.txt" + end end - end - - block do - # we have to do this operation in block, so that it can access files created by the - # inv agent.get-integrations-from-cache command # get list of integration wheels already installed from cache - installed_list = Array.new if cache_bucket != '' - installed_out = `#{python} -m pip list --format json` + installed_out = (shellout! "#{python} -m pip list --format json").stdout if $?.exitstatus == 0 installed = JSON.parse(installed_out) installed.each do |package| @@ -398,104 +396,82 @@ raise "Failed to list pip installed packages" end end + end - checks_to_install.each do |check| - check_dir = File.join(project_dir, check) - check_conf_dir = "#{conf_dir}/#{check}.d" - # For each conf file, if it already exists, that means the `datadog-agent` software def - # wrote it first. In that case, since the agent's confs take precedence, skip the conf - - # Copy the check config to the conf directories - conf_file_example = "#{check_dir}/datadog_checks/#{check}/data/conf.yaml.example" - if File.exist? conf_file_example - mkdir check_conf_dir - copy conf_file_example, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/conf.yaml.example" - end - - # Copy the default config, if it exists - conf_file_default = "#{check_dir}/datadog_checks/#{check}/data/conf.yaml.default" - if File.exist? conf_file_default - mkdir check_conf_dir - copy conf_file_default, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/conf.yaml.default" - end + checks_to_install.each do |check| + check_dir = File.join(project_dir, check) + check_conf_dir = "#{conf_dir}/#{check}.d" - # Copy the metric file, if it exists - metrics_yaml = "#{check_dir}/datadog_checks/#{check}/data/metrics.yaml" - if File.exist? metrics_yaml + # For each conf file, if it already exists, that means the `datadog-agent` software def + # wrote it first. In that case, since the agent's confs take precedence, skip the conf + conf_files = ["conf.yaml.example", "conf.yaml.default", "metrics.yaml", "auto_conf.yaml"] + conf_files.each do |filename| + src = windows_safe_path(check_dir,"datadog_checks", check, "data", filename) + if File.exist? src mkdir check_conf_dir - copy metrics_yaml, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/metrics.yaml" - end - - # We don't have auto_conf on windows yet - auto_conf_yaml = "#{check_dir}/datadog_checks/#{check}/data/auto_conf.yaml" - if File.exist? auto_conf_yaml - mkdir check_conf_dir - copy auto_conf_yaml, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/auto_conf.yaml" - end - - # Copy SNMP profiles - profile_folders = ['profiles', 'default_profiles'] - profile_folders.each do |profile_folder| - folder_path = "#{check_dir}/datadog_checks/#{check}/data/#{profile_folder}" - if File.exist? 
folder_path
-            copy folder_path, "#{check_conf_dir}/"
-          end
-        end
-
-        # pip < 21.2 replace underscores by dashes in package names per https://pip.pypa.io/en/stable/news/#v21-2
-        # whether or not this might switch back in the future is not guaranteed, so we check for both name
-        # with dashes and underscores
-        if installed_list.include?(check) || installed_list.include?(check.gsub('_', '-'))
-          next
+          dest = check_conf_dir
+          # Requires a block because it depends on info only available at build time
+          block "Copy #{src} to #{dest} without overwriting" do
+            FileUtils.cp_r(src, dest) unless File.exist?(windows_safe_path(dest, filename))
+          end
         end
+      end
 
-        if windows_target?
-          command "#{python} -m pip wheel . --no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => win_build_env, :cwd => "#{windows_safe_path(project_dir)}\\#{check}"
-        else
-          command "#{python} -m pip wheel . --no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => nix_build_env, :cwd => "#{project_dir}/#{check}"
-        end
-        command "#{python} -m pip install datadog-#{check} --no-deps --no-index --find-links=#{wheel_build_dir}"
-        if cache_bucket != '' && ENV.fetch('INTEGRATION_WHEELS_SKIP_CACHE_UPLOAD', '') == '' && cache_branch != nil
-          command "inv -e agent.upload-integration-to-cache " \
-            "--python 3 --bucket #{cache_bucket} " \
-            "--branch #{cache_branch} " \
-            "--integrations-dir #{windows_safe_path(project_dir)} " \
-            "--build-dir #{wheel_build_dir} " \
-            "--integration #{check} " \
-            "--awscli #{awscli}",
-            :cwd => tasks_dir_in
+      # Copy SNMP profiles
+      profile_folders = ['profiles', 'default_profiles']
+      profile_folders.each do |profile_folder|
+        folder_path = "#{check_dir}/datadog_checks/#{check}/data/#{profile_folder}"
+        if File.exist? folder_path
+          copy folder_path, "#{check_conf_dir}/"
         end
       end
 
-      # From now on we don't need piptools anymore, uninstall its deps so we don't include them in the final artifact
-      uninstall_buildtime_deps.each do |dep|
-        command "#{python} -m pip uninstall -y #{dep}"
+      # pip < 21.2 replaces underscores with dashes in package names per https://pip.pypa.io/en/stable/news/#v21-2
+      # whether this might switch back in the future is not guaranteed, so we check for both names,
+      # with dashes and with underscores
+      if installed_list.include?(check) || installed_list.include?(check.gsub('_', '-'))
+        next
      end
-    end
-
-    block do
-      # We have to run these operations in block, so they get applied after operations
-      # from the last block
-      # Patch applies to only one file: set it explicitly as a target, no need for -p
       if windows_target?
-        patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{python_3_embedded}/Lib/site-packages/psutil/__init__.py"
+        command "#{python} -m pip wheel . --no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => win_build_env, :cwd => "#{windows_safe_path(project_dir)}\\#{check}"
       else
-        patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{install_dir}/embedded/lib/python3.11/site-packages/psutil/__init__.py"
+        command "#{python} -m pip wheel . 
--no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => nix_build_env, :cwd => "#{project_dir}/#{check}" + end + command "#{python} -m pip install datadog-#{check} --no-deps --no-index --find-links=#{wheel_build_dir}" + if cache_bucket != '' && ENV.fetch('INTEGRATION_WHEELS_SKIP_CACHE_UPLOAD', '') == '' && cache_branch != nil + command "inv -e agent.upload-integration-to-cache " \ + "--python 3 --bucket #{cache_bucket} " \ + "--branch #{cache_branch} " \ + "--integrations-dir #{windows_safe_path(project_dir)} " \ + "--build-dir #{wheel_build_dir} " \ + "--integration #{check} " \ + "--awscli #{awscli}", + :cwd => tasks_dir_in end + end - # Run pip check to make sure the agent's python environment is clean, all the dependencies are compatible - command "#{python} -m pip check" + # From now on we don't need piptools anymore, uninstall its deps so we don't include them in the final artifact + uninstall_buildtime_deps.each do |dep| + command "#{python} -m pip uninstall -y #{dep}" end - block do - # Removing tests that don't need to be shipped in the embedded folder - if windows_target? - delete "#{python_3_embedded}/Lib/site-packages/Cryptodome/SelfTest/" - else - delete "#{install_dir}/embedded/lib/python3.11/site-packages/Cryptodome/SelfTest/" - end + # Patch applies to only one file: set it explicitly as a target, no need for -p + if windows_target? + patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{python_3_embedded}/Lib/site-packages/psutil/__init__.py" + else + patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{install_dir}/embedded/lib/python3.11/site-packages/psutil/__init__.py" end + + # Run pip check to make sure the agent's python environment is clean, all the dependencies are compatible + command "#{python} -m pip check" + end + + # Removing tests that don't need to be shipped in the embedded folder + if windows_target? + delete "#{python_3_embedded}/Lib/site-packages/Cryptodome/SelfTest/" + else + delete "#{install_dir}/embedded/lib/python3.11/site-packages/Cryptodome/SelfTest/" end # Ship `requirements-agent-release.txt` file containing the versions of every check shipped with the agent
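
Note on the `block` steps that remain after this change: in an Omnibus software definition, plain Ruby inside `build do` runs when the definition file is loaded, while steps such as `command`, `copy`, `delete` and `patch` are queued and executed later, at build time. Code that reads state produced by an earlier step (files written by a `command`, or the wheels fetched by `inv agent.get-integrations-from-cache`) therefore still has to be wrapped in a named `block`, which is queued and ordered like any other step. A minimal sketch of the difference, illustrative only and not part of this patch:

    build do
      # Queued: executes at build time.
      command "touch #{project_dir}/generated.txt"

      # Plain Ruby would execute at load time, before the command above
      # has run, so the file would not exist yet:
      #   exists = File.exist?("#{project_dir}/generated.txt")

      # Queued like `command`: executes at build time, after the step above.
      block "Read build-time output" do
        exists = File.exist?("#{project_dir}/generated.txt")
        log.info(log_key) { "generated.txt present: #{exists}" }
      end
    end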
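The requirements handling above follows the pip-tools lock-file flow: `piptools compile --generate-hashes` resolves the filtered `.in` file into a fully pinned requirements file that records the sha256 hashes of every artifact, and `pip install --require-hashes` then refuses anything whose hash is not listed. A condensed sketch of that pipeline, using a hypothetical `example.in` in place of the real filtered file:

    # Resolve example.in into a lock file, pinning every transitive
    # dependency and recording its expected hashes.
    command "#{python} -m piptools compile --generate-hashes " \
            "--output-file #{install_dir}/example.txt #{project_dir}/example.in"

    # Install strictly from the lock file; pip verifies each download
    # against the recorded hashes and pulls in no extra dependencies.
    command "#{python} -m pip install --no-deps --require-hashes -r #{install_dir}/example.txt"

    # The lock file is a build artifact, so it is not shipped.
    delete "#{install_dir}/example.txt"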
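The switch from backticks to `shellout!` in the cache steps is what lets them move inside named `block`s: a backtick call runs whenever Ruby happens to evaluate it and reports failure only through the global `$?`, whereas `shellout!` (Omnibus wraps Mixlib::ShellOut) runs when the step does, raises on a non-zero exit status, and returns a result object whose stdout can be read directly. A minimal sketch, assuming the same helper:

    block "List installed packages" do
      # shellout! raises if pip exits non-zero, so no manual
      # exit-status bookkeeping is needed here.
      installed_out = (shellout! "#{python} -m pip list --format json").stdout
      installed = JSON.parse(installed_out)
      log.info(log_key) { "#{installed.length} wheels already installed" }
    end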