From 43458742ff845c6f6c1d770aee9bf0179e9e1cf7 Mon Sep 17 00:00:00 2001 From: Alex Lopez Date: Fri, 17 Nov 2023 13:54:28 +0100 Subject: [PATCH] Remove use of nested blocks in integrations-py2 software definition --- .../datadog-agent-integrations-py2.rb | 524 +++++++++--------- 1 file changed, 251 insertions(+), 273 deletions(-) diff --git a/omnibus/config/software/datadog-agent-integrations-py2.rb b/omnibus/config/software/datadog-agent-integrations-py2.rb index ed4a30f305203d..b504c15ab38cbf 100644 --- a/omnibus/config/software/datadog-agent-integrations-py2.rb +++ b/omnibus/config/software/datadog-agent-integrations-py2.rb @@ -8,6 +8,9 @@ name 'datadog-agent-integrations-py2' +license "BSD-3-Clause" +license_file "./LICENSE" + dependency 'datadog-agent' dependency 'datadog-agent-integrations-py2-dependencies' @@ -77,9 +80,6 @@ agent_requirements_in = 'agent_requirements.in' build do - license "BSD-3-Clause" - license_file "./LICENSE" - # The dir for confs if osx_target? conf_dir = "#{install_dir}/etc/conf.d" @@ -105,101 +105,100 @@ } # Install the checks along with their dependencies - block do - if windows_target? - wheel_build_dir = "#{windows_safe_path(project_dir)}\\.wheels" - build_deps_dir = "#{windows_safe_path(project_dir)}\\.build_deps" - else - wheel_build_dir = "#{project_dir}/.wheels" - build_deps_dir = "#{project_dir}/.build_deps" - end - - # - # Prepare the build env, these dependencies are only needed to build and - # install the core integrations. - # - command "#{pip} download --dest #{build_deps_dir} hatchling==0.25.1", :env => pre_build_env - command "#{pip} download --dest #{build_deps_dir} setuptools==40.9.0", :env => pre_build_env # Version from ./setuptools2.rb - command "#{pip} install wheel==0.37.1", :env => pre_build_env # Pin to the last version that supports Python 2 - command "#{pip} install setuptools-scm==5.0.2", :env => pre_build_env # Pin to the last version that supports Python 2 - command "#{pip} install pip-tools==5.4.0", :env => pre_build_env - uninstall_buildtime_deps = ['rtloader', 'click', 'first', 'pip-tools'] - nix_build_env = { - "PIP_FIND_LINKS" => "#{build_deps_dir}", - "PIP_CONFIG_FILE" => "#{pip_config_file}", - "CFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc", - "CXXFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc", - "LDFLAGS" => "-L#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib", - "LD_RUN_PATH" => "#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib", - "PATH" => "#{install_dir}/embedded/bin:#{ENV['PATH']}", - } - win_build_env = { - "PIP_FIND_LINKS" => "#{build_deps_dir}", - "PIP_CONFIG_FILE" => "#{pip_config_file}", - } - # Some libraries (looking at you, aerospike-client-python) need EXT_CFLAGS instead of CFLAGS. - nix_specific_build_env = { - "aerospike" => nix_build_env.merge({"EXT_CFLAGS" => nix_build_env["CFLAGS"] + " -std=gnu99"}), - # Always build pyodbc from source to link to the embedded version of libodbc - "pyodbc" => nix_build_env.merge({"PIP_NO_BINARY" => "pyodbc"}), - } - win_specific_build_env = {} - - - # On Linux & Windows, specify the C99 standard explicitly to avoid issues while building some - # wheels (eg. ddtrace). - # Not explicitly setting that option has caused us problems in the past on SUSE, where the ddtrace - # wheel has to be manually built, as the C code in ddtrace doesn't follow the C89 standard (the default value of std). 
- # Note: We don't set this on MacOS, as on MacOS we need to build a bunch of packages & C extensions that - # don't have precompiled MacOS wheels. When building C extensions, the CFLAGS variable is added to - # the command-line parameters, even when compiling C++ code, where -std=c99 is invalid. - # See: https://github.com/python/cpython/blob/v2.7.18/Lib/distutils/sysconfig.py#L222 - if linux_target? || windows_target? - nix_build_env["CFLAGS"] += " -std=c99" - end - - # - # Prepare the requirements file containing ALL the dependencies needed by - # any integration. This will provide the "static Python environment" of the Agent. - # We don't use the .in file provided by the base check directly because we - # want to filter out things before installing. - # - if windows_target? - static_reqs_in_file = "#{windows_safe_path(project_dir)}\\datadog_checks_base\\datadog_checks\\base\\data\\#{agent_requirements_in}" - static_reqs_out_folder = "#{windows_safe_path(project_dir)}\\" - static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in - compiled_reqs_file_path = "#{windows_safe_path(install_dir)}\\#{agent_requirements_file}" - else - static_reqs_in_file = "#{project_dir}/datadog_checks_base/datadog_checks/base/data/#{agent_requirements_in}" - static_reqs_out_folder = "#{project_dir}/" - static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in - compiled_reqs_file_path = "#{install_dir}/#{agent_requirements_file}" - end + if windows_target? + wheel_build_dir = "#{windows_safe_path(project_dir)}\\.wheels" + build_deps_dir = "#{windows_safe_path(project_dir)}\\.build_deps" + else + wheel_build_dir = "#{project_dir}/.wheels" + build_deps_dir = "#{project_dir}/.build_deps" + end - # Remove any excluded requirements from the static-environment req file - requirements = Array.new + # + # Prepare the build env, these dependencies are only needed to build and + # install the core integrations. + # + command "#{pip} download --dest #{build_deps_dir} hatchling==0.25.1", :env => pre_build_env + command "#{pip} download --dest #{build_deps_dir} setuptools==40.9.0", :env => pre_build_env # Version from ./setuptools2.rb + command "#{pip} install wheel==0.37.1", :env => pre_build_env # Pin to the last version that supports Python 2 + command "#{pip} install setuptools-scm==5.0.2", :env => pre_build_env # Pin to the last version that supports Python 2 + command "#{pip} install pip-tools==5.4.0", :env => pre_build_env + uninstall_buildtime_deps = ['rtloader', 'click', 'first', 'pip-tools'] + nix_build_env = { + "PIP_FIND_LINKS" => "#{build_deps_dir}", + "PIP_CONFIG_FILE" => "#{pip_config_file}", + "CFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc", + "CXXFLAGS" => "-I#{install_dir}/embedded/include -I/opt/mqm/inc", + "LDFLAGS" => "-L#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib", + "LD_RUN_PATH" => "#{install_dir}/embedded/lib -L/opt/mqm/lib64 -L/opt/mqm/lib", + "PATH" => "#{install_dir}/embedded/bin:#{ENV['PATH']}", + } + win_build_env = { + "PIP_FIND_LINKS" => "#{build_deps_dir}", + "PIP_CONFIG_FILE" => "#{pip_config_file}", + } + # Some libraries (looking at you, aerospike-client-python) need EXT_CFLAGS instead of CFLAGS. 
+ nix_specific_build_env = { + "aerospike" => nix_build_env.merge({"EXT_CFLAGS" => nix_build_env["CFLAGS"] + " -std=gnu99"}), + # Always build pyodbc from source to link to the embedded version of libodbc + "pyodbc" => nix_build_env.merge({"PIP_NO_BINARY" => "pyodbc"}), + } + win_specific_build_env = {} + + + # On Linux & Windows, specify the C99 standard explicitly to avoid issues while building some + # wheels (eg. ddtrace). + # Not explicitly setting that option has caused us problems in the past on SUSE, where the ddtrace + # wheel has to be manually built, as the C code in ddtrace doesn't follow the C89 standard (the default value of std). + # Note: We don't set this on MacOS, as on MacOS we need to build a bunch of packages & C extensions that + # don't have precompiled MacOS wheels. When building C extensions, the CFLAGS variable is added to + # the command-line parameters, even when compiling C++ code, where -std=c99 is invalid. + # See: https://github.com/python/cpython/blob/v2.7.18/Lib/distutils/sysconfig.py#L222 + if linux_target? || windows_target? + nix_build_env["CFLAGS"] += " -std=c99" + end - # Creating a hash containing the requirements and requirements file path associated to every lib - requirements_custom = Hash.new() + # + # Prepare the requirements file containing ALL the dependencies needed by + # any integration. This will provide the "static Python environment" of the Agent. + # We don't use the .in file provided by the base check directly because we + # want to filter out things before installing. + # + if windows_target? + static_reqs_in_file = "#{windows_safe_path(project_dir)}\\datadog_checks_base\\datadog_checks\\base\\data\\#{agent_requirements_in}" + static_reqs_out_folder = "#{windows_safe_path(project_dir)}\\" + static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in + compiled_reqs_file_path = "#{windows_safe_path(install_dir)}\\#{agent_requirements_file}" + else + static_reqs_in_file = "#{project_dir}/datadog_checks_base/datadog_checks/base/data/#{agent_requirements_in}" + static_reqs_out_folder = "#{project_dir}/" + static_reqs_out_file = static_reqs_out_folder + filtered_agent_requirements_in + compiled_reqs_file_path = "#{install_dir}/#{agent_requirements_file}" + end - specific_build_env = windows_target? ? win_specific_build_env : nix_specific_build_env - build_env = windows_target? ? win_build_env : nix_build_env - cwd = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_base" : "#{project_dir}/datadog_checks_base" + specific_build_env = windows_target? ? win_specific_build_env : nix_specific_build_env + build_env = windows_target? ? win_build_env : nix_build_env + cwd = windows_target? ? "#{windows_safe_path(project_dir)}\\datadog_checks_base" : "#{project_dir}/datadog_checks_base" + + # Creating a hash containing the requirements and requirements file path associated to every lib + requirements_custom = Hash.new() + specific_build_env.each do |lib, env| + lib_compiled_req_file_path = (windows_target? ? "#{windows_safe_path(install_dir)}\\" : "#{install_dir}/") + "agent_#{lib}_requirements-py2.txt" + requirements_custom[lib] = { + "req_lines" => Array.new, + "req_file_path" => static_reqs_out_folder + lib + "-py2.in", + "compiled_req_file_path" => lib_compiled_req_file_path, + } + end - specific_build_env.each do |lib, env| - lib_compiled_req_file_path = (windows_target? ? 
"#{windows_safe_path(install_dir)}\\" : "#{install_dir}/") + "agent_#{lib}_requirements-py2.txt" - requirements_custom[lib] = { - "req_lines" => Array.new, - "req_file_path" => static_reqs_out_folder + lib + "-py2.in", - "compiled_req_file_path" => lib_compiled_req_file_path, - } - end + # Remove any excluded requirements from the static-environment req file + requirements = Array.new + block "Create filtered requirements" do File.open("#{static_reqs_in_file}", 'r+').readlines().each do |line| next if excluded_packages.any? { |package_regex| line.match(package_regex) } if line.start_with?('psycopg[binary]') && !windows_target? - line.sub! 'psycopg[binary]', 'psycopg[c]' + line.sub! 'psycopg[binary]', 'psycopg[c]' end # Keeping the custom env requirements lines apart to install them with a specific env requirements_custom.each do |lib, lib_req| @@ -215,67 +214,71 @@ # Adding pympler for memory debug purposes requirements.push("pympler==0.7") - # Render the filtered requirements file + end + + # Render the filtered requirements file + erb source: "static_requirements.txt.erb", + dest: "#{static_reqs_out_file}", + mode: 0640, + vars: { requirements: requirements } + + # Render the filtered libraries that are to be built with different env var + requirements_custom.each do |lib, lib_req| erb source: "static_requirements.txt.erb", - dest: "#{static_reqs_out_file}", + dest: "#{lib_req["req_file_path"]}", mode: 0640, - vars: { requirements: requirements } - - # Render the filtered libraries that are to be built with different env var - requirements_custom.each do |lib, lib_req| - erb source: "static_requirements.txt.erb", - dest: "#{lib_req["req_file_path"]}", - mode: 0640, - vars: { requirements: lib_req["req_lines"] } - end - - # Increasing pip max retries (default: 5 times) and pip timeout (default 15 seconds) to avoid blocking network errors - pip_max_retries = 20 - pip_timeout = 20 - - # Use pip-compile to create the final requirements file. Notice when we invoke `pip` through `python -m pip <...>`, - # there's no need to refer to `pip`, the interpreter will pick the right script. - command "#{python} -m pip wheel . --no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => build_env, :cwd => cwd - command "#{python} -m pip install datadog_checks_base --no-deps --no-index --find-links=#{wheel_build_dir}" - command "#{python} -m piptools compile --generate-hashes --output-file #{compiled_reqs_file_path} #{static_reqs_out_file} " \ - "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => build_env - # Pip-compiling seperately each lib that needs a custom build installation - specific_build_env.each do |lib, env| - command "#{python} -m piptools compile --generate-hashes --output-file #{requirements_custom[lib]["compiled_req_file_path"]} #{requirements_custom[lib]["req_file_path"]} " \ - "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => env - end + vars: { requirements: lib_req["req_lines"] } + end - # - # Install static-environment requirements that the Agent and all checks will use - # + # Increasing pip max retries (default: 5 times) and pip timeout (default 15 seconds) to avoid blocking network errors + pip_max_retries = 20 + pip_timeout = 20 + + # Use pip-compile to create the final requirements file. Notice when we invoke `pip` through `python -m pip <...>`, + # there's no need to refer to `pip`, the interpreter will pick the right script. + command "#{python} -m pip wheel . 
--no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => build_env, :cwd => cwd + command "#{python} -m pip install datadog_checks_base --no-deps --no-index --find-links=#{wheel_build_dir}" + command "#{python} -m piptools compile --generate-hashes --output-file #{compiled_reqs_file_path} #{static_reqs_out_file} " \ + "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => build_env + # Pip-compiling seperately each lib that needs a custom build installation + specific_build_env.each do |lib, env| + command "#{python} -m piptools compile --generate-hashes --output-file #{requirements_custom[lib]["compiled_req_file_path"]} #{requirements_custom[lib]["req_file_path"]} " \ + "--pip-args \"--retries #{pip_max_retries} --timeout #{pip_timeout}\"", :env => env + end - # First we install the dependencies that need specific flags - specific_build_env.each do |lib, env| - command "#{python} -m pip install --no-deps --require-hashes -r #{requirements_custom[lib]["compiled_req_file_path"]}", :env => env - # Remove the file after use so it is not shipped - delete "#{requirements_custom[lib]["compiled_req_file_path"]}" - end + # + # Install static-environment requirements that the Agent and all checks will use + # - # Then we install the rest (already installed libraries will be ignored) with the main flags - command "#{python} -m pip install --no-deps --require-hashes -r #{compiled_reqs_file_path}", :env => build_env + # First we install the dependencies that need specific flags + specific_build_env.each do |lib, env| + command "#{python} -m pip install --no-deps --require-hashes -r #{requirements_custom[lib]["compiled_req_file_path"]}", :env => env # Remove the file after use so it is not shipped - delete "#{compiled_reqs_file_path}" + delete "#{requirements_custom[lib]["compiled_req_file_path"]}" + end - # - # Install Core integrations - # + # Then we install the rest (already installed libraries will be ignored) with the main flags + command "#{python} -m pip install --no-deps --require-hashes -r #{compiled_reqs_file_path}", :env => build_env + # Remove the file after use so it is not shipped + delete "#{compiled_reqs_file_path}" - # Create a constraint file after installing all the core dependencies and before any integration - # This is then used as a constraint file by the integration command to avoid messing with the agent's python environment - command "#{pip} freeze > #{install_dir}/#{final_constraints_file}" + # + # Install Core integrations + # - if windows_target? - cached_wheels_dir = "#{windows_safe_path(wheel_build_dir)}\\.cached" - else - cached_wheels_dir = "#{wheel_build_dir}/.cached" - end - checks_to_install = Array.new + # Create a constraint file after installing all the core dependencies and before any integration + # This is then used as a constraint file by the integration command to avoid messing with the agent's python environment + command "#{pip} freeze > #{install_dir}/#{final_constraints_file}" + if windows_target? 
+ cached_wheels_dir = "#{windows_safe_path(wheel_build_dir)}\\.cached" + else + cached_wheels_dir = "#{wheel_build_dir}/.cached" + end + + checks_to_install = Array.new + + block "Collect integrations to install" do # Go through every integration package in `integrations-core`, build and install Dir.glob("#{project_dir}/*").each do |check_dir| check = check_dir.split('/').last @@ -314,169 +317,144 @@ checks_to_install.push(check) end + end + installed_list = Array.new + cache_bucket = ENV.fetch('INTEGRATION_WHEELS_CACHE_BUCKET', '') + block "Install cached wheels" do tasks_dir_in = windows_safe_path(Dir.pwd) - cache_bucket = ENV.fetch('INTEGRATION_WHEELS_CACHE_BUCKET', '') - cache_branch = `cd .. && inv release.get-release-json-value base_branch`.strip + cache_branch = (shellout! "inv release.get-release-json-value base_branch", cwd: File.expand_path('..', tasks_dir_in)).stdout.strip # On windows, `aws` actually executes Ruby's AWS SDK, but we want the Python one awscli = if windows_target? then '"c:\Program files\python39\scripts\aws"' else 'aws' end if cache_bucket != '' mkdir cached_wheels_dir - command "inv -e agent.get-integrations-from-cache " \ - "--python 2 --bucket #{cache_bucket} " \ - "--branch #{cache_branch || 'main'} " \ - "--integrations-dir #{windows_safe_path(project_dir)} " \ - "--target-dir #{cached_wheels_dir} " \ - "--integrations #{checks_to_install.join(',')} " \ - "--awscli #{awscli}", - :cwd => tasks_dir_in + shellout! "inv -e agent.get-integrations-from-cache " \ + "--python 2 --bucket #{cache_bucket} " \ + "--branch #{cache_branch || 'main'} " \ + "--integrations-dir #{windows_safe_path(project_dir)} " \ + "--target-dir #{cached_wheels_dir} " \ + "--integrations #{checks_to_install.join(',')} " \ + "--awscli #{awscli}", + :cwd => tasks_dir_in # install all wheels from cache in one pip invocation to speed things up if windows_target? - command "#{python} -m pip install --no-deps --no-index " \ - "--find-links #{windows_safe_path(cached_wheels_dir)} -r #{windows_safe_path(cached_wheels_dir)}\\found.txt" + shellout! "#{python} -m pip install --no-deps --no-index " \ + "--find-links #{windows_safe_path(cached_wheels_dir)} -r #{windows_safe_path(cached_wheels_dir)}\\found.txt" else - command "#{pip} install --no-deps --no-index " \ - " --find-links #{cached_wheels_dir} -r #{cached_wheels_dir}/found.txt" + shellout! "#{pip} install --no-deps --no-index " \ + " --find-links #{cached_wheels_dir} -r #{cached_wheels_dir}/found.txt" end end - block do - # we have to do this operation in block, so that it can access files created by the - # inv agent.get-integrations-from-cache command - - # get list of integration wheels already installed from cache - installed_list = Array.new - if cache_bucket != '' - if windows_target? - installed_out = `#{python} -m pip list --format json` - else - installed_out = `#{pip} list --format json` - end - if $?.exitstatus == 0 - installed = JSON.parse(installed_out) - installed.each do |package| - package.each do |key, value| - if key == "name" && value.start_with?("datadog-") - installed_list.push(value["datadog-".length..-1]) - end + # get list of integration wheels already installed from cache + if cache_bucket != '' + if windows_target? + installed_out = (shellout! "#{python} -m pip list --format json").stdout + else + installed_out = (shellout! 
"#{pip} list --format json").stdout + end + if $?.exitstatus == 0 + installed = JSON.parse(installed_out) + installed.each do |package| + package.each do |key, value| + if key == "name" && value.start_with?("datadog-") + installed_list.push(value["datadog-".length..-1]) end end - else - raise "Failed to list pip installed packages" end + else + raise "Failed to list pip installed packages" end + end + end - checks_to_install.each do |check| - check_dir = File.join(project_dir, check) - check_conf_dir = "#{conf_dir}/#{check}.d" - # For each conf file, if it already exists, that means the `datadog-agent` software def - # wrote it first. In that case, since the agent's confs take precedence, skip the conf - - # Copy the check config to the conf directories - conf_file_example = "#{check_dir}/datadog_checks/#{check}/data/conf.yaml.example" - if File.exist? conf_file_example - mkdir check_conf_dir - copy conf_file_example, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/conf.yaml.example" - end - - # Copy the default config, if it exists - conf_file_default = "#{check_dir}/datadog_checks/#{check}/data/conf.yaml.default" - if File.exist? conf_file_default - mkdir check_conf_dir - copy conf_file_default, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/conf.yaml.default" - end - - # Copy the metric file, if it exists - metrics_yaml = "#{check_dir}/datadog_checks/#{check}/data/metrics.yaml" - if File.exist? metrics_yaml - mkdir check_conf_dir - copy metrics_yaml, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/metrics.yaml" - end - - # We don't have auto_conf on windows yet - auto_conf_yaml = "#{check_dir}/datadog_checks/#{check}/data/auto_conf.yaml" - if File.exist? auto_conf_yaml - mkdir check_conf_dir - copy auto_conf_yaml, "#{check_conf_dir}/" unless File.exist? "#{check_conf_dir}/auto_conf.yaml" - end - - # Copy SNMP profiles - profile_folders = ['profiles', 'default_profiles'] - profile_folders.each do |profile_folder| - folder_path = "#{check_dir}/datadog_checks/#{check}/data/#{profile_folder}" - if File.exist? folder_path - copy folder_path, "#{check_conf_dir}/" - end - end - - # pip < 21.2 replace underscores by dashes in package names per https://pip.pypa.io/en/stable/news/#v21-2 - # whether or not this might switch back in the future is not guaranteed, so we check for both name - # with dashes and underscores - if installed_list.include?(check) || installed_list.include?(check.gsub('_', '-')) - next - end - - if windows_target? - command "#{python} -m pip wheel . --no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => win_build_env, :cwd => "#{windows_safe_path(project_dir)}\\#{check}" - command "#{python} -m pip install datadog-#{check} --no-deps --no-index --find-links=#{wheel_build_dir}" - else - command "#{pip} wheel . 
--no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => nix_build_env, :cwd => "#{project_dir}/#{check}" - command "#{pip} install datadog-#{check} --no-deps --no-index --find-links=#{wheel_build_dir}" - end - if cache_bucket != '' && ENV.fetch('INTEGRATION_WHEELS_SKIP_CACHE_UPLOAD', '') == '' && cache_branch != nil - command "inv -e agent.upload-integration-to-cache " \ - "--python 2 --bucket #{cache_bucket} " \ - "--branch #{cache_branch} " \ - "--integrations-dir #{windows_safe_path(project_dir)} " \ - "--build-dir #{wheel_build_dir} " \ - "--integration #{check} " \ - "--awscli #{awscli}", - :cwd => tasks_dir_in + checks_to_install.each do |check| + check_dir = File.join(project_dir, check) + check_conf_dir = "#{conf_dir}/#{check}.d" + + # For each conf file, if it already exists, that means the `datadog-agent` software def + # wrote it first. In that case, since the agent's confs take precedence, skip the conf + conf_files = ["conf.yaml.example", "conf.yaml.default", "metrics.yaml", "auto_conf.yaml"] + conf_files.each do |filename| + src = windows_safe_path(check_dir,"datadog_checks", check, "data", filename) + if File.exist? src + mkdir check_conf_dir + dest = check_conf_dir + # Requires a block because requires info only available at build time + block "Copy #{src} to #{dest} without overwriting" do + FileUtils.cp_r(src, dest) unless File.exist(windows_safe_path(dest, filename)) end end + end - # From now on we don't need piptools anymore, uninstall its deps so we don't include them in the final artifact - uninstall_buildtime_deps.each do |dep| - if windows_target? - command "#{python} -m pip uninstall -y #{dep}" - else - command "#{pip} uninstall -y #{dep}" - end + # Copy SNMP profiles + profile_folders = ['profiles', 'default_profiles'] + profile_folders.each do |profile_folder| + folder_path = "#{check_dir}/datadog_checks/#{check}/data/#{profile_folder}" + if File.exist? folder_path + copy folder_path, "#{check_conf_dir}/" end end - block do - # We have to run these operations in block, so they get applied after operations - # from the last block - - # Patch applies to only one file: set it explicitly as a target, no need for -p - if windows_target? - patch :source => "create-regex-at-runtime.patch", :target => "#{python_2_embedded}/Lib/site-packages/yaml/reader.py" - patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{python_2_embedded}/Lib/site-packages/psutil/__init__.py" - else - patch :source => "create-regex-at-runtime.patch", :target => "#{install_dir}/embedded/lib/python2.7/site-packages/yaml/reader.py" - patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{install_dir}/embedded/lib/python2.7/site-packages/psutil/__init__.py" - end + # pip < 21.2 replace underscores by dashes in package names per https://pip.pypa.io/en/stable/news/#v21-2 + # whether or not this might switch back in the future is not guaranteed, so we check for both name + # with dashes and underscores + if installed_list.include?(check) || installed_list.include?(check.gsub('_', '-')) + next + end - # Run pip check to make sure the agent's python environment is clean, all the dependencies are compatible - if windows_target? - command "#{python} -m pip check" - else - command "#{pip} check" - end + if windows_target? + command "#{python} -m pip wheel . 
--no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => win_build_env, :cwd => "#{windows_safe_path(project_dir)}\\#{check}" + command "#{python} -m pip install datadog-#{check} --no-deps --no-index --find-links=#{wheel_build_dir}" + else + command "#{pip} wheel . --no-deps --no-index --wheel-dir=#{wheel_build_dir}", :env => nix_build_env, :cwd => "#{project_dir}/#{check}" + command "#{pip} install datadog-#{check} --no-deps --no-index --find-links=#{wheel_build_dir}" end + if cache_bucket != '' && ENV.fetch('INTEGRATION_WHEELS_SKIP_CACHE_UPLOAD', '') == '' && cache_branch != nil + command "inv -e agent.upload-integration-to-cache " \ + "--python 2 --bucket #{cache_bucket} " \ + "--branch #{cache_branch} " \ + "--integrations-dir #{windows_safe_path(project_dir)} " \ + "--build-dir #{wheel_build_dir} " \ + "--integration #{check} " \ + "--awscli #{awscli}", + :cwd => tasks_dir_in + end + end - block do - # Removing tests that don't need to be shipped in the embedded folder - if windows_target? - delete "#{python_2_embedded}/Lib/site-packages/Cryptodome/SelfTest/" - else - delete "#{install_dir}/embedded/lib/python2.7/site-packages/Cryptodome/SelfTest/" - end + # From now on we don't need piptools anymore, uninstall its deps so we don't include them in the final artifact + uninstall_buildtime_deps.each do |dep| + if windows_target? + command "#{python} -m pip uninstall -y #{dep}" + else + command "#{pip} uninstall -y #{dep}" end end + # Patch applies to only one file: set it explicitly as a target, no need for -p + if windows_target? + patch :source => "create-regex-at-runtime.patch", :target => "#{python_2_embedded}/Lib/site-packages/yaml/reader.py" + patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{python_2_embedded}/Lib/site-packages/psutil/__init__.py" + else + patch :source => "create-regex-at-runtime.patch", :target => "#{install_dir}/embedded/lib/python2.7/site-packages/yaml/reader.py" + patch :source => "remove-maxfile-maxpath-psutil.patch", :target => "#{install_dir}/embedded/lib/python2.7/site-packages/psutil/__init__.py" + end + + # Run pip check to make sure the agent's python environment is clean, all the dependencies are compatible + if windows_target? + command "#{python} -m pip check" + else + command "#{pip} check" + end + + # Removing tests that don't need to be shipped in the embedded folder + if windows_target? + delete "#{python_2_embedded}/Lib/site-packages/Cryptodome/SelfTest/" + else + delete "#{install_dir}/embedded/lib/python2.7/site-packages/Cryptodome/SelfTest/" + end + # Ship `requirements-agent-release.txt` file containing the versions of every check shipped with the agent # Used by the `datadog-agent integration` command to prevent downgrading a check to a version # older than the one shipped in the agent