From 6de62c00e3a08a8aa6f3f631d124cb286fa5ff3a Mon Sep 17 00:00:00 2001 From: Elvis Wianda Date: Thu, 20 Jun 2024 19:39:22 -0400 Subject: [PATCH] Import rules pycross as part of #1360 --- .bazelrc | 4 +- python/private/pycross_staging/BUILD.bazel | 64 + python/private/pycross_staging/defs.bzl | 24 + .../pycross_staging/extensions/BUILD.bazel | 44 + .../extensions/environments.bzl | 5 + .../pycross_staging/extensions/lock_file.bzl | 5 + .../extensions/lock_import.bzl | 5 + .../pycross_staging/extensions/lock_repos.bzl | 5 + .../pycross_staging/extensions/pycross.bzl | 5 + .../private/pycross_staging/hooks/BUILD.bazel | 5 + .../pycross_staging/private/BUILD.bazel | 291 +++++ .../private/bzlmod/BUILD.bazel | 83 ++ .../private/bzlmod/environments.bzl | 38 + .../private/bzlmod/lock_file.bzl | 54 + .../private/bzlmod/lock_hub_repo.bzl | 26 + .../private/bzlmod/lock_import.bzl | 157 +++ .../private/bzlmod/lock_repos.bzl | 101 ++ .../private/bzlmod/pycross.bzl | 84 ++ .../private/bzlmod/tag_attrs.bzl | 67 ++ .../private/bzlmod/toolchains.bzl | 47 + .../private/cc_toolchain_util.bzl | 331 +++++ .../pycross_staging/private/internal_repo.bzl | 203 ++++ .../private/interpreter_version.bzl | 39 + .../pycross_staging/private/lock_attrs.bzl | 252 ++++ .../pycross_staging/private/lock_file.bzl | 79 ++ .../private/lock_file_repo.bzl | 54 + .../pycross_staging/private/lock_repo.bzl | 31 + .../pycross_staging/private/package_repo.bzl | 224 ++++ .../private/pycross_staging/private/pdm.lock | 1 + .../private/pdm_lock_model.bzl | 110 ++ .../private/poetry_lock_model.bzl | 75 ++ .../pycross_staging/private/providers.bzl | 9 + .../private/pycross_deps.lock.bzl | 393 ++++++ .../private/pycross_deps_core.lock.bzl | 168 +++ .../pycross_staging/private/pypi_file.bzl | 90 ++ .../pycross_staging/private/pyproject.toml | 1 + .../private/repo_venv_utils.bzl | 140 +++ .../private/resolved_lock_repo.bzl | 77 ++ .../private/target_environment.bzl | 111 ++ .../private/toolchain_helpers.bzl | 475 
++++++++ .../pycross_staging/private/tools/BUILD.bazel | 169 +++ .../pycross_staging/private/tools/args.py | 23 + .../private/tools/bzl_lock_generator.py | 35 + .../private/tools/crossenv/BUILD.bazel | 27 + .../tools/crossenv/LICENSE.crossenv.txt | 19 + .../private/tools/crossenv/README.md | 5 + .../private/tools/crossenv/__init__.py | 386 ++++++ .../private/tools/crossenv/__main__.py | 3 + .../tools/crossenv/scripts/BUILD.bazel | 7 + .../tools/crossenv/scripts/_manylinux.py.tmpl | 15 + .../scripts/distutils-sysconfig-patch.py.tmpl | 29 + .../tools/crossenv/scripts/os-patch.py.tmpl | 31 + .../crossenv/scripts/platform-patch.py.tmpl | 23 + .../tools/crossenv/scripts/pywrapper.py.tmpl | 38 + .../tools/crossenv/scripts/site.py.tmpl | 161 +++ .../tools/crossenv/scripts/sys-patch.py.tmpl | 42 + .../crossenv/scripts/sysconfig-patch.py.tmpl | 32 + .../private/tools/crossenv/template.py | 29 + .../private/tools/crossenv/utils.py | 205 ++++ .../private/tools/extract_lock_repos.py | 46 + .../private/tools/lock_model.py | 295 +++++ .../private/tools/namespace_pkgs.py | 107 ++ .../private/tools/namespace_pkgs_test.py | 178 +++ .../private/tools/pdm_translator.py | 417 +++++++ .../private/tools/poetry_translator.py | 296 +++++ .../private/tools/raw_lock_resolver.py | 585 +++++++++ .../private/tools/repairwheel/BUILD.bazel | 14 + .../tools/repairwheel/repair_wheel_hook.py | 34 + .../private/tools/resolved_lock_renderer.py | 716 +++++++++++ .../private/tools/target_environment.py | 213 ++++ .../tools/target_environment_generator.py | 200 ++++ .../private/tools/wheel_builder.py | 1064 +++++++++++++++++ .../private/tools/wheel_installer.py | 161 +++ .../private/pycross_staging/private/util.bzl | 43 + .../pycross_staging/private/wheel_build.bzl | 447 +++++++ .../pycross_staging/private/wheel_library.bzl | 128 ++ .../private/wheel_zipimport_library.bzl | 69 ++ .../private/pycross_staging/repositories.bzl | 29 + python/private/pycross_staging/toolchain.bzl | 57 + 
python/private/pycross_staging/workspace.bzl | 19 + 80 files changed, 10372 insertions(+), 2 deletions(-) create mode 100644 python/private/pycross_staging/BUILD.bazel create mode 100644 python/private/pycross_staging/defs.bzl create mode 100644 python/private/pycross_staging/extensions/BUILD.bazel create mode 100644 python/private/pycross_staging/extensions/environments.bzl create mode 100644 python/private/pycross_staging/extensions/lock_file.bzl create mode 100644 python/private/pycross_staging/extensions/lock_import.bzl create mode 100644 python/private/pycross_staging/extensions/lock_repos.bzl create mode 100644 python/private/pycross_staging/extensions/pycross.bzl create mode 100644 python/private/pycross_staging/hooks/BUILD.bazel create mode 100644 python/private/pycross_staging/private/BUILD.bazel create mode 100644 python/private/pycross_staging/private/bzlmod/BUILD.bazel create mode 100644 python/private/pycross_staging/private/bzlmod/environments.bzl create mode 100644 python/private/pycross_staging/private/bzlmod/lock_file.bzl create mode 100644 python/private/pycross_staging/private/bzlmod/lock_hub_repo.bzl create mode 100644 python/private/pycross_staging/private/bzlmod/lock_import.bzl create mode 100644 python/private/pycross_staging/private/bzlmod/lock_repos.bzl create mode 100644 python/private/pycross_staging/private/bzlmod/pycross.bzl create mode 100644 python/private/pycross_staging/private/bzlmod/tag_attrs.bzl create mode 100644 python/private/pycross_staging/private/bzlmod/toolchains.bzl create mode 100644 python/private/pycross_staging/private/cc_toolchain_util.bzl create mode 100644 python/private/pycross_staging/private/internal_repo.bzl create mode 100644 python/private/pycross_staging/private/interpreter_version.bzl create mode 100644 python/private/pycross_staging/private/lock_attrs.bzl create mode 100644 python/private/pycross_staging/private/lock_file.bzl create mode 100644 python/private/pycross_staging/private/lock_file_repo.bzl 
create mode 100644 python/private/pycross_staging/private/lock_repo.bzl create mode 100644 python/private/pycross_staging/private/package_repo.bzl create mode 120000 python/private/pycross_staging/private/pdm.lock create mode 100644 python/private/pycross_staging/private/pdm_lock_model.bzl create mode 100644 python/private/pycross_staging/private/poetry_lock_model.bzl create mode 100644 python/private/pycross_staging/private/providers.bzl create mode 100644 python/private/pycross_staging/private/pycross_deps.lock.bzl create mode 100644 python/private/pycross_staging/private/pycross_deps_core.lock.bzl create mode 100644 python/private/pycross_staging/private/pypi_file.bzl create mode 120000 python/private/pycross_staging/private/pyproject.toml create mode 100644 python/private/pycross_staging/private/repo_venv_utils.bzl create mode 100644 python/private/pycross_staging/private/resolved_lock_repo.bzl create mode 100644 python/private/pycross_staging/private/target_environment.bzl create mode 100644 python/private/pycross_staging/private/toolchain_helpers.bzl create mode 100644 python/private/pycross_staging/private/tools/BUILD.bazel create mode 100644 python/private/pycross_staging/private/tools/args.py create mode 100644 python/private/pycross_staging/private/tools/bzl_lock_generator.py create mode 100644 python/private/pycross_staging/private/tools/crossenv/BUILD.bazel create mode 100644 python/private/pycross_staging/private/tools/crossenv/LICENSE.crossenv.txt create mode 100644 python/private/pycross_staging/private/tools/crossenv/README.md create mode 100644 python/private/pycross_staging/private/tools/crossenv/__init__.py create mode 100644 python/private/pycross_staging/private/tools/crossenv/__main__.py create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/BUILD.bazel create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/_manylinux.py.tmpl create mode 100644 
python/private/pycross_staging/private/tools/crossenv/scripts/distutils-sysconfig-patch.py.tmpl create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/os-patch.py.tmpl create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/platform-patch.py.tmpl create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/pywrapper.py.tmpl create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/site.py.tmpl create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/sys-patch.py.tmpl create mode 100644 python/private/pycross_staging/private/tools/crossenv/scripts/sysconfig-patch.py.tmpl create mode 100644 python/private/pycross_staging/private/tools/crossenv/template.py create mode 100644 python/private/pycross_staging/private/tools/crossenv/utils.py create mode 100644 python/private/pycross_staging/private/tools/extract_lock_repos.py create mode 100644 python/private/pycross_staging/private/tools/lock_model.py create mode 100644 python/private/pycross_staging/private/tools/namespace_pkgs.py create mode 100644 python/private/pycross_staging/private/tools/namespace_pkgs_test.py create mode 100644 python/private/pycross_staging/private/tools/pdm_translator.py create mode 100644 python/private/pycross_staging/private/tools/poetry_translator.py create mode 100644 python/private/pycross_staging/private/tools/raw_lock_resolver.py create mode 100644 python/private/pycross_staging/private/tools/repairwheel/BUILD.bazel create mode 100644 python/private/pycross_staging/private/tools/repairwheel/repair_wheel_hook.py create mode 100644 python/private/pycross_staging/private/tools/resolved_lock_renderer.py create mode 100644 python/private/pycross_staging/private/tools/target_environment.py create mode 100644 python/private/pycross_staging/private/tools/target_environment_generator.py create mode 100644 python/private/pycross_staging/private/tools/wheel_builder.py create mode 
100644 python/private/pycross_staging/private/tools/wheel_installer.py create mode 100644 python/private/pycross_staging/private/util.bzl create mode 100644 python/private/pycross_staging/private/wheel_build.bzl create mode 100644 python/private/pycross_staging/private/wheel_library.bzl create mode 100644 python/private/pycross_staging/private/wheel_zipimport_library.bzl create mode 100644 python/private/pycross_staging/repositories.bzl create mode 100644 python/private/pycross_staging/toolchain.bzl create mode 100644 python/private/pycross_staging/workspace.bzl diff --git a/.bazelrc b/.bazelrc index 3b915864ce..9d7d6f1252 100644 --- a/.bazelrc +++ b/.bazelrc @@ -4,8 +4,8 @@ # (Note, we cannot use `common --deleted_packages` because the bazel version command doesn't support it) # To update these lines, execute # `bazel run @rules_bazel_integration_test//tools:update_deleted_packages` -build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/dupe_requirements,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gaze
lle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/pip_repository_entry_points,tests/integration/py_cc_toolchain_registered -query --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/dupe_requirements,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/pip_parse,tests/integration/pip_
parse/empty,tests/integration/pip_repository_entry_points,tests/integration/py_cc_toolchain_registered +build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/pip_repository_entry_points,tests/integration/py_cc_toolchain_registered +query 
--deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/pip_repository_entry_points,tests/integration/py_cc_toolchain_registered test --test_output=errors diff --git a/python/private/pycross_staging/BUILD.bazel b/python/private/pycross_staging/BUILD.bazel new file mode 100644 index 0000000000..f4751220af --- /dev/null +++ b/python/private/pycross_staging/BUILD.bazel @@ -0,0 +1,64 @@ +load("@bazel_skylib//:bzl_library.bzl", "bzl_library") + +# For stardoc to reference the files +exports_files([ + "defs.bzl", + "workspace.bzl", +]) + 
+toolchain_type( + name = "toolchain_type", + visibility = ["//visibility:public"], +) + +bzl_library( + name = "repositories", + srcs = ["repositories.bzl"], + visibility = ["//visibility:public"], + deps = [ + "//pycross/private:internal_repo", + "//pycross/private:pycross_deps_core_lock", + "//pycross/private:pycross_deps_lock", + "@bazel_tools//tools/build_defs/repo:http.bzl", + "@bazel_tools//tools/build_defs/repo:utils.bzl", + ], +) + +bzl_library( + name = "defs", + srcs = ["defs.bzl"], + visibility = ["//visibility:public"], + deps = [ + "//pycross/private:lock_file", + "//pycross/private:pdm_lock_model", + "//pycross/private:poetry_lock_model", + "//pycross/private:pypi_file", + "//pycross/private:target_environment", + "//pycross/private:wheel_build", + "//pycross/private:wheel_library", + "//pycross/private:wheel_zipimport_library", + ], +) + +bzl_library( + name = "workspace", + srcs = ["workspace.bzl"], + visibility = ["//visibility:public"], + deps = [ + "//pycross/private:lock_file_repo", + "//pycross/private:lock_repo", + "//pycross/private:pdm_lock_model", + "//pycross/private:poetry_lock_model", + "//pycross/private:toolchain_helpers", + ], +) + +bzl_library( + name = "toolchain", + srcs = ["toolchain.bzl"], + visibility = ["//visibility:public"], + deps = [ + "//pycross/private:providers", + "@bazel_skylib//lib:paths", + ], +) diff --git a/python/private/pycross_staging/defs.bzl b/python/private/pycross_staging/defs.bzl new file mode 100644 index 0000000000..1cc2e498d3 --- /dev/null +++ b/python/private/pycross_staging/defs.bzl @@ -0,0 +1,24 @@ +"Public build rule API re-exports" + +load("//pycross/private:lock_attrs.bzl", _package_annotation = "package_annotation") +load("//pycross/private:lock_file.bzl", _pycross_lock_file = "pycross_lock_file") +load("//pycross/private:pdm_lock_model.bzl", _pycross_pdm_lock_model = "pycross_pdm_lock_model") +load("//pycross/private:poetry_lock_model.bzl", _pycross_poetry_lock_model = 
"pycross_poetry_lock_model") +load("//pycross/private:providers.bzl", _PycrossWheelInfo = "PycrossWheelInfo") +load("//pycross/private:pypi_file.bzl", _pypi_file = "pypi_file") +load("//pycross/private:target_environment.bzl", _pycross_target_environment = "pycross_target_environment") +load("//pycross/private:wheel_build.bzl", _pycross_wheel_build = "pycross_wheel_build") +load("//pycross/private:wheel_library.bzl", _pycross_wheel_library = "pycross_wheel_library") +load("//pycross/private:wheel_zipimport_library.bzl", _pycross_wheel_zipimport_library = "pycross_wheel_zipimport_library") + +PycrossWheelInfo = _PycrossWheelInfo + +package_annotation = _package_annotation +pycross_lock_file = _pycross_lock_file +pycross_pdm_lock_model = _pycross_pdm_lock_model +pycross_poetry_lock_model = _pycross_poetry_lock_model +pycross_target_environment = _pycross_target_environment +pycross_wheel_build = _pycross_wheel_build +pycross_wheel_library = _pycross_wheel_library +pycross_wheel_zipimport_library = _pycross_wheel_zipimport_library +pypi_file = _pypi_file diff --git a/python/private/pycross_staging/extensions/BUILD.bazel b/python/private/pycross_staging/extensions/BUILD.bazel new file mode 100644 index 0000000000..1c351df7d0 --- /dev/null +++ b/python/private/pycross_staging/extensions/BUILD.bazel @@ -0,0 +1,44 @@ +load("@bazel_skylib//:bzl_library.bzl", "bzl_library") + +exports_files([ + "environments.bzl", + "lock_file.bzl", + "lock_import.bzl", + "lock_repos.bzl", + "pycross.bzl", +]) + +bzl_library( + name = "environments", + srcs = ["environments.bzl"], + visibility = ["//visibility:public"], + deps = ["//pycross/private/bzlmod:environments"], +) + +bzl_library( + name = "lock_file", + srcs = ["lock_file.bzl"], + visibility = ["//visibility:public"], + deps = ["//pycross/private/bzlmod:lock_file"], +) + +bzl_library( + name = "lock_import", + srcs = ["lock_import.bzl"], + visibility = ["//visibility:public"], + deps = ["//pycross/private/bzlmod:lock_import"], +) 
+ +bzl_library( + name = "lock_repos", + srcs = ["lock_repos.bzl"], + visibility = ["//visibility:public"], + deps = ["//pycross/private/bzlmod:lock_repos"], +) + +bzl_library( + name = "pycross", + srcs = ["pycross.bzl"], + visibility = ["//visibility:public"], + deps = ["//pycross/private/bzlmod:pycross"], +) diff --git a/python/private/pycross_staging/extensions/environments.bzl b/python/private/pycross_staging/extensions/environments.bzl new file mode 100644 index 0000000000..100a9f1bcd --- /dev/null +++ b/python/private/pycross_staging/extensions/environments.bzl @@ -0,0 +1,5 @@ +"""The environments extension.""" + +load("//pycross/private/bzlmod:environments.bzl", _environments = "environments") + +environments = _environments diff --git a/python/private/pycross_staging/extensions/lock_file.bzl b/python/private/pycross_staging/extensions/lock_file.bzl new file mode 100644 index 0000000000..b843e3f3af --- /dev/null +++ b/python/private/pycross_staging/extensions/lock_file.bzl @@ -0,0 +1,5 @@ +"""The lock_repos extension.""" + +load("//pycross/private/bzlmod:lock_file.bzl", _lock_file = "lock_file") + +lock_file = _lock_file diff --git a/python/private/pycross_staging/extensions/lock_import.bzl b/python/private/pycross_staging/extensions/lock_import.bzl new file mode 100644 index 0000000000..7b910283f2 --- /dev/null +++ b/python/private/pycross_staging/extensions/lock_import.bzl @@ -0,0 +1,5 @@ +"""The lock_import extension.""" + +load("//pycross/private/bzlmod:lock_import.bzl", _lock_import = "lock_import") + +lock_import = _lock_import diff --git a/python/private/pycross_staging/extensions/lock_repos.bzl b/python/private/pycross_staging/extensions/lock_repos.bzl new file mode 100644 index 0000000000..4724869db3 --- /dev/null +++ b/python/private/pycross_staging/extensions/lock_repos.bzl @@ -0,0 +1,5 @@ +"""The lock_repos extension.""" + +load("//pycross/private/bzlmod:lock_repos.bzl", _lock_repos = "lock_repos") + +lock_repos = _lock_repos diff --git 
a/python/private/pycross_staging/extensions/pycross.bzl b/python/private/pycross_staging/extensions/pycross.bzl new file mode 100644 index 0000000000..3566576f9c --- /dev/null +++ b/python/private/pycross_staging/extensions/pycross.bzl @@ -0,0 +1,5 @@ +"""The lock_import extension.""" + +load("//pycross/private/bzlmod:pycross.bzl", _pycross = "pycross") + +pycross = _pycross diff --git a/python/private/pycross_staging/hooks/BUILD.bazel b/python/private/pycross_staging/hooks/BUILD.bazel new file mode 100644 index 0000000000..311ed7b7d5 --- /dev/null +++ b/python/private/pycross_staging/hooks/BUILD.bazel @@ -0,0 +1,5 @@ +alias( + name = "repair_wheel", + actual = "//pycross/private/tools/repairwheel:repair_wheel_hook", + visibility = ["//visibility:public"], +) diff --git a/python/private/pycross_staging/private/BUILD.bazel b/python/private/pycross_staging/private/BUILD.bazel new file mode 100644 index 0000000000..dee9eb339c --- /dev/null +++ b/python/private/pycross_staging/private/BUILD.bazel @@ -0,0 +1,291 @@ +load("@aspect_bazel_lib//lib:write_source_files.bzl", "write_source_files") +load("@bazel_skylib//:bzl_library.bzl", "bzl_library") +load(":lock_file.bzl", "pycross_lock_file") +load(":pdm_lock_model.bzl", "pycross_pdm_lock_model") +load(":target_environment.bzl", "pycross_target_environment") + +package(default_visibility = ["//pycross:__subpackages__"]) + +bzl_library( + name = "cc_toolchain_util", + srcs = ["cc_toolchain_util.bzl"], + deps = [ + "@bazel_skylib//lib:collections", + "@bazel_tools//tools/build_defs/cc:action_names.bzl", + "@bazel_tools//tools/cpp:toolchain_utils.bzl", + ], +) + +bzl_library( + name = "internal_repo", + srcs = ["internal_repo.bzl"], + deps = [ + ":lock_attrs", + ":repo_venv_utils", + "@bazel_skylib//lib:shell", + ], +) + +bzl_library( + name = "interpreter_version", + srcs = ["interpreter_version.bzl"], + # This isn't a public API, but it's publicly accessible because generated + # toolchain and environment BUILD rules load 
from it. + visibility = ["//visibility:public"], + deps = [ + "@rules_python//python:versions.bzl", + ], +) + +bzl_library( + name = "lock_file", + srcs = ["lock_file.bzl"], + deps = [ + ":lock_attrs", + ":providers", + ":target_environment", + ":util", + ], +) + +bzl_library( + name = "pdm_lock_model", + srcs = ["pdm_lock_model.bzl"], + deps = [":internal_repo"], +) + +bzl_library( + name = "lock_repo", + srcs = ["lock_repo.bzl"], + deps = [ + ":lock_attrs", + ":package_repo", + ":resolved_lock_repo", + ], +) + +bzl_library( + name = "poetry_lock_model", + srcs = ["poetry_lock_model.bzl"], + deps = [":internal_repo"], +) + +bzl_library( + name = "pycross_deps_core_lock", + deps = [ + "//pycross:defs", + "@bazel_tools//tools/build_defs/repo:http.bzl", + "@bazel_tools//tools/build_defs/repo:utils.bzl", + ], +) + +bzl_library( + name = "pycross_deps_lock", + deps = [ + "//pycross:defs", + "@bazel_tools//tools/build_defs/repo:http.bzl", + "@bazel_tools//tools/build_defs/repo:utils.bzl", + ], +) + +bzl_library( + name = "pypi_file", + srcs = ["pypi_file.bzl"], + deps = ["@bazel_tools//tools/build_defs/repo:utils.bzl"], +) + +bzl_library( + name = "package_repo", + srcs = ["package_repo.bzl"], + deps = [ + ":internal_repo", + "@bazel_skylib//lib:paths", + ], +) + +bzl_library( + name = "repo_venv_utils", + srcs = ["repo_venv_utils.bzl"], + deps = [ + "@bazel_skylib//lib:paths", + ], +) + +bzl_library( + name = "resolved_lock_repo", + srcs = ["resolved_lock_repo.bzl"], + deps = [ + ":internal_repo", + ":lock_attrs", + ":pdm_lock_model", + ":poetry_lock_model", + ], +) + +bzl_library( + name = "target_environment", + srcs = ["target_environment.bzl"], + deps = [ + ":internal_repo", + ], +) + +bzl_library( + name = "toolchain_helpers", + srcs = ["toolchain_helpers.bzl"], + deps = [ + ":lock_attrs", + ":target_environment", + ":util", + "@rules_python//python:versions.bzl", + ], +) + +bzl_library( + name = "wheel_build", + srcs = ["wheel_build.bzl"], + deps = [ + 
":cc_toolchain_util", + ":providers", + "@bazel_skylib//lib:paths", + "@bazel_tools//tools/cpp:toolchain_utils.bzl", + "@rules_python//python:py_info_bzl", + ], +) + +bzl_library( + name = "wheel_library", + srcs = ["wheel_library.bzl"], + deps = [ + ":providers", + "@bazel_skylib//lib:paths", + "@rules_python//python:py_info_bzl", + ], +) + +bzl_library( + name = "wheel_zipimport_library", + srcs = ["wheel_zipimport_library.bzl"], + deps = [ + "@bazel_skylib//lib:paths", + "@rules_python//python:py_info_bzl", + ], +) + +# This is the environment we advertise for what should be pure-python +# dependencies used by Pycross +pycross_target_environment( + name = "rules_pycross_deps_target_env", + python_compatible_with = [], + version = "3.8.0", + visibility = ["//visibility:public"], +) + +# NB: pdm.lock and pyproject.toml are symlinked into this directory from +# the root. This is so that this package does not need to depend on the +# root package, since the root utilizes dev dependencies which are not +# always present. 
+pycross_pdm_lock_model( + name = "rules_pycross_all_deps_lock_model", + all_optional_groups = True, + lock_file = ":pdm.lock", + project_file = ":pyproject.toml", +) + +pycross_pdm_lock_model( + name = "rules_pycross_core_deps_lock_model", + lock_file = ":pdm.lock", + optional_groups = ["core"], + project_file = ":pyproject.toml", +) + +pycross_lock_file( + name = "rules_pycross_all_deps", + out = "updated-pycross_deps.lock.bzl", + disallow_builds = True, + fully_qualified_environment_labels = False, + lock_model_file = ":rules_pycross_all_deps_lock_model", + repo_prefix = "rules_pycross_internal_deps", + target_environments = [":rules_pycross_deps_target_env"], + visibility = ["//:__subpackages__"], +) + +pycross_lock_file( + name = "rules_pycross_core_deps", + out = "updated-pycross_deps_core.lock.bzl", + disallow_builds = True, + fully_qualified_environment_labels = False, + generate_file_map = True, + lock_model_file = ":rules_pycross_core_deps_lock_model", + repo_prefix = "rules_pycross_internal_deps", + target_environments = [":rules_pycross_deps_target_env"], + visibility = ["//:__subpackages__"], +) + +write_source_files( + name = "update_pycross_deps", + diff_test = False, # Diff tests for these files are in the workspace root. 
+ files = { + "pycross_deps.lock.bzl": ":updated-pycross_deps.lock.bzl", + "pycross_deps_core.lock.bzl": ":updated-pycross_deps_core.lock.bzl", + }, +) + +bzl_library( + name = "pycross_deps.lock", + srcs = ["pycross_deps.lock.bzl"], + deps = [ + "//pycross:defs", + "@bazel_tools//tools/build_defs/repo:http.bzl", + "@bazel_tools//tools/build_defs/repo:utils.bzl", + ], +) + +bzl_library( + name = "pycross_deps_core.lock", + srcs = ["pycross_deps_core.lock.bzl"], + deps = [ + "//pycross:defs", + "@bazel_tools//tools/build_defs/repo:http.bzl", + "@bazel_tools//tools/build_defs/repo:utils.bzl", + ], +) + +bzl_library( + name = "lock_attrs", + srcs = ["lock_attrs.bzl"], + deps = ["util"], +) + +bzl_library( + name = "lock_file_repo", + srcs = ["lock_file_repo.bzl"], +) + +bzl_library( + name = "providers", + srcs = ["providers.bzl"], +) + +bzl_library( + name = "updated-pycross_deps.lock", + srcs = ["updated-pycross_deps.lock.bzl"], +) + +bzl_library( + name = "updated-pycross_deps_core.lock", + srcs = ["updated-pycross_deps_core.lock.bzl"], +) + +bzl_library( + name = "util", + srcs = ["util.bzl"], +) + +exports_files( + [ + "pycross_deps_core.lock.bzl", + "pycross_deps.lock.bzl", + ], + visibility = ["//:__subpackages__"], +) diff --git a/python/private/pycross_staging/private/bzlmod/BUILD.bazel b/python/private/pycross_staging/private/bzlmod/BUILD.bazel new file mode 100644 index 0000000000..dda1515212 --- /dev/null +++ b/python/private/pycross_staging/private/bzlmod/BUILD.bazel @@ -0,0 +1,83 @@ +load("@bazel_skylib//:bzl_library.bzl", "bzl_library") +load("//pycross/private:util.bzl", "REPO_HTTP_DEPS") + +package(default_visibility = ["//pycross:__subpackages__"]) + +bzl_library( + name = "environments", + srcs = ["environments.bzl"], + deps = [ + ":tag_attrs", + "//pycross/private:toolchain_helpers", + "@rules_pycross_internal//:defaults.bzl", + ], +) + +bzl_library( + name = "pycross", + srcs = ["pycross.bzl"], + deps = [ + ":tag_attrs", + 
"//pycross/private:internal_repo", + "//pycross/private:pycross_deps.lock", + "//pycross/private:pycross_deps_core.lock", + ], +) + +bzl_library( + name = "lock_import", + srcs = ["lock_import.bzl"], + deps = [ + ":lock_hub_repo", + ":tag_attrs", + "//pycross/private:internal_repo", + "//pycross/private:pdm_lock_model", + "//pycross/private:poetry_lock_model", + "//pycross/private:pypi_file", + "//pycross/private:resolved_lock_repo", + "@bazel_features//:features", + ] + REPO_HTTP_DEPS, +) + +bzl_library( + name = "tag_attrs", + srcs = ["tag_attrs.bzl"], + deps = [ + "//pycross/private:lock_attrs", + ], +) + +bzl_library( + name = "toolchains", + srcs = ["toolchains.bzl"], + deps = [ + "//pycross/private:toolchain_helpers", + ], +) + +bzl_library( + name = "lock_repos", + srcs = ["lock_repos.bzl"], + deps = [ + ":tag_attrs", + "//pycross/private:package_repo", + "//pycross/private:pypi_file", + "@bazel_features//:features", + "@lock_import_repos_hub//:locks.bzl", + ] + REPO_HTTP_DEPS, +) + +bzl_library( + name = "lock_file", + srcs = ["lock_file.bzl"], + deps = [ + "//pycross/private:internal_repo", + "//pycross/private:lock_file_repo", + "//pycross/private:pypi_file", + ] + REPO_HTTP_DEPS, +) + +bzl_library( + name = "lock_hub_repo", + srcs = ["lock_hub_repo.bzl"], +) diff --git a/python/private/pycross_staging/private/bzlmod/environments.bzl b/python/private/pycross_staging/private/bzlmod/environments.bzl new file mode 100644 index 0000000000..35b440882f --- /dev/null +++ b/python/private/pycross_staging/private/bzlmod/environments.bzl @@ -0,0 +1,38 @@ +"""The environments extension creates target environment definitions.""" + +load( + "@rules_pycross_internal//:defaults.bzl", + default_glibc_version = "glibc_version", + default_macos_version = "macos_version", + default_platforms = "platforms", + default_python_versions = "python_versions", +) +load("//pycross/private:toolchain_helpers.bzl", "pycross_environments_repo") +load(":tag_attrs.bzl", 
def _environments_impl(module_ctx):
    """Create one pycross_environments_repo per `create_for_python_toolchains` tag.

    Per-tag values fall back to the defaults published by
    @rules_pycross_internal//:defaults.bzl when unset.
    """
    for mod in module_ctx.modules:
        for env_tag in mod.tags.create_for_python_toolchains:
            repo_kwargs = dict(
                name = env_tag.name,
                # Hard-coded names of the rules_python toolchain repos.
                python_toolchains_repo = "@python_versions",
                pythons_hub_repo = "@pythons_hub",
            )
            repo_kwargs["platforms"] = env_tag.platforms or default_platforms
            repo_kwargs["requested_python_versions"] = env_tag.python_versions or default_python_versions
            repo_kwargs["glibc_version"] = env_tag.glibc_version or default_glibc_version
            repo_kwargs["macos_version"] = env_tag.macos_version or default_macos_version
            pycross_environments_repo(**repo_kwargs)

environments = module_extension(
    doc = "Create target environments.",
    implementation = _environments_impl,
    tag_classes = {
        "create_for_python_toolchains": tag_class(
            attrs = {
                "name": attr.string(
                    mandatory = True,
                ),
            } | CREATE_ENVIRONMENTS_ATTRS,
        ),
    },
)
"""A simple hub repo that stores a list of resolved lock files."""

_root_build = """\
package(default_visibility = ["//visibility:public"])

exports_files([
    "locks.bzl",
])
"""

def _lock_hub_repo_impl(rctx):
    """Write a locks.bzl mapping each repo name to its resolved lock file label."""
    entries = [
        '    "{}": Label("{}"),'.format(repo, rctx.attr.repo_files[repo])
        for repo in sorted(rctx.attr.repo_files)
    ]
    content = "\n".join(["locks = {"] + entries + ["}"]) + "\n"
    rctx.file("locks.bzl", content)
    rctx.file("BUILD.bazel", _root_build)

lock_hub_repo = repository_rule(
    implementation = _lock_hub_repo_impl,
    attrs = {
        "repo_files": attr.string_dict(),
    },
)
def _generate_resolved_lock_repo(lock_info, serialized_lock_model):
    """Create a resolved_lock_repo for one lock and return its lock.json label.

    Args:
        lock_info: a struct built by _lock_struct (repo name, environments,
            local wheels, per-package tag objects, ...).
        serialized_lock_model: the serialized PDM/Poetry lock model.

    Returns:
        The "@<repo>//:lock.json" label string for the created repo.
    """
    repo_name = lock_info.repo_name
    args = {
        "annotations": {},
        "default_alias_single_version": lock_info.default_alias_single_version,
        "disallow_builds": lock_info.disallow_builds,
        "local_wheels": lock_info.local_wheels,
        "lock_model": serialized_lock_model,
        "name": repo_name,
        "target_environments": lock_info.environments,
    }

    # Translate each `package` tag into a package_annotation struct.
    for package_name, package in lock_info.packages.items():
        args["annotations"][package_name] = package_annotation(
            always_build = package.always_build,
            build_dependencies = package.build_dependencies,
            build_target = str(package.build_target) if package.build_target else None,
            ignore_dependencies = package.ignore_dependencies,
            install_exclude_globs = package.install_exclude_globs,
        )

    resolved_lock_repo(**args)
    return "@{}//:lock.json".format(repo_name)

def _check_unique_lock_repo(owners, module, tag):
    """Record `tag.repo` as owned by `module`, failing on a duplicate.

    Args:
        owners: mutable dict of repo name -> owning module name (updated here).
        module: the bzlmod module declaring the tag.
        tag: a tag with a `repo` attribute.
    """
    if tag.repo in owners:
        fail("lock repo '{}' wanted by module '{}' already created by module '{}'".format(
            tag.repo,
            module.name,
            owners[tag.repo],
        ))
    owners[tag.repo] = module.name

def _check_proper_tag_repo(owners, module, tag, tag_desc):
    """Fail unless `tag.repo` exists and is owned by `module`.

    Args:
        owners: dict of repo name -> owning module name.
        module: the bzlmod module declaring the tag.
        tag: a tag with a `repo` attribute.
        tag_desc: human-readable description of the tag, used in errors.
    """
    owner = owners.get(tag.repo)
    if owner == None:
        fail(
            "{} declared by module '{}' attached to non-existent lock repo '{}'".format(
                tag_desc,
                module.name,
                tag.repo,
            ),
        )
    elif owner != module.name:
        # Fix: the final module name previously lacked its closing quote
        # ("...other module '{}").
        fail(
            "{} declared by module '{}' attached to lock repo '{}' owned by other module '{}'".format(
                tag_desc,
                module.name,
                tag.repo,
                owner,
            ),
        )

def _check_proper_package_repo(owners, module, tag):
    """Validate that a `package` tag targets a lock repo owned by `module`."""
    _check_proper_tag_repo(owners, module, tag, "package '{}'".format(tag.name))

def _check_package_entry_not_set(owners, lock_info, tag):
    """Fail if a package entry for `tag.name` was already recorded for this repo."""
    if tag.name in lock_info.packages:
        fail("Multiple package entries for package '{}' in lock repo '{}' owned by module '{}'".format(tag.name, tag.repo, owners[tag.repo]))

def _lock_struct(mctx, tag):
    """Build the per-lock info struct from an import_pdm/import_poetry tag.

    Expands environment *index* files (JSON with an "environments" list)
    into their individual environment file labels, then pre-paths each
    file so later restarts of the extension are cheap.
    """
    environment_files = []
    for env_file in tag.target_environments:
        data = json.decode(mctx.read(env_file))
        if "environments" in data:
            # This is an environment index file. Add its entries to our result.
            environment_files.extend([env_file.relative(entry) for entry in data["environments"]])
        else:
            environment_files.append(env_file)
    environment_files = sorted(environment_files)

    # Pre-pathify environment files after we've expanded indexes
    for env_file in environment_files:
        mctx.path(env_file)

    return struct(
        repo_name = tag.repo,
        default_alias_single_version = tag.default_alias_single_version,
        environments = environment_files,
        local_wheels = tag.local_wheels,
        disallow_builds = tag.disallow_builds,
        packages = {},
    )
+ for module in module_ctx.modules: + for tag in module.tags.import_pdm + module.tags.import_poetry: + _check_unique_lock_repo(lock_owners, module, tag) + lock_repos[tag.repo] = _lock_struct(module_ctx, tag) + + # Iterate over the various from_pdm and from_poetry tags and create lock models + for module in module_ctx.modules: + for tag in module.tags.import_pdm: + lock_model_structs[tag.repo] = lock_repo_model_pdm(**{attr: getattr(tag, attr) for attr in PDM_IMPORT_ATTRS}) + for tag in module.tags.import_poetry: + lock_model_structs[tag.repo] = lock_repo_model_poetry(**{attr: getattr(tag, attr) for attr in POETRY_IMPORT_ATTRS}) + + # Add package attributes + for module in module_ctx.modules: + for tag in module.tags.package: + _check_proper_package_repo(lock_owners, module, tag) + repo_info = lock_repos[tag.repo] + _check_package_entry_not_set(lock_owners, repo_info, tag) + repo_info.packages[tag.name] = tag + + # Generate the resolved lock repos + for repo_name, repo_info in lock_repos.items(): + resolved_lock_repo_file = _generate_resolved_lock_repo(repo_info, lock_model_structs[repo_name]) + resolved_lock_files[repo_info.repo_name] = resolved_lock_repo_file + + lock_hub_repo( + name = "lock_import_repos_hub", + repo_files = resolved_lock_files, + ) + + if bazel_features.external_deps.extension_metadata_has_reproducible: + return module_ctx.extension_metadata(reproducible = True) + return module_ctx.extension_metadata() + +# Tag classes +_import_pdm_tag = tag_class( + doc = "Import a PDM lock file.", + attrs = PDM_IMPORT_ATTRS | COMMON_IMPORT_ATTRS | COMMON_ATTRS, +) +_import_poetry_tag = tag_class( + doc = "Import a Poetry lock file.", + attrs = POETRY_IMPORT_ATTRS | COMMON_IMPORT_ATTRS | COMMON_ATTRS, +) +_package_tag = tag_class( + doc = "Specify package-specific settings.", + attrs = PACKAGE_ATTRS | COMMON_ATTRS, +) + +lock_import = module_extension( + implementation = _lock_import_impl, + tag_classes = dict( + import_pdm = _import_pdm_tag, + import_poetry = 
"""The lock_repos extension."""

load("@bazel_features//:features.bzl", "bazel_features")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
load("@lock_import_repos_hub//:locks.bzl", lock_import_locks = "locks")
load("//pycross/private:package_repo.bzl", "package_repo")
load("//pycross/private:pypi_file.bzl", "pypi_file")
load(":tag_attrs.bzl", "CREATE_REPOS_ATTRS")

# buildifier: disable=print
def _print_warn(msg):
    # Module extensions have no logging API; a buildifier-exempted print
    # is the conventional way to surface warnings.
    print("WARNING:", msg)

def _lock_repos_impl(module_ctx):
    """Create package repos plus one file repo per distinct remote file.

    Reads each resolved lock JSON listed by @lock_import_repos_hub,
    creates an http_file repo for entries with known URLs (a pypi_file
    repo otherwise), and finally a package_repo per lock mapping file
    keys to those file repos.
    """
    all_locks = lock_import_locks  # Some day there may be others.

    # file key -> "@repo//file" label, shared across locks so the same
    # remote file is only instantiated once.
    all_remote_files = {}

    # Pre-pathify all lock files to minimize restart time.
    for lock_file in all_locks.values():
        module_ctx.path(lock_file)

    # Only the root module (or rules_pycross itself) may supply the
    # create tag; other modules' tags are ignored with a warning.
    create_tag = None
    for module in module_ctx.modules:
        for tag in module.tags.create:
            if module.name != "rules_pycross" and not module.is_root:
                _print_warn("Ignoring repos.create tag from non-root, non-pycross module {}".format(module.name))
                continue

            # Root module has precedence
            if create_tag == None:
                create_tag = tag

    if create_tag == None:
        # This shouldn't happen since rules_pycross registers a default tag.
        fail("BUG: no repos.create tag found!")

    # Generate the lock repos and any remote package repos
    for repo_name, lock_file in all_locks.items():
        resolved_lock_file = module_ctx.path(lock_file)
        resolved_lock = json.decode(module_ctx.read(resolved_lock_file))

        # file key -> file label for *this* lock only.
        repo_remote_files = {}
        for key, file in resolved_lock.get("remote_files", {}).items():
            if key in all_remote_files:
                # We already have an entry for this key, so use that.
                # TODO: add some preference for http entries vs. pypi_file entries.
                repo_remote_files[key] = all_remote_files[key]
                continue

            # Use the key as our repo name, but replace its / with _
            remote_file_repo = "pypi_{}".format(key.replace("/", "_").replace("+", "_"))
            remote_file_label = "@{}//file".format(remote_file_repo)

            urls = file.get("urls", [])
            if urls:
                # We have URLs so we'll use an http_file repo.
                http_file(
                    name = remote_file_repo,
                    urls = urls,
                    sha256 = file["sha256"],
                    downloaded_file_path = file["name"],
                )
            else:
                # No URLs; use a pypi_file repo (presumably resolves the
                # download URL from the package index at fetch time).
                pypi_file_attrs = dict(
                    name = remote_file_repo,
                    package_name = file["package_name"],
                    package_version = file["package_version"],
                    filename = file["name"],
                    sha256 = file["sha256"],
                )
                if create_tag.pypi_index:
                    pypi_file_attrs["pypi_index"] = create_tag.pypi_index

                pypi_file(**pypi_file_attrs)

            repo_remote_files[key] = remote_file_label
            all_remote_files[key] = remote_file_label

        package_repo(
            name = repo_name,
            resolved_lock_file = lock_file,
            repo_map = repo_remote_files,
        )

    # The result depends only on the lock files and tags, so mark the
    # extension reproducible where the running Bazel supports it.
    if bazel_features.external_deps.extension_metadata_has_reproducible:
        return module_ctx.extension_metadata(reproducible = True)
    return module_ctx.extension_metadata()

# Tag classes
_create_tag = tag_class(
    doc = "Create declared Pycross repos.",
    attrs = CREATE_REPOS_ATTRS,
)

lock_repos = module_extension(
    implementation = _lock_repos_impl,
    tag_classes = dict(
        create = _create_tag,
    ),
)
# buildifier: disable=print
def _print_warn(msg):
    # Module extensions have no logging API; a buildifier-exempted print
    # is the conventional way to surface warnings.
    print("WARNING:", msg)

def _pycross_impl(module_ctx):
    """Configure rules_pycross' internal repositories.

    Picks the first configure_environments, configure_interpreter and
    configure_toolchains tag found (only root-module and rules_pycross
    tags are considered; others are ignored with a warning), then
    instantiates the pinned pypi dependency repos and the internal repo.

    NOTE(review): assumes each of the three tags is supplied by at least
    one eligible module (rules_pycross presumably declares defaults). If
    e.g. no configure_interpreter tag existed, the attribute accesses on
    `interpreter_tag` below would fail with an unhelpful error — confirm.
    """
    environments_tag = None
    interpreter_tag = None
    toolchains_tag = None

    for module in module_ctx.modules:
        if module.name != "rules_pycross" and not module.is_root:
            _print_warn("Ignoring `pycross` extension usage from non-root, non-rules_pycross module {}".format(module.name))
            continue

        # First matching tag wins for each of the three categories.
        if not environments_tag:
            for tag in module.tags.configure_environments:
                environments_tag = tag
                break

        if not interpreter_tag:
            for tag in module.tags.configure_interpreter:
                interpreter_tag = tag
                break

        if not toolchains_tag:
            for tag in module.tags.configure_toolchains:
                toolchains_tag = tag
                break

    python_interpreter_target = None
    python_defs_file = None

    if interpreter_tag.python_interpreter_target:
        python_interpreter_target = interpreter_tag.python_interpreter_target

    if interpreter_tag.python_defs_file:
        python_defs_file = interpreter_tag.python_defs_file

    if not python_interpreter_target or not python_defs_file:
        fail(
            "Both python_interpreter_target and python_defs_file must be set",
        )

    # Instantiate all of the pinned pycross dependency repos.
    pypi_all_repositories()

    # Forward every attribute from the environments/toolchains tags
    # verbatim to create_internal_repo.
    environments_attrs = {k: getattr(environments_tag, k) for k in dir(environments_tag)}
    toolchains_attrs = {k: getattr(toolchains_tag, k) for k in dir(toolchains_tag)}

    create_internal_repo(
        python_interpreter_target = python_interpreter_target,
        python_defs_file = python_defs_file,
        wheels = core_files,
        **(environments_attrs | toolchains_attrs)
    )
), + "python_interpreter_target": attr.label( + doc = "The label to a python executable to use for invoking internal tools.", + ), + }, + ), + "configure_toolchains": tag_class( + attrs = REGISTER_TOOLCHAINS_ATTRS, + ), + }, +) diff --git a/python/private/pycross_staging/private/bzlmod/tag_attrs.bzl b/python/private/pycross_staging/private/bzlmod/tag_attrs.bzl new file mode 100644 index 0000000000..d1ba9a388f --- /dev/null +++ b/python/private/pycross_staging/private/bzlmod/tag_attrs.bzl @@ -0,0 +1,67 @@ +"""Shared attr definitions""" + +load( + "//pycross/private:lock_attrs.bzl", + _CREATE_ENVIRONMENTS_ATTRS = "CREATE_ENVIRONMENTS_ATTRS", + _CREATE_REPOS_ATTRS = "CREATE_REPOS_ATTRS", + _PDM_IMPORT_ATTRS = "PDM_IMPORT_ATTRS", + _POETRY_IMPORT_ATTRS = "POETRY_IMPORT_ATTRS", + _REGISTER_TOOLCHAINS_ATTRS = "REGISTER_TOOLCHAINS_ATTRS", +) + +# Attrs common to all tags +COMMON_ATTRS = dict( + repo = attr.string( + doc = "The repository name", + mandatory = True, + ), +) + +# Attrs common to the import_* tags +COMMON_IMPORT_ATTRS = dict( + default_alias_single_version = attr.bool( + doc = "Generate aliases for all packages that have a single version in the lock file.", + ), + target_environments = attr.label_list( + # TODO: expand doc + doc = "A list of target environment descriptors.", + default = [ + "@pycross_environments//:environments", + ], + ), + local_wheels = attr.label_list( + doc = "A list of local .whl files to consider when processing lock files.", + ), + disallow_builds = attr.bool( + doc = "If True, only pre-built wheels are allowed.", + ), +) + +# Attrs for the package tag +PACKAGE_ATTRS = dict( + name = attr.string( + doc = "The package key (name or name@version).", + mandatory = True, + ), + build_target = attr.label( + doc = "An optional override build target to use when and if this package needs to be built from source.", + ), + always_build = attr.bool( + doc = "If True, don't use pre-built wheels for this package.", + ), + build_dependencies = 
"""Internal extension to create pycross toolchains."""

load(
    "@rules_pycross_internal//:defaults.bzl",
    "register_toolchains",
    default_platforms = "platforms",
    default_python_versions = "python_versions",
)
load("//pycross/private:toolchain_helpers.bzl", "pycross_toolchains_repo")

def _toolchains_impl(module_ctx):
    """Create the pycross toolchains repo (at most one tag is allowed).

    When `register_toolchains` from @rules_pycross_internal is falsy, an
    empty placeholder repo is created instead so the repo name stays
    resolvable.
    """
    creator = None
    for module in module_ctx.modules:
        for tag in module.tags.create_for_python_toolchains:
            # This extension is a singleton.
            if creator:
                fail("toolchains.create_for_python_toolchains already called by module {}".format(creator))

            creator = module.name
            if register_toolchains:
                pycross_toolchains_repo(
                    name = tag.name,
                    # NOTE(review): names of the rules_python toolchain
                    # repos are hard-coded here — confirm they match the
                    # registered python toolchains.
                    python_toolchains_repo = "@python_versions",
                    pythons_hub_repo = "@pythons_hub",
                    requested_python_versions = tag.python_versions,
                    platforms = tag.platforms,
                )
            else:
                _empty_repo(name = tag.name)

toolchains = module_extension(
    doc = "Create toolchains.",
    implementation = _toolchains_impl,
    tag_classes = {
        "create_for_python_toolchains": tag_class(
            attrs = {
                "name": attr.string(),
                "platforms": attr.string_list(default = default_platforms),
                "python_versions": attr.string_list(default = default_python_versions),
            },
        ),
    },
)

# Placeholder repo used when toolchain registration is disabled.
_empty_repo = repository_rule(
    implementation = lambda repository_ctx: repository_ctx.file("BUILD.bazel"),
)
# Since we're calling an external build system we can't support some
# features that may be enabled on the toolchain - so we disable
# them here when configuring the toolchain flags to pass to the external
# build system.
CC_DISABLED_FEATURES = [
    "fdo_instrument",
    "fdo_optimize",
    "layering_check",
    "module_maps",
    "thin_lto",
]

def _configure_features(ctx, cc_toolchain):
    # Build a feature_configuration with the unsupported features
    # disabled on top of whatever the rule itself disables.
    disabled_features = ctx.disabled_features + CC_DISABLED_FEATURES
    if not ctx.coverage_instrumented():
        # In coverage mode, cc_common.configure_features() adds coverage related flags,
        # such as --coverage to the compiler and linker. However, if this library is not
        # instrumented, we don't need to pass those flags, and avoid unnecessary rebuilds.
        disabled_features.append("coverage")
    return cc_common.configure_features(
        ctx = ctx,
        cc_toolchain = cc_toolchain,
        requested_features = ctx.features,
        unsupported_features = disabled_features,
    )

def _defines_from_deps(ctx):
    # Transitive preprocessor defines from all CcInfo-bearing deps.
    return depset(transitive = [dep[CcInfo].compilation_context.defines for dep in getattr(ctx.attr, "deps", []) if CcInfo in dep])

def get_env_vars(ctx):
    """Returns environment variables for C tools

    Args:
        ctx: rule context
    Returns:
        environment variables
    """
    cc_toolchain = find_cpp_toolchain(ctx)
    feature_configuration = _configure_features(
        ctx = ctx,
        cc_toolchain = cc_toolchain,
    )
    copts = getattr(ctx.attr, "copts", [])

    # Merge env vars of the compile and both link actions; later actions
    # overwrite duplicate keys.
    action_names = [
        ACTION_NAMES.c_compile,
        ACTION_NAMES.cpp_link_static_library,
        ACTION_NAMES.cpp_link_executable,
    ]

    vars = dict()
    for action_name in action_names:
        vars.update(cc_common.get_environment_variables(
            feature_configuration = feature_configuration,
            action_name = action_name,
            variables = cc_common.create_compile_variables(
                feature_configuration = feature_configuration,
                cc_toolchain = cc_toolchain,
                user_compile_flags = copts,
            ),
        ))
    return vars

def get_tools_info(ctx):
    """Takes information about tools paths from cc_toolchain, returns CxxToolsInfo

    Args:
        ctx: rule context

    Returns:
        CxxToolsInfo: tool paths resolved through the configured features
    """
    cc_toolchain = find_cpp_toolchain(ctx)
    feature_configuration = _configure_features(
        ctx = ctx,
        cc_toolchain = cc_toolchain,
    )

    return CxxToolsInfo(
        cc = cc_common.get_tool_for_action(
            feature_configuration = feature_configuration,
            action_name = ACTION_NAMES.c_compile,
        ),
        cxx = cc_common.get_tool_for_action(
            feature_configuration = feature_configuration,
            action_name = ACTION_NAMES.cpp_compile,
        ),
        cxx_linker_static = cc_common.get_tool_for_action(
            feature_configuration = feature_configuration,
            action_name = ACTION_NAMES.cpp_link_static_library,
        ),
        cxx_linker_executable = cc_common.get_tool_for_action(
            feature_configuration = feature_configuration,
            action_name = ACTION_NAMES.cpp_link_executable,
        ),
    )
cc_common.create_compile_variables( + feature_configuration = feature_configuration, + cc_toolchain = cc_toolchain_, + preprocessor_defines = defines, + ), + ), + cxx = cc_common.get_memory_inefficient_command_line( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.cpp_compile, + variables = cc_common.create_compile_variables( + feature_configuration = feature_configuration, + cc_toolchain = cc_toolchain_, + preprocessor_defines = defines, + add_legacy_cxx_options = True, + ), + ), + cxx_linker_shared = cc_common.get_memory_inefficient_command_line( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.cpp_link_dynamic_library, + variables = cc_common.create_link_variables( + cc_toolchain = cc_toolchain_, + feature_configuration = feature_configuration, + is_using_linker = True, + is_linking_dynamic_library = True, + must_keep_debug = False, + ), + ), + cxx_linker_static = cc_common.get_memory_inefficient_command_line( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.cpp_link_static_library, + variables = cc_common.create_link_variables( + cc_toolchain = cc_toolchain_, + feature_configuration = feature_configuration, + is_using_linker = False, + is_linking_dynamic_library = False, + must_keep_debug = False, + output_file = link_output_file, + ), + ), + cxx_linker_executable = cc_common.get_memory_inefficient_command_line( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.cpp_link_executable, + variables = cc_common.create_link_variables( + cc_toolchain = cc_toolchain_, + feature_configuration = feature_configuration, + is_using_linker = True, + is_linking_dynamic_library = False, + must_keep_debug = False, + ), + ), + ) + return CxxFlagsInfo( + cc = _convert_flags(cc_toolchain_.compiler, _add_if_needed(flags.cc, copts)), + cxx = _convert_flags(cc_toolchain_.compiler, _add_if_needed(flags.cxx, cxxopts)), + cxx_linker_shared = 
def _convert_flags(compiler, flags):
    """Rewrites flags depending on the provided compiler.

    MSYS2 may convert leading slashes to the absolute path of the msys root
    directory, even if MSYS_NO_PATHCONV=1 and MSYS2_ARG_CONV_EXCL="*".
    E.g. MSYS2 may convert "/nologo" to "C:/msys64/nologo".
    Therefore, as MSVC tool flags can start with either a slash or dash,
    convert slashes to dashes. Note that every "/" in a slash-leading flag
    is replaced, not just the leading one.

    Args:
        compiler: The target compiler, e.g. gcc, msvc-cl, mingw-gcc
        flags: The flags to convert

    Returns:
        list: The converted flags
    """
    if compiler == "msvc-cl":
        return [flag.replace("/", "-") if flag.startswith("/") else flag for flag in flags]
    return flags

def _add_if_needed(arr, add_arr):
    """Return `arr` plus the elements of `add_arr` not already in `arr`.

    Duplicates *within* `add_arr` itself are preserved, matching the
    previous nested-loop implementation; only membership in `arr` filters
    an element out.
    """
    # Idiom: a comprehension replaces the original manual search loop.
    return arr + [item for item in add_arr if item not in arr]

def absolutize_path_in_str(workspace_name, root_str, text, force = False):
    """Replaces relative paths in [the middle of] 'text', prepending them with 'root_str'. If there is nothing to replace, returns the 'text'.

    We only will replace relative paths starting with either 'external/' or
    '<workspace_name>/', because we only want to point with absolute paths to
    external repositories or inside our current workspace. (And also to limit
    the possibility of error with such not exact replacing.)

    Args:
        workspace_name: workspace name
        root_str: the text to prepend to the found relative path
        text: the text to do replacement in
        force: If true, 'root_str' is also prepended to a 'text' that
            matched neither prefix, unless 'text' is already an absolute
            unix or windows path

    Returns:
        string: A formatted string
    """
    new_text = _prefix(text, "external/", root_str)
    if new_text == text:
        new_text = _prefix(text, workspace_name + "/", root_str)

    # Check to see if the text is already absolute on a unix and windows system
    is_already_absolute = text.startswith("/") or \
                          (len(text) > 2 and text[0].isalpha() and text[1] == ":")

    # absolutize relative by adding our working directory
    # this works because we run on windows under msys now
    if force and new_text == text and not is_already_absolute:
        new_text = root_str + "/" + text

    return new_text

def _prefix(text, from_str, prefix):
    """Insert `prefix` before the first occurrence of `from_str` in `text`.

    Returns `text` unchanged when `from_str` is absent or when it is
    already preceded by a '/' (i.e. part of a longer path).
    """
    (before, middle, after) = text.partition(from_str)
    if not middle or before.endswith("/"):
        return text
    return before + prefix + middle + after

def get_headers(ccinfo):
    """Returns a struct containing headers and include_dirs for the given CcInfo.

    Headers that live under one of the include directories are dropped,
    since the include directory already exposes them.

    Args:
        ccinfo: The CcInfo provider

    Returns:
        struct: A struct containing headers and include_dirs.
    """
    compilation_info = ccinfo.compilation_context
    include_dirs = compilation_info.system_includes.to_list() + \
                   compilation_info.includes.to_list()

    # do not use quote includes, currently they do not contain
    # library-specific information
    include_dirs = collections.uniq(include_dirs)
    headers = []
    for header in compilation_info.headers.to_list():
        path = header.path
        included = False
        for dir_ in include_dirs:
            if path.startswith(dir_):
                included = True
                break
        if not included:
            headers.append(header)
    return struct(
        headers = headers,
        include_dirs = include_dirs,
    )
+ """ + all_libraries = [] + + def add(lib): + if lib: + all_libraries.append(lib) + + for li in ccinfo.linking_context.linker_inputs.to_list(): + for library_to_link in li.libraries: + add(library_to_link.static_library) + add(library_to_link.pic_static_library) + add(library_to_link.resolved_symlink_dynamic_library or library_to_link.dynamic_library) + add(library_to_link.resolved_symlink_interface_library or library_to_link.interface_library) + return all_libraries diff --git a/python/private/pycross_staging/private/internal_repo.bzl b/python/private/pycross_staging/private/internal_repo.bzl new file mode 100644 index 0000000000..422f9bb3c5 --- /dev/null +++ b/python/private/pycross_staging/private/internal_repo.bzl @@ -0,0 +1,203 @@ +"""Internal repo""" + +load("@bazel_skylib//lib:shell.bzl", "shell") +load(":lock_attrs.bzl", "CREATE_ENVIRONMENTS_ATTRS", "REGISTER_TOOLCHAINS_ATTRS") +load(":repo_venv_utils.bzl", "create_venv", "get_venv_python_executable", "install_venv_wheels") + +INTERNAL_REPO_NAME = "rules_pycross_internal" +LOCK_FILES = { + "build": "//pycross/private:pycross_deps_build.lock.bzl", + "core": "//pycross/private:pycross_deps_core.lock.bzl", + "repairwheel": "//pycross/private:pycross_deps_repairwheel.lock.bzl", +} + +_deps_build = """\ +package(default_visibility = ["//visibility:public"]) + +load("{lock}", "targets") + +targets() +""" + +_root_build = """\ +package(default_visibility = ["//visibility:public"]) + +alias( + name = "installer_whl", + actual = "{installer_whl}", +) + +exports_files([ + "defaults.bzl", + "python.bzl", +]) +""" + +_python_bzl = """\ +load("@rules_python//python:defs.bzl", _py_library = "py_library") +load("{python_defs}", _py_binary = "py_binary", _py_test = "py_test") + +py_binary = _py_binary +py_library = _py_library +py_test = _py_test +""" + +def exec_internal_tool(rctx, tool, args, *, flagfile_param = "--flagfile", flagfile_threshold = 1000, quiet = False): + """ + Execute a script under 
//pycross/private/tools.
+
+    Args:
+        rctx: repository context
+        tool: the script to execute
+        args: a list of args to pass to the script
+        flagfile_param: the parameter name used when dumping arguments to a flag file
+        flagfile_threshold: use a flag file if len(args) >= this value
+        quiet: The quiet value to pass to rctx.execute.
+
+    Returns:
+        exec_result
+    """
+    venv_path = rctx.path(Label("@{}//exec_venv:BUILD.bazel".format(INTERNAL_REPO_NAME))).dirname
+    python_exe = get_venv_python_executable(venv_path)
+
+    # Setup the flagfile if necessary
+    flagfile = None
+    if flagfile_param and len(args) >= flagfile_threshold:
+        flagfile_data = "\n".join([shell.quote(str(arg)) for arg in args])
+        flagfile = rctx.path("_internal_flagfile_%s.params" % hash(flagfile_data))
+        if flagfile.exists:
+            rctx.delete(flagfile)
+        rctx.file(flagfile, flagfile_data)
+        # Use the caller-provided flag name; previously this hard-coded
+        # "--flagfile", silently ignoring the flagfile_param argument.
+        tool_args = [flagfile_param, str(flagfile)]
+    else:
+        tool_args = args
+
+    all_args = [str(python_exe), str(rctx.path(tool))] + tool_args
+    result = rctx.execute(all_args, quiet = quiet)
+
+    # Clean up the flagfile
+    if flagfile and flagfile.exists:
+        rctx.delete(flagfile)
+
+    if result.return_code:
+        fail("Internal command failed: {}\n{}".format(all_args, result.stderr))
+
+    return result
+
+def _get_python_interpreter_attr(rctx):
+    """A helper function for getting the `python_interpreter` attribute or its default.
+
+    Args:
+        rctx (repository_ctx): Handle to the rule repository context.
+
+    Returns:
+        str: The attribute value or its default.
+    """
+    if rctx.attr.python_interpreter:
+        return rctx.attr.python_interpreter
+
+    if "win" in rctx.os.name:
+        return "python.exe"
+    else:
+        return "python3"
+
+def _resolve_python_interpreter(rctx):
+    """Helper function to find the python interpreter from the common attributes
+
+    Args:
+        rctx: Handle to the rule repository context.
+
+    Returns:
+        Python interpreter path.
+ """ + python_interpreter = _get_python_interpreter_attr(rctx) + + if rctx.attr.python_interpreter_target != None: + python_interpreter = rctx.path(rctx.attr.python_interpreter_target) + elif "/" not in python_interpreter: + found_python_interpreter = rctx.which(python_interpreter) + if not found_python_interpreter: + fail("python interpreter `{}` not found in PATH".format(python_interpreter)) + python_interpreter = found_python_interpreter + + return python_interpreter.realpath + +def _installer_whl(wheels): + for label, name in wheels.items(): + if name.startswith("installer-"): + return label + fail("Unable to find `installer` wheel in lock file.") + +def _pip_whl(wheels): + for label, name in wheels.items(): + if name.startswith("pip-"): + return label + fail("Unable to find `pip` wheel in lock file.") + +def _defaults_bzl(rctx): + lines = [] + for key in CREATE_ENVIRONMENTS_ATTRS | REGISTER_TOOLCHAINS_ATTRS: + val = getattr(rctx.attr, key) + + lines.append("{} = {}".format(key, repr(val))) + + return "\n".join(lines) + "\n" + +def _pycross_internal_repo_impl(rctx): + python_executable = _resolve_python_interpreter(rctx) + wheel_paths = sorted([rctx.path(w) for w in rctx.attr.wheels.keys()], key = lambda k: str(k)) + pycross_path = rctx.path(Label("//:BUILD.bazel")).dirname + + venv_path = rctx.path("exec_venv") + pip_whl = _pip_whl(rctx.attr.wheels) + if rctx.attr.install_wheels: + create_venv(rctx, python_executable, venv_path, [pycross_path]) + install_venv_wheels(rctx, venv_path, pip_whl, wheel_paths) + else: + create_venv(rctx, python_executable, venv_path, [pycross_path] + wheel_paths) + + # All deps + rctx.file( + "deps/BUILD.bazel", + _deps_build.format(lock = Label("//pycross/private:pycross_deps.lock.bzl")), + ) + + # python.bzl + if rctx.attr.python_defs_file: + python_defs = rctx.attr.python_defs_file + else: + python_defs = Label("@rules_python//python:defs.bzl") + rctx.file("python.bzl", _python_bzl.format(python_defs = python_defs)) + + # 
defaults.bzl + rctx.file("defaults.bzl", _defaults_bzl(rctx)) + + # Root build file + rctx.file("BUILD.bazel", _root_build.format(installer_whl = _installer_whl(rctx.attr.wheels))) + +pycross_internal_repo = repository_rule( + implementation = _pycross_internal_repo_impl, + attrs = { + "install_wheels": attr.bool( + default = True, + ), + "python_defs_file": attr.label( + allow_single_file = True, + ), + "python_interpreter": attr.string(), + "python_interpreter_target": attr.label( + allow_single_file = True, + ), + "wheels": attr.label_keyed_string_dict( + mandatory = True, + allow_files = [".whl"], + ), + } | CREATE_ENVIRONMENTS_ATTRS | REGISTER_TOOLCHAINS_ATTRS, +) + +def create_internal_repo(wheels = {}, **kwargs): + pycross_internal_repo( + name = INTERNAL_REPO_NAME, + wheels = {wheel_label: wheel_name for wheel_name, wheel_label in wheels.items()}, + **kwargs + ) diff --git a/python/private/pycross_staging/private/interpreter_version.bzl b/python/private/pycross_staging/private/interpreter_version.bzl new file mode 100644 index 0000000000..e302beadd4 --- /dev/null +++ b/python/private/pycross_staging/private/interpreter_version.bzl @@ -0,0 +1,39 @@ +"""Provides a config flag that returns the micro-level version of the selected rules_python toolchain.""" + +load("@rules_python//python:versions.bzl", "TOOL_VERSIONS") + +def _rules_python_interpreter_version_impl(ctx): + return [ + config_common.FeatureFlagInfo(value = ctx.attr.version), + ] + +_rules_python_interpreter_version = rule( + implementation = _rules_python_interpreter_version_impl, + attrs = { + "version": attr.string(mandatory = True), + }, +) + +def rules_python_interpreter_version(name, default_version, **kwargs): + """Builds a target that returns the currently-selected rules_pycross toolchain version. 
+ + This value can be used in a config_setting; e.g., + config_setting( + name = "foo", + flag_values = { + "@rules_pycross//pycross/private:rules_python_interpreter_version": "3.12.0", + }, + ) + """ + + selects = { + "@rules_python//python/config_settings:is_python_%s" % version: version + for version in sorted(TOOL_VERSIONS) + } + selects["//conditions:default"] = default_version + + _rules_python_interpreter_version( + name = name, + version = select(selects), + **kwargs + ) diff --git a/python/private/pycross_staging/private/lock_attrs.bzl b/python/private/pycross_staging/private/lock_attrs.bzl new file mode 100644 index 0000000000..977671950a --- /dev/null +++ b/python/private/pycross_staging/private/lock_attrs.bzl @@ -0,0 +1,252 @@ +"""Common attr handling for things that generate lock files.""" + +load(":util.bzl", "BZLMOD") + +DEFAULT_MACOS_VERSION = "12.0" + +# Use https://github.com/mayeut/pep600_compliance to keep this reasonable. +DEFAULT_GLIBC_VERSION = "2.28" + +CREATE_ENVIRONMENTS_ATTRS = dict( + python_versions = attr.string_list( + doc = ( + "The list of Python versions to support in by default in Pycross builds. " + + "These strings will be X.Y or X.Y.Z depending on how versions were registered " + + "with rules_python. By default all registered versions are supported." + ), + ), + platforms = attr.string_list( + doc = ( + "The list of Python platforms to support in by default in Pycross builds. " + + "See https://github.com/bazelbuild/rules_python/blob/main/python/versions.bzl " + + "for the list of supported platforms per Python version. By default all supported " + + "platforms for each registered version are supported." + ), + ), + glibc_version = attr.string( + doc = ( + "The maximum glibc version to accept for Bazel platforms that match the " + + "@platforms//os:linux constraint. Must be in the format '2.X', and greater than 2.5. " + + "All versions from 2.5 through this version will be supported. 
For example, if this " + + "value is set to 2.15, wheels tagged manylinux_2_5, manylinux_2_6, ..., " + + "manylinux_2_15 will be accepted. Defaults to '{}' if unspecified.".format(DEFAULT_GLIBC_VERSION) + ), + ), + macos_version = attr.string( + doc = ( + "The maximum macOS version to accept for Bazel platforms that match the " + + "@platforms//os:osx constraint. Must be in the format 'X.Y' with X >= 10. " + + "All versions from 10.4 through this version will be supported. For example, if this " + + "value is set to 12.0, wheels tagged macosx_10_4, macosx_10_5, ..., macosx_11_0, " + + "macosx_12_0 will be accepted. Defaults to '{}' if unspecified.".format(DEFAULT_MACOS_VERSION) + ), + ), +) + +REGISTER_TOOLCHAINS_ATTRS = dict( + register_toolchains = attr.bool( + doc = "Register toolchains for all rules_python-registered interpreters.", + default = True, + ), +) + +RESOLVE_ATTRS = dict( + target_environments = attr.label_list( + doc = "A list of pycross_target_environment labels.", + allow_files = [".json"], + ), + local_wheels = attr.label_list( + doc = "A list of wheel files.", + allow_files = [".whl"], + ), + remote_wheels = attr.string_dict( + doc = "A mapping of remote wheels to their sha256 hashes.", + ), + default_alias_single_version = attr.bool( + doc = "Generate aliases for all packages that have a single version in the lock file.", + ), + annotations = attr.string_dict( + doc = "Optional annotations to apply to packages.", + ), + disallow_builds = attr.bool( + doc = "Do not allow pycross_wheel_build targets in the final lock file (i.e., require wheels).", + ), + always_include_sdist = attr.bool( + doc = "Always include an entry for a package's sdist if one exists.", + ), +) + +CREATE_REPOS_ATTRS = dict( + pypi_index = attr.string( + doc = "The PyPI-compatible index to use (must support the JSON API).", + ), +) + +RENDER_ATTRS = dict( + repo_prefix = attr.string( + doc = "The prefix to apply to repository targets. 
Defaults to the lock file target name.", + default = "", + ), + generate_file_map = attr.bool( + doc = "Generate a FILES dict containing a mapping of filenames to repo labels.", + ), +) | CREATE_REPOS_ATTRS + +PDM_IMPORT_ATTRS = dict( + lock_file = attr.label( + doc = "The pdm.lock file.", + allow_single_file = True, + mandatory = True, + ), + project_file = attr.label( + doc = "The pyproject.toml file.", + allow_single_file = True, + mandatory = True, + ), + default = attr.bool( + doc = "Whether to install dependencies from the default group.", + default = True, + ), + optional_groups = attr.string_list( + doc = "List of optional dependency groups to install.", + ), + all_optional_groups = attr.bool( + doc = "Install all optional dependencies.", + ), + development_groups = attr.string_list( + doc = "List of development dependency groups to install.", + ), + all_development_groups = attr.bool( + doc = "Install all dev dependencies.", + ), + require_static_urls = attr.bool( + doc = "Require that the lock file is created with --static-urls.", + default = True, + ), +) + +POETRY_IMPORT_ATTRS = dict( + lock_file = attr.label( + doc = "The poetry.lock file.", + allow_single_file = True, + mandatory = True, + ), + project_file = attr.label( + doc = "The pyproject.toml file.", + allow_single_file = True, + mandatory = True, + ), +) + +def handle_resolve_attrs(attrs, environment_files_and_labels, local_wheel_names_and_labels): + """ + Parse resolve attrs and return a list of arguments. + + Args: + attrs: ctx.attr or repository_ctx.attr + environment_files_and_labels: a list of 2-tuples, each containing an + environment file and its corresponding label. + local_wheel_names_and_labels: a list of 2-tuples, each containing an + wheel name and its corresponding label. + + Returns: + a list of arguments. 
+ """ + args = [] + + for env_file, env_label in environment_files_and_labels: + args.extend(["--target-environment", env_file, env_label]) + + for remote_wheel_url, sha256 in attrs.remote_wheels.items(): + args.extend(["--remote-wheel", remote_wheel_url, sha256]) + + if attrs.default_alias_single_version: + args.append("--default-alias-single-version") + + if attrs.disallow_builds: + args.append("--disallow-builds") + + if attrs.always_include_sdist: + args.append("--always-include-sdist") + + for wheel_name, wheel_label in local_wheel_names_and_labels: + args.extend(["--local-wheel", wheel_name, wheel_label]) + + return args + +def handle_render_attrs(attrs): + """ + Parse render attrs and return a list of arguments. + + Args: + attrs: ctx.attr or repository_ctx.attr + + Returns: + a list of arguments. + """ + + # If building locks for pycross itself, we don't want a repo name prefix on labels in the + # generated .bzl file. We can figure that out by comparing our workspace against the root workspace. + if Label("@@//:invalid").workspace_name == Label("//:invalid").workspace_name: + pycross_repo_name = "" + elif BZLMOD: + pycross_repo_name = "@@" + Label("//:invalid").workspace_name + else: + pycross_repo_name = "@" + Label("//:invalid").workspace_name + + args = ["--pycross-repo-name", pycross_repo_name] + + if attrs.repo_prefix: + repo_prefix = attrs.repo_prefix + else: + repo_prefix = attrs.name.lower().replace("-", "_") + + args.extend(["--repo-prefix", repo_prefix]) + + if attrs.generate_file_map: + args.append("--generate-file-map") + + return args + handle_create_repos_attrs(attrs) + +def handle_create_repos_attrs(attrs): + """ + Parse repository materializing attrs and return a list of arguments. + + Args: + attrs: ctx.attr or repository_ctx.attr + + Returns: + a list of arguments. 
+ """ + args = [] + + if attrs.pypi_index: + args.extend(["--pypi-index", attrs.pypi_index]) + + return args + +def package_annotation( + always_build = False, + build_dependencies = [], + build_target = None, + ignore_dependencies = [], + install_exclude_globs = []): + """Annotations to apply to individual packages. + + Args: + always_build (bool, optional): If True, don't use pre-build wheels for this package. + build_dependencies (list, optional): A list of additional package keys (name or name@version) to use when building this package from source. + build_target (str, optional): An optional override build target to use when and if this package needs to be built from source. + ignore_dependencies (list, optional): A list of package keys (name or name@version) to drop from this package's set of declared dependencies. + install_exclude_globs (list, optional): A list of globs for files to exclude during installation. + + Returns: + str: A json encoded string of the provided content. + """ + return json.encode(struct( + always_build = always_build, + build_dependencies = build_dependencies, + build_target = build_target, + ignore_dependencies = ignore_dependencies, + install_exclude_globs = install_exclude_globs, + )) diff --git a/python/private/pycross_staging/private/lock_file.bzl b/python/private/pycross_staging/private/lock_file.bzl new file mode 100644 index 0000000000..c27ddeba2b --- /dev/null +++ b/python/private/pycross_staging/private/lock_file.bzl @@ -0,0 +1,79 @@ +"""Implementation of the pycross_lock_file rule.""" + +load(":lock_attrs.bzl", "RENDER_ATTRS", "RESOLVE_ATTRS", "handle_render_attrs", "handle_resolve_attrs") +load(":util.bzl", "BZLMOD") + +def fully_qualified_label(ctx, label): + prefix = "@@" if BZLMOD else "@" + return "%s%s//%s:%s" % (prefix, label.workspace_name or ctx.workspace_name, label.package, label.name) + +def _pycross_lock_file_impl(ctx): + out = ctx.outputs.out + + args = ctx.actions.args().use_param_file("--flagfile=%s") + + 
args.add("--lock-model-file", ctx.file.lock_model_file) + args.add("--output", out) + + annotations = {p: json.decode(a) for p, a in ctx.attr.annotations.items()} + annotations_file = ctx.actions.declare_file("{}.annotations.json".format(ctx.attr.name)) + ctx.actions.write(annotations_file, json.encode(annotations)) + args.add("--annotations-file", annotations_file) + + def qualify(label): + if ctx.attr.fully_qualified_environment_labels: + return fully_qualified_label(ctx, label) + else: + return label + + def whl_name_and_label(whl_file): + if not whl_file.owner: + fail("Could not determine owning label for local wheel: %s" % whl_file) + return whl_file.basename, whl_file.owner + + environment_files_and_labels = [(t.path, qualify(t.owner)) for t in ctx.files.target_environments] + wheel_names_and_labels = [whl_name_and_label(f) for f in ctx.files.local_wheels] + args.add_all(handle_resolve_attrs(ctx.attr, environment_files_and_labels, wheel_names_and_labels)) + args.add_all(handle_render_attrs(ctx.attr)) + + ctx.actions.run( + inputs = ( + ctx.files.lock_model_file + + ctx.files.target_environments + + [annotations_file] + ), + outputs = [out], + executable = ctx.executable._tool, + arguments = [args], + ) + + return [ + DefaultInfo( + files = depset([out]), + ), + ] + +pycross_lock_file = rule( + implementation = _pycross_lock_file_impl, + attrs = dict( + lock_model_file = attr.label( + doc = "The lock model JSON file.", + allow_single_file = [".json"], + mandatory = True, + ), + fully_qualified_environment_labels = attr.bool( + doc = "Generate fully-qualified environment labels.", + default = True, + ), + out = attr.output( + doc = "The output file.", + mandatory = True, + ), + _tool = attr.label( + default = Label("//pycross/private/tools:bzl_lock_generator"), + cfg = "exec", + executable = True, + ), + **(RENDER_ATTRS | RESOLVE_ATTRS) + ), +) diff --git a/python/private/pycross_staging/private/lock_file_repo.bzl 
b/python/private/pycross_staging/private/lock_file_repo.bzl new file mode 100644 index 0000000000..d2de46a1a9 --- /dev/null +++ b/python/private/pycross_staging/private/lock_file_repo.bzl @@ -0,0 +1,54 @@ +"""Implementation of the pycross_lock_file_repo rule.""" + +def _pycross_lock_file_repo_impl(rctx): + lock_file_label = rctx.attr.lock_file + + rctx.file(rctx.path("requirements.bzl"), """\ +load("{lock_file}", "PINS", "repositories") + +def requirement(pkg): + # Convert given name into normalized package name. + # https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization + pkg = pkg.replace("_", "-").replace(".", "-").lower() + for i in range(len(pkg)): + if "--" in pkg: + pkg = pkg.replace("--", "-") + else: + break + return "@{repo_name}//deps:%s" % pkg + +all_requirements = ["@{repo_name}//deps:%s" % v for v in PINS.values()] + +install_deps = repositories +""".format(lock_file = lock_file_label, repo_name = rctx.attr.name)) + + rctx.file( + rctx.path("BUILD.bazel"), + """\ +package(default_visibility = ["//visibility:public"]) + +exports_files(["requirements.bzl"]) +""", + ) + + rctx.file( + rctx.path("deps/BUILD.bazel"), + """\ +package(default_visibility = ["//visibility:public"]) + +load("{lock_file}", "targets") + +targets() +""".format(lock_file = lock_file_label), + ) + +pycross_lock_file_repo = repository_rule( + implementation = _pycross_lock_file_repo_impl, + attrs = { + "lock_file": attr.label( + doc = "The generated bzl lock file.", + allow_single_file = [".bzl"], + mandatory = True, + ), + }, +) diff --git a/python/private/pycross_staging/private/lock_repo.bzl b/python/private/pycross_staging/private/lock_repo.bzl new file mode 100644 index 0000000000..a611137fc7 --- /dev/null +++ b/python/private/pycross_staging/private/lock_repo.bzl @@ -0,0 +1,31 @@ +"""Implementation of the pycross_lock_repo macro.""" + +load(":lock_attrs.bzl", "CREATE_REPOS_ATTRS", "RESOLVE_ATTRS") +load(":package_repo.bzl", "package_repo") 
+load(":resolved_lock_repo.bzl", "resolved_lock_repo") + +def pycross_lock_repo(*, name, lock_model, **kwargs): + """Create a repo containing packages described by an imported lock. + + Args: + name: the repo name. + lock_model: the serialized lock model struct. Use `lock_repo_model_pdm` or `lock_repo_model_poetry`. + **kwargs: additional args to pass to `resolved_lock_repo` and `package_repo`. + """ + + render_args = {} + resolve_args = {"lock_model": lock_model} + for arg in list(kwargs): + if arg in CREATE_REPOS_ATTRS: + render_args[arg] = kwargs.pop(arg) + elif arg in RESOLVE_ATTRS: + resolve_args[arg] = kwargs.pop(arg) + + if kwargs: + fail("Unexpected args: {}".format(kwargs)) + + resolved_repo_name = name + "_resolved" + resolved_lock_label = "@{}//:lock.json".format(resolved_repo_name) + + resolved_lock_repo(name = resolved_repo_name, **resolve_args) + package_repo(name = name, resolved_lock_file = resolved_lock_label, write_install_deps = True) diff --git a/python/private/pycross_staging/private/package_repo.bzl b/python/private/pycross_staging/private/package_repo.bzl new file mode 100644 index 0000000000..e835d4af00 --- /dev/null +++ b/python/private/pycross_staging/private/package_repo.bzl @@ -0,0 +1,224 @@ +"""An internal repo rule that wraps a pycross lock structure. + +The file structure is as follows: +- WORKSPACE.bazel - The workspace root marker. +- BUILD.bazel - The root build file. +- defs.bzl - A defs file that provides an `install_deps` macro in some contexts. May be empty. +- requirements.bzl - A defs file that provides the traditional `requirement` and `all_requirements`. +- _lock/BUILD.bazel - Contains instantiations of all of the definitions in `lock.bzl`. +- _lock/lock.bzl - The rendered lock file. This is where most of the "meat" is. +- _sdist/BUILD.bazel - Version-aware aliases to package sdist targets. +- _wheel/BUILD.bazel - Version-aware aliases to package wheel targets. +- /BUILD.bazel - Contains aliases to targets under //_lock. 
Most notably, an alias named + is what most people will want to import. + +From a target perspective: +- //:package - The pycross_wheel_library target. +- //package:sdist - The package's sdist file. +- //package:wheel - The package's wheel file. +- //_sdist:package@version - The sdist for a specific version of package. +- //_wheel:package@version - The wheel for a specific version of package. + +The idea is that, for a repo named "pypi", something will depend on e.g. `@pypi//:numpy` or `@pypi//:pandas`. + +The package names in the root of the repo are all normalized per +https://packaging.python.org/en/latest/specifications/name-normalization. +""" + +load("@bazel_skylib//lib:paths.bzl", "paths") +load(":internal_repo.bzl", "exec_internal_tool") +load(":lock_attrs.bzl", "CREATE_REPOS_ATTRS", "handle_create_repos_attrs") + +_install_deps_bzl = """\ +load("//_lock:lock.bzl", _install_deps = "repositories") + +install_deps = _install_deps +""" + +_workspace = """\ +# DO NOT EDIT: automatically generated WORKSPACE file for package_repo rule +workspace(name = "{repo_name}") +""" + +_lock_build = """\ +package(default_visibility = ["//:__subpackages__"]) + +load("//_lock:lock.bzl", "targets") + +targets() +""" + +def _pin_build(package): + package_key = package["key"] + lines = [ + 'package(default_visibility = ["//visibility:public"])', + "", + "alias(", + ' name = "wheel",', + ' actual = "//_lock:_wheel_{}",'.format(package_key), + ")", + "", + ] + + if package.get("sdist_file", {}).get("key"): + lines.extend([ + "alias(", + ' name = "sdist",', + ' actual = "//_lock:_sdist_{}",'.format(package_key), + ")", + "", + ]) + + return "\n".join(lines) + "\n" + +def _wheel_build(packages): + lines = [ + 'package(default_visibility = ["//visibility:public"])', + "", + ] + for pkg in packages: + package_key = pkg["key"] + lines.extend([ + "alias(", + ' name = "{}",'.format(package_key), + ' actual = "//_lock:_wheel_{}",'.format(package_key), + ")", + "", + ]) + + return 
"\n".join(lines) + "\n" + +def _sdist_build(packages): + lines = [ + 'package(default_visibility = ["//visibility:public"])', + "", + ] + for pkg in packages: + package_key = pkg["key"] + lines.extend([ + "alias(", + ' name = "{}",'.format(package_key), + ' actual = "//_lock:_sdist_{}",'.format(package_key), + ")", + "", + ]) + + return "\n".join(lines) + "\n" + +def _root_build(pins): + lines = [ + 'package(default_visibility = ["//visibility:public"])', + "", + 'exports_files(["defs.bzl", "requirements.bzl"])', + "", + ] + + for pin_name, pin_target in pins.items(): + lines.extend([ + "alias(", + ' name = "{}",'.format(pin_name), + ' actual = "//_lock:{}",'.format(pin_target), + ")", + "", + ]) + + return "\n".join(lines) + "\n" + +_requirement_func = """\ +def requirement(pkg): + # Convert given name into normalized package name. + # https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization + pkg = pkg.replace("_", "-").replace(".", "-").lower() + for i in range(len(pkg)): + if "--" in pkg: + pkg = pkg.replace("--", "-") + else: + break + return "@@{repo_name}//:%s" % pkg +""" + +def _requirements_bzl(rctx, pins): + lines = [ + _requirement_func.format(repo_name = rctx.name), + "", + "# All pinned requirements", + "all_requirements = [", + ] + for pin in pins: + lines.append(' "@@{repo_name}//:{pin}",'.format(repo_name = rctx.name, pin = pin)) + lines.append("]") + lines.extend([ + "", + "# All wheel requirements", + "all_whl_requirements = [", + ]) + for pin in pins.values(): + lines.append(' "@@{repo_name}//_wheel:{pin}",'.format(repo_name = rctx.name, pin = pin)) + lines.append("]") + + return "\n".join(lines) + "\n" + +def _generate_lock_bzl(rctx, lock_json_path, lock_bzl_path): + args = [ + "--pycross-repo-name", + "@rules_pycross", + "--no-pins", + "--repo-prefix", + rctx.attr.name.lower().replace("-", "_"), + "--resolved-lock", + lock_json_path, + "--output", + lock_bzl_path, + ] + handle_create_repos_attrs(rctx.attr) + 
+ for file_key, label in rctx.attr.repo_map.items(): + args.extend(["--repo", file_key, label]) + + exec_internal_tool( + rctx, + Label("//pycross/private/tools:resolved_lock_renderer.py"), + args, + ) + +def _package_repo_impl(rctx): + # To ensure that none of the extra files and directories in the root conflict with actual packages, they all + # either contain a period or start with an underscore. This works because Python package names cannot start + # with `_`, and any periods in the name would be replaced with `-` during name normalization. Theoretically + # https://pypi.org/project/workspace/ would conflict with the repo's WORKSPACE file on a case-insensitive + # filesystem, so we instead write WORKSPACE.bazel. A package named `WORKSPACE.bazel` would be normalized to + # `workspace-bazel`. + + lock_json_path = rctx.path(rctx.attr.resolved_lock_file) + lock_bzl_path = rctx.path("_lock/lock.bzl") + + lock = json.decode(rctx.read(lock_json_path)) + packages = lock["packages"].values() + + rctx.file("WORKSPACE.bazel", _workspace.format(repo_name = rctx.name)) + rctx.file("_lock/BUILD.bazel", _lock_build) + rctx.file("_sdist/BUILD.bazel", _sdist_build(packages)) + rctx.file("_wheel/BUILD.bazel", _wheel_build(packages)) + + if rctx.attr.write_install_deps: + rctx.file("defs.bzl", _install_deps_bzl) + else: + rctx.file("defs.bzl") # Empty file + + rctx.file("requirements.bzl", _requirements_bzl(rctx, lock["pins"])) + + _generate_lock_bzl(rctx, lock_json_path, lock_bzl_path) + + for pin, pin_target in lock["pins"].items(): + package = lock["packages"][pin_target] + rctx.file(paths.join(pin, "BUILD.bazel"), _pin_build(package)) + + rctx.file("BUILD.bazel", _root_build(lock["pins"])) + +package_repo = repository_rule( + implementation = _package_repo_impl, + attrs = dict( + resolved_lock_file = attr.label(mandatory = True), + repo_map = attr.string_dict(), + write_install_deps = attr.bool(), + ) | CREATE_REPOS_ATTRS, +) diff --git 
a/python/private/pycross_staging/private/pdm.lock b/python/private/pycross_staging/private/pdm.lock new file mode 120000 index 0000000000..c30b80d010 --- /dev/null +++ b/python/private/pycross_staging/private/pdm.lock @@ -0,0 +1 @@ +../../pdm.lock \ No newline at end of file diff --git a/python/private/pycross_staging/private/pdm_lock_model.bzl b/python/private/pycross_staging/private/pdm_lock_model.bzl new file mode 100644 index 0000000000..bd6ee62b43 --- /dev/null +++ b/python/private/pycross_staging/private/pdm_lock_model.bzl @@ -0,0 +1,110 @@ +"""Implementation of the pycross_pdm_lock_model rule.""" + +load(":internal_repo.bzl", "exec_internal_tool") +load(":lock_attrs.bzl", "PDM_IMPORT_ATTRS") + +TRANSLATOR_TOOL = Label("//pycross/private/tools:pdm_translator.py") + +def _handle_args(attrs, project_file, lock_file, output): + args = [] + args.extend(["--project-file", project_file]) + args.extend(["--lock-file", lock_file]) + args.extend(["--output", output]) + + if attrs.default: + args.append("--default") + + for group in attrs.optional_groups: + args.extend(["--optional-group", group]) + + if attrs.all_optional_groups: + args.append("--all-optional-groups") + + for group in attrs.development_groups: + args.extend(["--development-group", group]) + + if attrs.all_development_groups: + args.append("--all-development-groups") + + if attrs.require_static_urls: + args.append("--require-static-urls") + + return args + +def _pycross_pdm_lock_model_impl(ctx): + out = ctx.actions.declare_file(ctx.attr.name + ".json") + + args = ctx.actions.args().use_param_file("--flagfile=%s") + args.add_all( + _handle_args( + ctx.attr, + ctx.file.project_file.path, + ctx.file.lock_file.path, + out.path, + ), + ) + + ctx.actions.run( + inputs = ( + ctx.files.project_file + + ctx.files.lock_file + ), + outputs = [out], + executable = ctx.executable._tool, + arguments = [args], + ) + + return [ + DefaultInfo( + files = depset([out]), + ), + ] + +pycross_pdm_lock_model = rule( + 
implementation = _pycross_pdm_lock_model_impl, + attrs = { + "_tool": attr.label( + default = Label("//pycross/private/tools:pdm_translator"), + cfg = "exec", + executable = True, + ), + } | PDM_IMPORT_ATTRS, +) + +def lock_repo_model_pdm(*, project_file, lock_file, default = True, optional_groups = [], all_optional_groups = False, development_groups = [], all_development_groups = False, require_static_urls = True): + return json.encode(dict( + model_type = "pdm", + project_file = str(project_file), + lock_file = str(lock_file), + default = default, + optional_groups = optional_groups, + all_optional_groups = all_optional_groups, + development_groups = development_groups, + all_development_groups = all_development_groups, + require_static_urls = require_static_urls, + )) + +def repo_create_pdm_model(rctx, params, output): + """Run the pdm lock translator. + + Args: + rctx: The repository_ctx or module_ctx object. + params: a struct or dict containing the same attrs as the pycross_pdm_lock_model rule. + output: the output file. 
+ """ + if type(params) == "dict": + attrs = struct(**params) + else: + attrs = params + args = _handle_args( + attrs, + str(rctx.path(Label(attrs.project_file))), + str(rctx.path(Label(attrs.lock_file))), + output, + ) + + exec_internal_tool( + rctx, + TRANSLATOR_TOOL, + args, + ) diff --git a/python/private/pycross_staging/private/poetry_lock_model.bzl b/python/private/pycross_staging/private/poetry_lock_model.bzl new file mode 100644 index 0000000000..ffda441ed5 --- /dev/null +++ b/python/private/pycross_staging/private/poetry_lock_model.bzl @@ -0,0 +1,75 @@ +"""Implementation of the pycross_poetry_lock_model rule.""" + +load(":internal_repo.bzl", "exec_internal_tool") +load(":lock_attrs.bzl", "POETRY_IMPORT_ATTRS") + +TRANSLATOR_TOOL = Label("//pycross/private/tools:poetry_translator.py") + +def _pycross_poetry_lock_model_impl(ctx): + out = ctx.actions.declare_file(ctx.attr.name + ".json") + + args = ctx.actions.args().use_param_file("--flagfile=%s") + args.add("--project-file", ctx.file.project_file) + args.add("--lock-file", ctx.file.lock_file) + args.add("--output", out) + + ctx.actions.run( + inputs = ( + ctx.files.project_file + + ctx.files.lock_file + ), + outputs = [out], + executable = ctx.executable._tool, + arguments = [args], + ) + + return [ + DefaultInfo( + files = depset([out]), + ), + ] + +pycross_poetry_lock_model = rule( + implementation = _pycross_poetry_lock_model_impl, + attrs = { + "_tool": attr.label( + default = Label("//pycross/private/tools:poetry_translator"), + cfg = "exec", + executable = True, + ), + } | POETRY_IMPORT_ATTRS, +) + +def lock_repo_model_poetry(*, project_file, lock_file): + return json.encode(dict( + model_type = "poetry", + project_file = str(project_file), + lock_file = str(lock_file), + )) + +def repo_create_poetry_model(rctx, params, output): + """Run the poetry lock translator. + + Args: + rctx: The repository_ctx or module_ctx object. 
+ params: a struct or dict containing the same attrs as the pycross_poetry_lock_model rule. + output: the output file. + """ + if type(params) == "dict": + attrs = struct(**params) + else: + attrs = params + args = [ + "--project-file", + str(rctx.path(Label(attrs.project_file))), + "--lock-file", + str(rctx.path(Label(attrs.lock_file))), + "--output", + output, + ] + + exec_internal_tool( + rctx, + TRANSLATOR_TOOL, + args, + ) diff --git a/python/private/pycross_staging/private/providers.bzl b/python/private/pycross_staging/private/providers.bzl new file mode 100644 index 0000000000..842adfdbd1 --- /dev/null +++ b/python/private/pycross_staging/private/providers.bzl @@ -0,0 +1,9 @@ +"""Pycross providers.""" + +PycrossWheelInfo = provider( + doc = "Information about a Python wheel.", + fields = { + "name_file": "File: A file containing the canonical name of the wheel.", + "wheel_file": "File: The wheel file itself.", + }, +) diff --git a/python/private/pycross_staging/private/pycross_deps.lock.bzl b/python/private/pycross_staging/private/pycross_deps.lock.bzl new file mode 100644 index 0000000000..0229b8608d --- /dev/null +++ b/python/private/pycross_staging/private/pycross_deps.lock.bzl @@ -0,0 +1,393 @@ +# This file is generated by rules_pycross. +# It is not intended for manual editing. 
+"""Pycross-generated dependency targets.""" + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") +load("//pycross:defs.bzl", "pycross_wheel_library") + +PINS = { + "build": "build@1.0.3", + "dacite": "dacite@1.6.0", + "installer": "installer@0.7.0", + "packaging": "packaging@23.2", + "pip": "pip@23.3.1", + "poetry-core": "poetry-core@1.8.1", + "repairwheel": "repairwheel@0.3.1", + "tomli": "tomli@2.0.1", +} + +# buildifier: disable=unnamed-macro +def targets(): + """Generated package targets.""" + + for pin_name, pin_target in PINS.items(): + native.alias( + name = pin_name, + actual = ":" + pin_target, + ) + + native.config_setting( + name = "_env_rules_pycross_deps_target_env", + ) + + # buildifier: disable=unused-variable + _target = select({ + ":_env_rules_pycross_deps_target_env": "//pycross/private:rules_pycross_deps_target_env", + }) + + native.alias( + name = "_wheel_altgraph@0.17.4", + actual = "@rules_pycross_internal_deps_wheel_altgraph_0.17.4_py2.py3_none_any//file", + ) + + pycross_wheel_library( + name = "altgraph@0.17.4", + wheel = ":_wheel_altgraph@0.17.4", + ) + + _build_1_0_3_deps = [ + ":importlib-metadata@7.1.0", + ":packaging@23.2", + ":pyproject-hooks@1.1.0", + ":tomli@2.0.1", + ] + + native.alias( + name = "_wheel_build@1.0.3", + actual = "@rules_pycross_internal_deps_wheel_build_1.0.3_py3_none_any//file", + ) + + pycross_wheel_library( + name = "build@1.0.3", + deps = _build_1_0_3_deps, + wheel = ":_wheel_build@1.0.3", + ) + + native.alias( + name = "_wheel_dacite@1.6.0", + actual = "@rules_pycross_internal_deps_wheel_dacite_1.6.0_py3_none_any//file", + ) + + pycross_wheel_library( + name = "dacite@1.6.0", + wheel = ":_wheel_dacite@1.6.0", + ) + + _delvewheel_1_6_0_deps = [ + ":pefile@2023.2.7", + ] + + native.alias( + name = "_wheel_delvewheel@1.6.0", + actual = "@rules_pycross_internal_deps_wheel_delvewheel_1.6.0_py3_none_any//file", + ) + + 
pycross_wheel_library( + name = "delvewheel@1.6.0", + deps = _delvewheel_1_6_0_deps, + wheel = ":_wheel_delvewheel@1.6.0", + ) + + _importlib_metadata_7_1_0_deps = [ + ":zipp@3.19.2", + ] + + native.alias( + name = "_wheel_importlib-metadata@7.1.0", + actual = "@rules_pycross_internal_deps_wheel_importlib_metadata_7.1.0_py3_none_any//file", + ) + + pycross_wheel_library( + name = "importlib-metadata@7.1.0", + deps = _importlib_metadata_7_1_0_deps, + wheel = ":_wheel_importlib-metadata@7.1.0", + ) + + native.alias( + name = "_wheel_installer@0.7.0", + actual = "@rules_pycross_internal_deps_wheel_installer_0.7.0_py3_none_any//file", + ) + + pycross_wheel_library( + name = "installer@0.7.0", + wheel = ":_wheel_installer@0.7.0", + ) + + _macholib_1_16_3_deps = [ + ":altgraph@0.17.4", + ] + + native.alias( + name = "_wheel_macholib@1.16.3", + actual = "@rules_pycross_internal_deps_wheel_macholib_1.16.3_py2.py3_none_any//file", + ) + + pycross_wheel_library( + name = "macholib@1.16.3", + deps = _macholib_1_16_3_deps, + wheel = ":_wheel_macholib@1.16.3", + ) + + native.alias( + name = "_wheel_packaging@23.2", + actual = "@rules_pycross_internal_deps_wheel_packaging_23.2_py3_none_any//file", + ) + + pycross_wheel_library( + name = "packaging@23.2", + wheel = ":_wheel_packaging@23.2", + ) + + native.alias( + name = "_wheel_pefile@2023.2.7", + actual = "@rules_pycross_internal_deps_wheel_pefile_2023.2.7_py3_none_any//file", + ) + + pycross_wheel_library( + name = "pefile@2023.2.7", + wheel = ":_wheel_pefile@2023.2.7", + ) + + native.alias( + name = "_wheel_pip@23.3.1", + actual = "@rules_pycross_internal_deps_wheel_pip_23.3.1_py3_none_any//file", + ) + + pycross_wheel_library( + name = "pip@23.3.1", + wheel = ":_wheel_pip@23.3.1", + ) + + native.alias( + name = "_wheel_poetry-core@1.8.1", + actual = "@rules_pycross_internal_deps_wheel_poetry_core_1.8.1_py3_none_any//file", + ) + + pycross_wheel_library( + name = "poetry-core@1.8.1", + wheel = ":_wheel_poetry-core@1.8.1", + ) 
+ + native.alias( + name = "_wheel_pyelftools@0.31", + actual = "@rules_pycross_internal_deps_wheel_pyelftools_0.31_py3_none_any//file", + ) + + pycross_wheel_library( + name = "pyelftools@0.31", + wheel = ":_wheel_pyelftools@0.31", + ) + + native.alias( + name = "_wheel_pyproject-hooks@1.1.0", + actual = "@rules_pycross_internal_deps_wheel_pyproject_hooks_1.1.0_py3_none_any//file", + ) + + pycross_wheel_library( + name = "pyproject-hooks@1.1.0", + wheel = ":_wheel_pyproject-hooks@1.1.0", + ) + + _repairwheel_0_3_1_deps = [ + ":delvewheel@1.6.0", + ":macholib@1.16.3", + ":packaging@23.2", + ":pefile@2023.2.7", + ":pyelftools@0.31", + ] + + native.alias( + name = "_wheel_repairwheel@0.3.1", + actual = "@rules_pycross_internal_deps_wheel_repairwheel_0.3.1_py3_none_any//file", + ) + + pycross_wheel_library( + name = "repairwheel@0.3.1", + deps = _repairwheel_0_3_1_deps, + wheel = ":_wheel_repairwheel@0.3.1", + ) + + native.alias( + name = "_wheel_tomli@2.0.1", + actual = "@rules_pycross_internal_deps_wheel_tomli_2.0.1_py3_none_any//file", + ) + + pycross_wheel_library( + name = "tomli@2.0.1", + wheel = ":_wheel_tomli@2.0.1", + ) + + native.alias( + name = "_wheel_zipp@3.19.2", + actual = "@rules_pycross_internal_deps_wheel_zipp_3.19.2_py3_none_any//file", + ) + + pycross_wheel_library( + name = "zipp@3.19.2", + wheel = ":_wheel_zipp@3.19.2", + ) + +# buildifier: disable=unnamed-macro +def repositories(): + """Generated package repositories.""" + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_altgraph_0.17.4_py2.py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/4d/3f/3bc3f1d83f6e4a7fcb834d3720544ca597590425be5ba9db032b2bf322a2/altgraph-0.17.4-py2.py3-none-any.whl", + ], + sha256 = "642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff", + downloaded_file_path = "altgraph-0.17.4-py2.py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_build_1.0.3_py3_none_any", + urls = [ + 
"https://files.pythonhosted.org/packages/93/dd/b464b728b866aaa62785a609e0dd8c72201d62c5f7c53e7c20f4dceb085f/build-1.0.3-py3-none-any.whl", + ], + sha256 = "589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f", + downloaded_file_path = "build-1.0.3-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_dacite_1.6.0_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/06/9d/11a073172d889e9e0d0ad270a1b468876c82d759af7864a8095dfc73f46d/dacite-1.6.0-py3-none-any.whl", + ], + sha256 = "4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f", + downloaded_file_path = "dacite-1.6.0-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_delvewheel_1.6.0_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/24/f7/35e5657954452f6c221969e67a7c69433b1f230dc076cf49e5a9186a7fd4/delvewheel-1.6.0-py3-none-any.whl", + ], + sha256 = "15be02e749caacafdd51c283175a041a3f467484a1a96fc2d36340ced6869bff", + downloaded_file_path = "delvewheel-1.6.0-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_importlib_metadata_7.1.0_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/2d/0a/679461c511447ffaf176567d5c496d1de27cbe34a87df6677d7171b2fbd4/importlib_metadata-7.1.0-py3-none-any.whl", + ], + sha256 = "30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570", + downloaded_file_path = "importlib_metadata-7.1.0-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_installer_0.7.0_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/e5/ca/1172b6638d52f2d6caa2dd262ec4c811ba59eee96d54a7701930726bce18/installer-0.7.0-py3-none-any.whl", + ], + sha256 = "05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53", + downloaded_file_path = "installer-0.7.0-py3-none-any.whl", + ) + + maybe( + http_file, + name = 
"rules_pycross_internal_deps_wheel_macholib_1.16.3_py2.py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/d1/5d/c059c180c84f7962db0aeae7c3b9303ed1d73d76f2bfbc32bc231c8be314/macholib-1.16.3-py2.py3-none-any.whl", + ], + sha256 = "0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c", + downloaded_file_path = "macholib-1.16.3-py2.py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_packaging_23.2_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", + ], + sha256 = "8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", + downloaded_file_path = "packaging-23.2-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_pefile_2023.2.7_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/55/26/d0ad8b448476d0a1e8d3ea5622dc77b916db84c6aa3cb1e1c0965af948fc/pefile-2023.2.7-py3-none-any.whl", + ], + sha256 = "da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6", + downloaded_file_path = "pefile-2023.2.7-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_pip_23.3.1_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/47/6a/453160888fab7c6a432a6e25f8afe6256d0d9f2cbd25971021da6491d899/pip-23.3.1-py3-none-any.whl", + ], + sha256 = "55eb67bb6171d37447e82213be585b75fe2b12b359e993773aca4de9247a052b", + downloaded_file_path = "pip-23.3.1-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_poetry_core_1.8.1_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/99/bc/058b8ff87871fce6615ad032d62c773272f243266b110f7b86d146cf78d8/poetry_core-1.8.1-py3-none-any.whl", + ], + sha256 = "194832b24f3283e01c5402eae71a6aae850ecdfe53f50a979c76bf7aa5010ffa", + downloaded_file_path = "poetry_core-1.8.1-py3-none-any.whl", + ) + + maybe( + 
http_file, + name = "rules_pycross_internal_deps_wheel_pyelftools_0.31_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/f8/64/711030d9fe9ccaf6ee3ab1bcf4801c6bb3d0e585af18824a50b016b4f39c/pyelftools-0.31-py3-none-any.whl", + ], + sha256 = "f52de7b3c7e8c64c8abc04a79a1cf37ac5fb0b8a49809827130b858944840607", + downloaded_file_path = "pyelftools-0.31-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_pyproject_hooks_1.1.0_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/ae/f3/431b9d5fe7d14af7a32340792ef43b8a714e7726f1d7b69cc4e8e7a3f1d7/pyproject_hooks-1.1.0-py3-none-any.whl", + ], + sha256 = "7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2", + downloaded_file_path = "pyproject_hooks-1.1.0-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_repairwheel_0.3.1_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/9a/c4/a22f2a9807661d4f9375a46738695c6f2441821b78c09d3943c93875267a/repairwheel-0.3.1-py3-none-any.whl", + ], + sha256 = "d42b00b4ce9a2a398771ddd9a554cf58390e2a982d2b1376384970b2651f3688", + downloaded_file_path = "repairwheel-0.3.1-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_tomli_2.0.1_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", + ], + sha256 = "939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + downloaded_file_path = "tomli-2.0.1-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_zipp_3.19.2_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/20/38/f5c473fe9b90c8debdd29ea68d5add0289f1936d6f923b6b9cc0b931194c/zipp-3.19.2-py3-none-any.whl", + ], + sha256 = "f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c", + downloaded_file_path = 
"zipp-3.19.2-py3-none-any.whl", + ) diff --git a/python/private/pycross_staging/private/pycross_deps_core.lock.bzl b/python/private/pycross_staging/private/pycross_deps_core.lock.bzl new file mode 100644 index 0000000000..d36baa72bc --- /dev/null +++ b/python/private/pycross_staging/private/pycross_deps_core.lock.bzl @@ -0,0 +1,168 @@ +# This file is generated by rules_pycross. +# It is not intended for manual editing. +"""Pycross-generated dependency targets.""" + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") +load("//pycross:defs.bzl", "pycross_wheel_library") + +PINS = { + "dacite": "dacite@1.6.0", + "installer": "installer@0.7.0", + "packaging": "packaging@23.2", + "pip": "pip@23.3.1", + "poetry-core": "poetry-core@1.8.1", + "tomli": "tomli@2.0.1", +} + +FILES = { + "dacite-1.6.0-py3-none-any.whl": Label("@rules_pycross_internal_deps_wheel_dacite_1.6.0_py3_none_any//file:dacite-1.6.0-py3-none-any.whl"), + "installer-0.7.0-py3-none-any.whl": Label("@rules_pycross_internal_deps_wheel_installer_0.7.0_py3_none_any//file:installer-0.7.0-py3-none-any.whl"), + "packaging-23.2-py3-none-any.whl": Label("@rules_pycross_internal_deps_wheel_packaging_23.2_py3_none_any//file:packaging-23.2-py3-none-any.whl"), + "pip-23.3.1-py3-none-any.whl": Label("@rules_pycross_internal_deps_wheel_pip_23.3.1_py3_none_any//file:pip-23.3.1-py3-none-any.whl"), + "poetry_core-1.8.1-py3-none-any.whl": Label("@rules_pycross_internal_deps_wheel_poetry_core_1.8.1_py3_none_any//file:poetry_core-1.8.1-py3-none-any.whl"), + "tomli-2.0.1-py3-none-any.whl": Label("@rules_pycross_internal_deps_wheel_tomli_2.0.1_py3_none_any//file:tomli-2.0.1-py3-none-any.whl"), +} + +# buildifier: disable=unnamed-macro +def targets(): + """Generated package targets.""" + + for pin_name, pin_target in PINS.items(): + native.alias( + name = pin_name, + actual = ":" + pin_target, + ) + + native.config_setting( + name = 
"_env_rules_pycross_deps_target_env", + ) + + # buildifier: disable=unused-variable + _target = select({ + ":_env_rules_pycross_deps_target_env": "//pycross/private:rules_pycross_deps_target_env", + }) + + native.alias( + name = "_wheel_dacite@1.6.0", + actual = "@rules_pycross_internal_deps_wheel_dacite_1.6.0_py3_none_any//file", + ) + + pycross_wheel_library( + name = "dacite@1.6.0", + wheel = ":_wheel_dacite@1.6.0", + ) + + native.alias( + name = "_wheel_installer@0.7.0", + actual = "@rules_pycross_internal_deps_wheel_installer_0.7.0_py3_none_any//file", + ) + + pycross_wheel_library( + name = "installer@0.7.0", + wheel = ":_wheel_installer@0.7.0", + ) + + native.alias( + name = "_wheel_packaging@23.2", + actual = "@rules_pycross_internal_deps_wheel_packaging_23.2_py3_none_any//file", + ) + + pycross_wheel_library( + name = "packaging@23.2", + wheel = ":_wheel_packaging@23.2", + ) + + native.alias( + name = "_wheel_pip@23.3.1", + actual = "@rules_pycross_internal_deps_wheel_pip_23.3.1_py3_none_any//file", + ) + + pycross_wheel_library( + name = "pip@23.3.1", + wheel = ":_wheel_pip@23.3.1", + ) + + native.alias( + name = "_wheel_poetry-core@1.8.1", + actual = "@rules_pycross_internal_deps_wheel_poetry_core_1.8.1_py3_none_any//file", + ) + + pycross_wheel_library( + name = "poetry-core@1.8.1", + wheel = ":_wheel_poetry-core@1.8.1", + ) + + native.alias( + name = "_wheel_tomli@2.0.1", + actual = "@rules_pycross_internal_deps_wheel_tomli_2.0.1_py3_none_any//file", + ) + + pycross_wheel_library( + name = "tomli@2.0.1", + wheel = ":_wheel_tomli@2.0.1", + ) + +# buildifier: disable=unnamed-macro +def repositories(): + """Generated package repositories.""" + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_dacite_1.6.0_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/06/9d/11a073172d889e9e0d0ad270a1b468876c82d759af7864a8095dfc73f46d/dacite-1.6.0-py3-none-any.whl", + ], + sha256 = 
"4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f", + downloaded_file_path = "dacite-1.6.0-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_installer_0.7.0_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/e5/ca/1172b6638d52f2d6caa2dd262ec4c811ba59eee96d54a7701930726bce18/installer-0.7.0-py3-none-any.whl", + ], + sha256 = "05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53", + downloaded_file_path = "installer-0.7.0-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_packaging_23.2_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", + ], + sha256 = "8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", + downloaded_file_path = "packaging-23.2-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_pip_23.3.1_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/47/6a/453160888fab7c6a432a6e25f8afe6256d0d9f2cbd25971021da6491d899/pip-23.3.1-py3-none-any.whl", + ], + sha256 = "55eb67bb6171d37447e82213be585b75fe2b12b359e993773aca4de9247a052b", + downloaded_file_path = "pip-23.3.1-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_poetry_core_1.8.1_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/99/bc/058b8ff87871fce6615ad032d62c773272f243266b110f7b86d146cf78d8/poetry_core-1.8.1-py3-none-any.whl", + ], + sha256 = "194832b24f3283e01c5402eae71a6aae850ecdfe53f50a979c76bf7aa5010ffa", + downloaded_file_path = "poetry_core-1.8.1-py3-none-any.whl", + ) + + maybe( + http_file, + name = "rules_pycross_internal_deps_wheel_tomli_2.0.1_py3_none_any", + urls = [ + "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", + ], + sha256 = 
"939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + downloaded_file_path = "tomli-2.0.1-py3-none-any.whl", + ) diff --git a/python/private/pycross_staging/private/pypi_file.bzl b/python/private/pycross_staging/private/pypi_file.bzl new file mode 100644 index 0000000000..39f3f55c0f --- /dev/null +++ b/python/private/pycross_staging/private/pypi_file.bzl @@ -0,0 +1,90 @@ +"""Rule to download files from pypi.""" + +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "update_attrs") + +_PYPI_FILE_BUILD = """\ +package(default_visibility = ["//visibility:public"]) +filegroup( + name = "file", + srcs = ["{}"], +) +""" + +def _pypi_file_impl(ctx): + """Implementation of the pypi_file rule.""" + + index_url = ctx.attr.index + if not index_url.endswith("/"): + index_url = index_url + "/" + + index_url += "pypi/{}/{}/json".format( + ctx.attr.package_name, + ctx.attr.package_version, + ) + + ctx.download( + index_url, + "pypi_metadata.json", + ) + metadata = json.decode(ctx.read("pypi_metadata.json")) + + if not ctx.attr.keep_metadata: + ctx.delete("pypi_metadata.json") + + release_files = metadata.get("urls", []) + url = None + for release_file in release_files: + if release_file["filename"] == ctx.attr.filename: + url = release_file["url"] + break + + if not url: + fail( + "File {} does not exist for version {} of package {} in index {}".format( + ctx.attr.filename, + ctx.attr.package_version, + ctx.attr.package_name, + ctx.attr.index, + ), + ) + + download_info = ctx.download( + url, + "file/" + ctx.attr.filename, + ctx.attr.sha256, + ) + ctx.file("file/BUILD.bazel", _PYPI_FILE_BUILD.format(ctx.attr.filename)) + + return update_attrs(ctx.attr, _pypi_file_attrs.keys(), {"sha256": download_info.sha256}) + +_pypi_file_attrs = { + "filename": attr.string( + doc = "The name of the file to download.", + mandatory = True, + ), + "index": attr.string( + doc = "The base URL of the PyPI-compatible package index to use. 
Defaults to pypi.org.", + default = "https://pypi.org", + ), + "keep_metadata": attr.bool( + doc = "Whether to store the pypi_metadata.json file for debugging.", + ), + "package_name": attr.string( + doc = "The package name.", + mandatory = True, + ), + "package_version": attr.string( + doc = "The package version.", + mandatory = True, + ), + "sha256": attr.string( + doc = "The expected SHA-256 of the file downloaded.", + mandatory = True, + ), +} + +pypi_file = repository_rule( + implementation = _pypi_file_impl, + attrs = _pypi_file_attrs, + doc = "Downloads a file from a PyPI-compatible package index.", +) diff --git a/python/private/pycross_staging/private/pyproject.toml b/python/private/pycross_staging/private/pyproject.toml new file mode 120000 index 0000000000..00c904eb84 --- /dev/null +++ b/python/private/pycross_staging/private/pyproject.toml @@ -0,0 +1 @@ +../../pyproject.toml \ No newline at end of file diff --git a/python/private/pycross_staging/private/repo_venv_utils.bzl b/python/private/pycross_staging/private/repo_venv_utils.bzl new file mode 100644 index 0000000000..16cbdea089 --- /dev/null +++ b/python/private/pycross_staging/private/repo_venv_utils.bzl @@ -0,0 +1,140 @@ +"""Repo rule tools for creating/managing venvs.""" + +load("@bazel_skylib//lib:paths.bzl", "paths") + +def get_venv_site_path(venv_path): + """ + Find and return the site-packages path under venv_path. + + Args: + venv_path: the virtual env path. + + Returns: + The site-packages path. + """ + + # First, try windows. + site_path = venv_path.get_child("Lib", "site-packages") + if site_path.exists: + return site_path + + # If that doesn't work, try posix: lib/python3.X/site-packages + lib_path = venv_path.get_child("lib") + if not lib_path.exists: + fail("Cannot find lib path") + + # We don't know the Python version, so we just find a directory that starts with "python". 
+ python_path = None + for child in lib_path.readdir(): + if child.basename.startswith("python"): + python_path = child + if not python_path: + fail("Cannot find python path") + + site_path = python_path.get_child("site-packages") + if not site_path.exists: + fail("Cannot find site-packages path") + + return site_path + +def get_venv_python_executable(venv_path): + """ + Find and return the python executable under venv_path. + + Args: + venv_path: the virtual env path. + + Returns: + The python executable path. + """ + + # posix + python_exe = venv_path.get_child("bin", "python") + if python_exe.exists: + return python_exe + + # windows + python_exe = venv_path.get_child("Scripts", "python.exe") + if python_exe.exists: + return python_exe + + fail("Unable to find the python executable") + +_venv_build = """\ +package(default_visibility = ["//visibility:public"]) + +alias( + name = "python", + actual = "{venv_python_exe}", +) +""" + +def create_venv(rctx, python_executable, venv_path, path_entries = []): + """ + Create a virtual environment. + + The environment will have a BUILD file with a `python` target pointing to the python executable. + + Args: + rctx: the repository_context. + python_executable: the python_executable to use. + venv_path: the path to the environment to create, relative to the repository. + path_entries: optional list of PYTHONPATH entries to add. 
+ + Returns: + A struct containing + python_executable: the path to the python executable within the environment + site_path: the path to the `site-packages` directory + """ + venv_path = rctx.path(venv_path) + venv_args = [ + str(python_executable), + "-m", + "venv", + "--without-pip", + str(venv_path), + ] + result = rctx.execute(venv_args) + if result.return_code: + fail("venv creation exited with {}".format(result.return_code())) + + if not venv_path.exists: + fail("Failed to create virtual environment.") + + exe_path = get_venv_python_executable(venv_path) + site_path = get_venv_site_path(venv_path) + + if path_entries: + pth_text = "\n".join([str(entry) for entry in path_entries]) + "\n" + rctx.file(site_path.get_child("path.pth"), pth_text) + + relative_exe_path = paths.relativize(str(exe_path), str(venv_path)) + rctx.file(venv_path.get_child("BUILD.bazel"), _venv_build.format(venv_python_exe = relative_exe_path)) + + return struct( + python_executable = exe_path, + site_path = site_path, + ) + +def install_venv_wheels(rctx, venv_path, pip_whl, wheels): + """ + Install wheels into the virtual env. + + Args: + rctx: the repository_context. + venv_path: the path to the environment to create, relative to the repository. + pip_whl: the path to a `pip` wheel used to install other wheels. + wheels: the wheels to install. 
+ """ + venv_path = rctx.path(venv_path) + python_exe = get_venv_python_executable(venv_path) + env = dict(PYTHONPATH = str(rctx.path(pip_whl))) + wheel_paths = [str(rctx.path(wheel)) for wheel in wheels] + result = rctx.execute([ + str(python_exe), + "-m", + "pip", + "install", + ] + wheel_paths, environment = env) + if result.return_code: + fail("wheel install failed: {}".format(result.stderr)) diff --git a/python/private/pycross_staging/private/resolved_lock_repo.bzl b/python/private/pycross_staging/private/resolved_lock_repo.bzl new file mode 100644 index 0000000000..c94b6b6121 --- /dev/null +++ b/python/private/pycross_staging/private/resolved_lock_repo.bzl @@ -0,0 +1,77 @@ +"""Implementation of the resolved_lock_repo rule. + +`resolved_lock_repo` takes an importable third-party lock (PDM or Poetry) and: +1. runs that lock type's translator to generate a "raw" lock structure. +2. runs `raw_lock_resolver` to generate a resolved lock structure. + +The output of #2 is stored as `//:lock.json` and consumed by `package_repo`. 
+""" + +load(":internal_repo.bzl", "exec_internal_tool") +load(":lock_attrs.bzl", "RESOLVE_ATTRS", "handle_resolve_attrs") +load(":pdm_lock_model.bzl", "repo_create_pdm_model", PDM_TRANSLATOR_TOOL = "TRANSLATOR_TOOL") +load(":poetry_lock_model.bzl", "repo_create_poetry_model", POETRY_TRANSLATOR_TOOL = "TRANSLATOR_TOOL") + +_RESOLVER_TOOL = Label("//pycross/private/tools:raw_lock_resolver.py") + +_ROOT_BUILD = """\ +package(default_visibility = ["//visibility:public"]) + +exports_files([ + "raw_lock.json", + "lock.json", +]) +""" + +def _generate_lock_model_file(rctx): + model_params = json.decode(rctx.attr.lock_model) + if model_params["model_type"] == "pdm": + repo_create_pdm_model(rctx, model_params, "raw_lock.json") + elif model_params["model_type"] == "poetry": + repo_create_poetry_model(rctx, model_params, "raw_lock.json") + else: + fail("Invalid model type: " + model_params["model_type"]) + +def _generate_annotations_file(rctx): + annotations = {p: json.decode(a) for p, a in rctx.attr.annotations.items()} + rctx.file("annotations.json", json.encode(annotations)) + +def _generate_lock_file(rctx): + environment_files_and_labels = [(rctx.path(t), str(t)) for t in rctx.attr.target_environments] + wheel_names_and_labels = [(rctx.path(local_wheel).basename, str(local_wheel)) for local_wheel in rctx.attr.local_wheels] + args = handle_resolve_attrs(rctx.attr, environment_files_and_labels, wheel_names_and_labels) + args.append("--always-include-sdist") + args.extend(["--lock-model-file", "raw_lock.json"]) + args.extend(["--annotations-file", "annotations.json"]) + args.extend(["--output", "lock.json"]) + + exec_internal_tool( + rctx, + _RESOLVER_TOOL, + args, + ) + +def _resolved_lock_repo_impl(rctx): + rctx.file(rctx.path("BUILD.bazel"), _ROOT_BUILD) + rctx.report_progress("Generating raw_lock.json") + _generate_lock_model_file(rctx) + rctx.report_progress("Generating annotations.json") + _generate_annotations_file(rctx) + rctx.report_progress("Generating lock.json") 
+ _generate_lock_file(rctx) + rctx.report_progress() + +resolved_lock_repo = repository_rule( + implementation = _resolved_lock_repo_impl, + attrs = dict( + lock_model = attr.string( + mandatory = True, + ), + # For pre-pathifying labels + _tools = attr.label_list(default = [ + _RESOLVER_TOOL, + PDM_TRANSLATOR_TOOL, + POETRY_TRANSLATOR_TOOL, + ]), + ) | RESOLVE_ATTRS, +) diff --git a/python/private/pycross_staging/private/target_environment.bzl b/python/private/pycross_staging/private/target_environment.bzl new file mode 100644 index 0000000000..e9cec07746 --- /dev/null +++ b/python/private/pycross_staging/private/target_environment.bzl @@ -0,0 +1,111 @@ +"""Implementation of the pycross_target_environment rule.""" + +load(":internal_repo.bzl", "exec_internal_tool") + +def _target_python_impl(ctx): + f = ctx.actions.declare_file(ctx.attr.name + ".json") + + args = ctx.actions.args().use_param_file("--flagfile=%s") + args.add("create") + args.add("--name", ctx.attr.name) + args.add("--output", f) + args.add("--implementation", ctx.attr.implementation) + args.add("--version", ctx.attr.version) + + for abi in ctx.attr.abis: + args.add("--abi", abi) + + for platform in ctx.attr.platforms: + args.add("--platform", platform) + + for constraint in ctx.attr.python_compatible_with: + args.add("--python-compatible-with", str(constraint.label)) + + for flag, value in ctx.attr.flag_values.items(): + args.add_all("--flag-value", [str(flag.label), value]) + + for key, val in ctx.attr.envornment_markers.items(): + args.add_all("--environment-marker", [key, val]) + + if ctx.attr.config_setting: + args.add("--config-setting-target", str(ctx.attr.config_setting.label)) + + ctx.actions.run( + outputs = [f], + executable = ctx.executable._tool, + arguments = [args], + ) + + return [ + DefaultInfo( + files = depset([f]), + ), + ] + +pycross_target_environment = rule( + implementation = _target_python_impl, + attrs = { + "abis": attr.string_list( + doc = "A list of PEP 425 abi tags. 
Defaults to ['none'].", + default = ["none"], + ), + "config_setting": attr.label( + doc = "Optional config_setting target to select this environment.", + ), + "envornment_markers": attr.string_dict( + doc = "Environment marker overrides.", + ), + "flag_values": attr.label_keyed_string_dict( + doc = ( + "A list of flag values that, when satisfied, indicates this " + + "target_platform should be selected (together with python_compatible_with)." + ), + ), + "implementation": attr.string( + doc = ( + "The PEP 425 implementation abbreviation. " + + "Defaults to 'cp' for CPython." + ), + default = "cp", + ), + "platforms": attr.string_list( + doc = "A list of PEP 425 platform tags. Defaults to ['any'].", + default = ["any"], + ), + "python_compatible_with": attr.label_list( + doc = ( + "A list of constraints that, when satisfied, indicates this " + + "target_platform should be selected (together with flag_values)." + ), + ), + "version": attr.string( + doc = "The python version.", + mandatory = True, + ), + "_tool": attr.label( + default = Label("//pycross/private/tools:target_environment_generator"), + cfg = "exec", + executable = True, + ), + }, +) + +def repo_batch_create_target_environments(rctx, env_settings_list): + """ + Create many target environment JSON files. + + Args: + rctx: repository_ctx + env_settings_list: a list of dicts containing fields described in target_environment_generator.py's Input. 
+ """ + + env_file = rctx.path("_env_input.json") + rctx.file(env_file, json.encode(env_settings_list)) + + exec_internal_tool( + rctx, + Label("//pycross/private/tools:target_environment_generator.py"), + ["batch-create", "--input", str(env_file)], + ) + + rctx.delete(env_file) diff --git a/python/private/pycross_staging/private/toolchain_helpers.bzl b/python/private/pycross_staging/private/toolchain_helpers.bzl new file mode 100644 index 0000000000..403ea04be6 --- /dev/null +++ b/python/private/pycross_staging/private/toolchain_helpers.bzl @@ -0,0 +1,475 @@ +"""Helpers for creating Pycross environments and toolchains""" + +load("@rules_python//python:versions.bzl", "MINOR_MAPPING", "PLATFORMS", "TOOL_VERSIONS") +load(":lock_attrs.bzl", "DEFAULT_GLIBC_VERSION", "DEFAULT_MACOS_VERSION") +load(":target_environment.bzl", "repo_batch_create_target_environments") +load(":util.bzl", "BZLMOD") + +def _repo_label(repo_name, label): + if BZLMOD: + return "@@{}{}".format(repo_name, label) + else: + return "@{}{}".format(repo_name, label) + +def _get_micro_version(version): + if version in MINOR_MAPPING: + return MINOR_MAPPING[version] + elif version in TOOL_VERSIONS: + return version + + fail("Unknown Python version: {}".format(version)) + +def _get_version_components(version): + parts = version.split(".") + if len(parts) < 2: + fail("Invalid Python version; must be format X.Y or X.Y.Z: %s" % str(version)) + + return int(parts[0]), int(parts[1]) + +def _get_abi(version): + major, micro = _get_version_components(version) + return "cp{}{}".format(major, micro) + +def _get_env_platforms(py_platform, glibc_version, macos_version): + glibc_major, glibc_micro = _get_version_components(glibc_version) + macos_major, macos_micro = _get_version_components(macos_version) + + if macos_major < 10: + fail("macos version must be >= 10") + if (glibc_major, glibc_micro) < (2, 5) or (glibc_major, glibc_micro) >= (3, 0): + fail("glibc version must be >= 2.5 and < 3.0") + + platform_info = 
PLATFORMS[py_platform] + arch = platform_info.arch + if py_platform.endswith("linux-gnu"): + return ["linux_{}".format(arch)] + [ + "manylinux_2_{}_{}".format(i, arch) + for i in range(5, glibc_micro + 1) + ] + elif py_platform.endswith("darwin"): + return ["macosx_{}_{}_{}".format(macos_major, macos_micro, arch)] + elif py_platform.endswith("windows-msvc"): + return ["win_amd64"] + + fail("Unknown platform: {}".format(py_platform)) + +def _dedupe_versions(versions): + """Returns a list of versions deduped by resolved minor version.""" + + # E.g., if '3.10' and '3.10.6' are both passed, we only want '3.10.6'. Otherwise we'll run into + # ambiguous select() criteria. + unique_versions = {} + for version in sorted(versions): + micro_version = _get_micro_version(version) + + # In sorted order, 3.10.6 will override 3.10. + unique_versions[micro_version] = version + + return sorted(unique_versions.values()) + +def _canonical_prefix(python_toolchains_repo_name): + # We assume that python_toolchains_repo_name points to the `python_versions` repo + # that rules_python generates. From there, we strip of `python_versions` and return + # the remainder as the prefix. 
+ if not python_toolchains_repo_name.endswith("python_versions"): + fail( + "Expected python_toolchains_repo_name to end with 'python_versions', " + + "but it does not: " + python_toolchains_repo_name, + ) + return python_toolchains_repo_name[:-len("python_versions")] + +def _compute_environments( + repo_name, + python_versions, + platforms, + glibc_version, + macos_version): + environments = [] + + if not platforms: + platforms = sorted(PLATFORMS.keys()) + + for version in _dedupe_versions(python_versions): + micro_version = _get_micro_version(version) + + version_info = TOOL_VERSIONS[micro_version] + available_version_platforms = version_info["sha256"].keys() + selected_platforms = [p for p in platforms if p in available_version_platforms] + + for target_platform in selected_platforms: + env_platforms = _get_env_platforms(target_platform, glibc_version, macos_version) + target_env_name = "python_{}_{}".format(version, target_platform) + target_env_json = target_env_name + ".json" + + config_setting_name = "{}_config".format(target_env_name) + environments.append( + dict( + name = target_env_name, + output = target_env_json, + implementation = "cp", + config_setting_name = config_setting_name, + config_setting_target = _repo_label(repo_name, "//:{}".format(config_setting_name)), + target_compatible_with = list(PLATFORMS[target_platform].compatible_with), + version = micro_version, + abis = [_get_abi(micro_version)], + platforms = env_platforms, + ), + ) + + return environments + +def _compute_toolchains( + python_toolchains_repo_name, + is_multi_version_layout, + python_versions, + platforms): + toolchains = [] + + if not platforms: + platforms = sorted(PLATFORMS.keys()) + + for version in _dedupe_versions(python_versions): + micro_version = _get_micro_version(version) + underscore_version = version.replace(".", "_") + + tc_provider_name = "python_{}".format(version) + tc_target_config_name = "{}_target_config".format(tc_provider_name) + tc_name = 
"{}_tc".format(tc_provider_name) + + if BZLMOD: + # With bzlmod we need to construct the canonical repository names for version interpreters. + runtime = "@@{}python_{}//:py3_runtime".format( + _canonical_prefix(python_toolchains_repo_name), + underscore_version, + ) + elif is_multi_version_layout: + # These other modes are WORKSPACE and should eventually be dropped. + runtime = "@{}_{}//:py3_runtime".format( + python_toolchains_repo_name, + underscore_version, + ) + else: + runtime = "@{}//:py3_runtime".format(python_toolchains_repo_name) + + toolchains.append( + dict( + name = tc_name, + provider_name = tc_provider_name, + target_config_name = tc_target_config_name, + runtime = runtime, + version = micro_version, + ), + ) + return toolchains + +def _is_multi_version_layout(rctx, python_toolchain_repo): + # Ideally we'd just check whether pip.bzl exists, but `path(Label())` + # unfortunately raises an exception. + repo_build_file = python_toolchain_repo.relative("//:BUILD.bazel") + repo_dir = rctx.path(repo_build_file).dirname + return repo_dir.get_child("pip.bzl").exists + +def _get_single_python_version(rctx, python_toolchain_repo): + defs_bzl_file = python_toolchain_repo.relative("//:defs.bzl") + content = rctx.read(defs_bzl_file) + for line in content.splitlines(): + if line.strip().startswith("python_version"): + # We found a line that is like `python_version = "3.11.6",` + # Split by the equal sign and get the version. 
+ _, version_side = line.split("=") + quoted_version = version_side.strip(" ,") + version = quoted_version.strip("'\"") # strip quotes + return version + + fail("Unable to determine version from " + defs_bzl_file) + +def _get_multi_python_versions(rctx, python_toolchain_repo): + pip_bzl_file = python_toolchain_repo.relative("//:pip.bzl") + content = rctx.read(pip_bzl_file) + + versions = [] + for line in content.splitlines(): + if line.strip().startswith("python_versions"): + # We found a line that is like `python_versions = ["3.11.6", "3.12.0"],` + # Split by the equal sign and parse the array. + _, version_side = line.split("=") + version_list = version_side.strip(" ,") + version_list_contents = version_list.strip("[]") + quoted_versions = version_list_contents.split(",") + for version in quoted_versions: + version = version.strip() # strip whitespace + version = version.strip("'\"") # strip quotes + versions.append(version) + + break + + if not versions: + fail("Unable to determine versions from " + pip_bzl_file) + + return versions + +def _get_default_python_version_bzlmod(rctx, pythons_hub_repo): + interpreters_bzl_file = Label("@@{}//:interpreters.bzl".format(pythons_hub_repo.workspace_name)) + build_content = rctx.read(interpreters_bzl_file) + + for line in build_content.splitlines(): + if line.startswith("DEFAULT_PYTHON_VERSION"): + _, val = line.split("=") + val = val.strip(" \"'") + return val + + fail("Unable to determine default version for python hub repo '{}'".format(pythons_hub_repo)) + +def _get_default_python_version_workspace(rctx, python_toolchain_repo, versions): + # Figure out the default version + default_version = None + for version in versions: + underscore_version = version.replace(".", "_") + toolchain_bzl_file = Label("@{}_{}_toolchains//:BUILD.bazel".format(python_toolchain_repo.workspace_name, underscore_version)) + content = rctx.read(toolchain_bzl_file) + + if "py_toolchain_suite" in content: + # Handle rules_python 0.30+ + # Default 
version toolchains have set_python_version_constraint set to "False". + for line in content.lower().splitlines(): + if "set_python_version_constraint" in line: + if "false" in line: + default_version = version + break + if default_version: + break + + else: + # Handle rules_python versions prior to 0.30. + # Default version toolchains have empty target_settings lists. + if "target_settings" not in content or "target_settings = []" in content: + default_version = version + break + + if not default_version: + fail("Unable to determine default version for python toolchain repo '{}'".format(python_toolchain_repo)) + + return default_version + +# This requires the user to provide a `default_version` value. +_ENVIRONMENTS_BUILD_HEADER = """\ +load("{defs}", "pycross_target_environment") +load("{ver}", "rules_python_interpreter_version") + +package(default_visibility = ["//visibility:public"]) + +rules_python_interpreter_version( + name = "_interpreter_version", + default_version = "{{default_version}}", + visibility = ["//visibility:private"], +) +""".format( + defs = Label("//pycross:defs.bzl"), + ver = Label("//pycross/private:interpreter_version.bzl"), +) + +# This requires the user to provide a `default_version` value. +_TOOLCHAINS_BUILD_HEADER = """\ +load("{toolchain}", "pycross_hermetic_toolchain") +load("{ver}", "rules_python_interpreter_version") + +package(default_visibility = ["//visibility:public"]) + +rules_python_interpreter_version( + name = "_interpreter_version", + default_version = "{{default_version}}", + visibility = ["//visibility:private"], +) +""".format( + toolchain = Label("//pycross:toolchain.bzl"), + ver = Label("//pycross/private:interpreter_version.bzl"), +) + +_ENVIRONMENT_TEMPLATE = """\ +config_setting( + name = {config_setting_name}, + constraint_values = {target_compatible_with}, + flag_values = {{":_interpreter_version": {version}}}, +) +""" + +# exec_interpreter and target_interpreter below are both set to the same +# target. 
def _get_requested_python_versions(rctx, registered_python_versions):
    """
    Returns Python versions filtered to what the user requested.

    Args:
        rctx: repository_ctx; its `requested_python_versions` attribute drives
            the filtering. An empty attribute means "everything registered".
        registered_python_versions: the versions rules_python has registered.

    Returns:
        The requested versions (in request order); fails if any requested
        version is not registered.
    """
    requested = rctx.attr.requested_python_versions
    if not requested:
        return registered_python_versions

    # Report every unknown version at once rather than failing on the first.
    missing = [v for v in requested if v not in registered_python_versions]
    if missing:
        fail("Requested Python versions are not registered: {} (registered versions: {})".format(missing, registered_python_versions))

    return [v for v in requested if v in registered_python_versions]
+ """ + python_repo = rctx.attr.python_toolchains_repo + is_multi_version_layout = _is_multi_version_layout(rctx, python_repo) + if is_multi_version_layout: + registered_python_versions = _get_multi_python_versions(rctx, python_repo) + python_versions = _get_requested_python_versions(rctx, registered_python_versions) + + if rctx.attr.pythons_hub_repo: + default_version = _get_default_python_version_bzlmod(rctx, rctx.attr.pythons_hub_repo) + else: + default_version = _get_default_python_version_workspace(rctx, python_repo, registered_python_versions) + else: + default_version = _get_single_python_version(rctx, python_repo) + python_versions = [default_version] + + return struct( + python_versions = python_versions, + default_version = default_version, + default_micro_version = _get_micro_version(default_version), + is_multi_version_layout = is_multi_version_layout, + ) + +def _pycross_toolchain_repo_impl(rctx): + version_info = _get_python_version_info(rctx) + computed_toolchains = _compute_toolchains( + python_toolchains_repo_name = rctx.attr.python_toolchains_repo.workspace_name, + is_multi_version_layout = version_info.is_multi_version_layout, + python_versions = version_info.python_versions, + platforms = rctx.attr.platforms, + ) + + toolchains_build_sections = [_TOOLCHAINS_BUILD_HEADER.format(default_version = version_info.default_micro_version)] + for tc in computed_toolchains: + toolchains_build_sections.append(_TOOLCHAIN_TEMPLATE.format(**{k: repr(v) for k, v in tc.items()})) + + rctx.file(rctx.path("BUILD.bazel"), "\n".join(toolchains_build_sections)) + +pycross_toolchains_repo = repository_rule( + implementation = _pycross_toolchain_repo_impl, + attrs = { + "platforms": attr.string_list(), + "python_toolchains_repo": attr.label(), + "pythons_hub_repo": attr.label(), + "requested_python_versions": attr.string_list(), + }, +) + +def _pycross_environment_repo_impl(rctx): + version_info = _get_python_version_info(rctx) + computed_environments = 
_compute_environments( + repo_name = rctx.name, + python_versions = version_info.python_versions, + platforms = rctx.attr.platforms, + glibc_version = rctx.attr.glibc_version or DEFAULT_GLIBC_VERSION, + macos_version = rctx.attr.macos_version or DEFAULT_MACOS_VERSION, + ) + + repo_batch_create_target_environments(rctx, computed_environments) + + root_build_sections = [_ENVIRONMENTS_BUILD_HEADER.format(default_version = version_info.default_micro_version)] + for env in computed_environments: + root_build_sections.append(_ENVIRONMENT_TEMPLATE.format(**{k: repr(v) for k, v in env.items()})) + + root_build_sections.append("filegroup(") + root_build_sections.append(' name = "environments",') + root_build_sections.append(" srcs = [") + for env in computed_environments: + root_build_sections.append(" {},".format(repr(env["output"]))) + root_build_sections.append(" ]") + root_build_sections.append(")") + + rctx.file(rctx.path("BUILD.bazel"), "\n".join(root_build_sections)) + + defs_lines = ["environments = ["] + for env in computed_environments: + defs_lines.append(' Label("//:{}"),'.format(env["output"])) + defs_lines.append("]") + + rctx.file(rctx.path("defs.bzl"), "\n".join(defs_lines)) + + index_struct = { + "environments": [ + "//:{}".format(env["output"]) + for env in computed_environments + ], + } + rctx.file(rctx.path("environments"), json.encode_indent(index_struct, indent = " ") + "\n") + +pycross_environments_repo = repository_rule( + implementation = _pycross_environment_repo_impl, + attrs = { + "glibc_version": attr.string(), + "macos_version": attr.string(), + "platforms": attr.string_list(), + "python_toolchains_repo": attr.label(), + "pythons_hub_repo": attr.label(), + "requested_python_versions": attr.string_list(), + }, +) + +def pycross_register_for_python_toolchains( + name, + python_toolchains_repo, + platforms = None, + glibc_version = None, + macos_version = None): + """ + Register target environments and toolchains for a given list of Python 
versions. + + Args: + name: the toolchain repo name. + python_toolchains_repo: a label to the registered rules_python tolchain repo. + platforms: an optional list of platforms to support (e.g., "x86_64-unknown-linux-gnu"). + By default, all platforms supported by rules_python are registered. + glibc_version: the maximum supported GLIBC version. + macos_version: the maximum supported macOS version. + """ + toolchain_repo_name = "{}_toolchains".format(name) + + pycross_environments_repo( + name = name, + python_toolchains_repo = python_toolchains_repo, + platforms = platforms, + glibc_version = glibc_version, + macos_version = macos_version, + ) + + pycross_toolchains_repo( + name = toolchain_repo_name, + python_toolchains_repo = python_toolchains_repo, + platforms = platforms, + ) + + native.register_toolchains("@{}_toolchains//...".format(name)) diff --git a/python/private/pycross_staging/private/tools/BUILD.bazel b/python/private/pycross_staging/private/tools/BUILD.bazel new file mode 100644 index 0000000000..7c20d4130f --- /dev/null +++ b/python/private/pycross_staging/private/tools/BUILD.bazel @@ -0,0 +1,169 @@ +load("@rules_pycross_internal//:python.bzl", "py_binary", "py_library") +load("@rules_python//python:defs.bzl", "py_test") +load("//pycross/private:wheel_zipimport_library.bzl", "pycross_wheel_zipimport_library") + +package(default_visibility = ["//pycross:__subpackages__"]) + +py_library( + name = "args", + srcs = [ + "args.py", + ], + imports = ["../../.."], +) + +py_binary( + name = "bzl_lock_generator", + srcs = ["bzl_lock_generator.py"], + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":args", + ":raw_lock_resolver", + ":resolved_lock_renderer", + ], +) + +py_binary( + name = "extract_lock_repos", + srcs = ["extract_lock_repos.py"], + imports = ["../../.."], +) + +py_binary( + name = "raw_lock_resolver", + srcs = ["raw_lock_resolver.py"], + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + 
":args", + ":lock_model", + ":target_environment", + "@rules_pycross_internal//deps:packaging", + "@rules_pycross_internal//deps:pip", + ], +) + +py_binary( + name = "resolved_lock_renderer", + srcs = ["resolved_lock_renderer.py"], + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":args", + ":lock_model", + ], +) + +pycross_wheel_zipimport_library( + name = "installer", + wheel = "@rules_pycross_internal//:installer_whl", +) + +py_library( + name = "target_environment", + srcs = [ + "target_environment.py", + ], + imports = ["../../.."], + deps = [ + "@rules_pycross_internal//deps:pip", + ], +) + +py_library( + name = "lock_model", + srcs = ["lock_model.py"], + imports = ["../../.."], + deps = [ + ":target_environment", + "@rules_pycross_internal//deps:dacite", + "@rules_pycross_internal//deps:packaging", + ], +) + +py_library( + name = "namespace_pkgs", + srcs = [ + "namespace_pkgs.py", + ], + imports = ["../../.."], +) + +py_test( + name = "namespace_pkgs_test", + size = "small", + srcs = [ + "namespace_pkgs_test.py", + ], + tags = ["unit"], + deps = [ + ":namespace_pkgs", + ], +) + +py_binary( + name = "pdm_translator", + srcs = ["pdm_translator.py"], + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":args", + ":lock_model", + "@rules_pycross_internal//deps:tomli", + ], +) + +py_binary( + name = "poetry_translator", + srcs = ["poetry_translator.py"], + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":args", + ":lock_model", + "@rules_pycross_internal//deps:poetry-core", + "@rules_pycross_internal//deps:tomli", + ], +) + +py_binary( + name = "target_environment_generator", + srcs = ["target_environment_generator.py"], + imports = ["../../.."], + visibility = ["//visibility:public"], + deps = [ + ":args", + ":target_environment", + "@rules_pycross_internal//deps:dacite", + "@rules_pycross_internal//deps:packaging", + "@rules_pycross_internal//deps:pip", + ], +) + +py_binary( 
class FlagFileArgumentParser(ArgumentParser):
    """An ArgumentParser that supports a --flagfile parameter.

    If --flagfile is passed, the specified file is read and each of its lines
    is interpreted as additional command line arguments, following Bazel's
    "shell" param file quoting semantics.
    """

    def parse_known_args(self, args=None, namespace=None):
        # Peel off --flagfile with a throwaway parser so this parser's own
        # configuration (and the caller's namespace) stays untouched.
        pre_parser = ArgumentParser()
        pre_parser.add_argument("--flagfile", type=open)
        pre_namespace, remaining = pre_parser.parse_known_args(args)

        if pre_namespace.flagfile:
            # type=open already opened the file; the `with` ensures it closes.
            with pre_namespace.flagfile as flag_file:
                remaining = remaining + shlex.split(flag_file.read())

        # Hand the caller-provided namespace (not the throwaway flagfile
        # namespace) to the real parser.
        return super().parse_known_args(remaining, namespace)
+ if "BUILD_WORKING_DIRECTORY" in os.environ: + os.chdir(os.environ["BUILD_WORKING_DIRECTORY"]) + + main(parse_flags()) diff --git a/python/private/pycross_staging/private/tools/crossenv/BUILD.bazel b/python/private/pycross_staging/private/tools/crossenv/BUILD.bazel new file mode 100644 index 0000000000..28bb777200 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/BUILD.bazel @@ -0,0 +1,27 @@ +load("@rules_pycross_internal//:python.bzl", "py_binary", "py_library") + +package(default_visibility = ["//pycross/private/tools:__subpackages__"]) + +py_library( + name = "crossenv_lib", + srcs = [ + "__init__.py", + "template.py", + "utils.py", + ], + data = [ + "//pycross/private/tools/crossenv/scripts", + ], + imports = ["../../../.."], +) + +py_binary( + name = "crossenv", + srcs = [ + "__init__.py", + "__main__.py", + ], + imports = ["../../../.."], + main = "__main__.py", + deps = [":crossenv_lib"], +) diff --git a/python/private/pycross_staging/private/tools/crossenv/LICENSE.crossenv.txt b/python/private/pycross_staging/private/tools/crossenv/LICENSE.crossenv.txt new file mode 100644 index 0000000000..6f0b9aa438 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/LICENSE.crossenv.txt @@ -0,0 +1,19 @@ +Copyright (c) 2018 Benjamin Fogle + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/python/private/pycross_staging/private/tools/crossenv/README.md b/python/private/pycross_staging/private/tools/crossenv/README.md new file mode 100644 index 0000000000..dc0288ca8d --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/README.md @@ -0,0 +1,5 @@ +This package contains a derivative of https://github.com/benfogle/crossenv, +originally written by Ben Fogle. See +[LICENSE.crossenv.txt](LICENSE.crossenv.txt) for its original licensing terms. + +It's been largely adapted for use with these Bazel rules, but many of the original concepts remain. diff --git a/python/private/pycross_staging/private/tools/crossenv/__init__.py b/python/private/pycross_staging/private/tools/crossenv/__init__.py new file mode 100644 index 0000000000..e759da3f2a --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/__init__.py @@ -0,0 +1,386 @@ +import dataclasses +import json +import os +import platform +import pprint +import re +import subprocess +import sys +import sysconfig +import tempfile +from pathlib import Path +from typing import Any, Dict, List, Optional + +from pycross.private.tools.crossenv import utils + +SYSCONFIG_DATA_NAME = "_pycross_sysconfig_data" + +# A tiny source file that gets the compiler to output the glibc version +# it's going to link against without us having to actually execute the +# resulting binary (which may be for a different architecture). +# +# There may be a better way. 
+GLIBC_TELLER = """\ +#include +#define XSTR(x) STR(x) +#define STR(x) #x + +#if defined(__GLIBC__) && defined(__GLIBC_MINOR__) +#pragma message "GLIBC" "=" XSTR(__GLIBC__) "." XSTR(__GLIBC_MINOR__) +#endif +""" + + +@dataclasses.dataclass +class Uname: + machine: str + release: str + sysname: str + + +@dataclasses.dataclass +class TargetContext: + abiflags: Optional[str] + effective_glibc: Optional[str] + home: str + macosx_deployment_target: Optional[str] + manylinux_tags: List[str] + multiarch: Optional[str] + platform: str # e.g. apple-aarch64 + project_base: str # bin dir of target executable + sysconfigdata_name: str + sysconfigdata_path: str + sysconfig_platform: str # e.g. apple-x86_64 + uname_machine: str # from uname; e.g. x86_64 + uname_release: str # from uname + uname_sysname: str # from uname + + +@dataclasses.dataclass +class Context: + exec_python_executable: str # The exec python + exec_stdlib: str # e.g. .../lib/python3.9 + lib_path: str # Where our patching scripts are written (i.e., venv/lib) + target: TargetContext + + +def guess_target_platform(host_gnu_type: str) -> str: + # It was probably natively compiled, but not necessarily for this + # architecture. Guess from HOST_GNU_TYPE. + # TODO: Handle windows somehow (ha, ha) + host = host_gnu_type.lower().split("-") + if len(host) == 4: # i.e., aarch64-unknown-linux-gnu + plat, machine = [host[2], host[0]] + elif len(host) == 3: # i.e., aarch64-linux-gnu, unlikely. 
def guess_uname(
    target_platform: str,
    host_gnu_type: str,
    uname_machine: Optional[str],
    macosx_deployment_target: Optional[str],
) -> Uname:
    """Derive plausible uname fields for the cross-compilation target.

    Args:
        target_platform: something like "linux-x86_64" (format can vary).
        host_gnu_type: the configure-style triple, used as a machine fallback.
        uname_machine: explicit machine override, or None to guess.
        macosx_deployment_target: e.g. "11.0"; when set, drives the Darwin
            kernel release number.

    Returns:
        A Uname with guessed machine, release, and sysname.

    Raises:
        ValueError: when MACOSX_DEPLOYMENT_TARGET is malformed or its major
            version is not 10 or 11.
    """
    parts = target_platform.split("-")

    # str.split always yields at least one element for a string, so the
    # sys.platform fallback is only a (theoretical) safety net.
    sysname = parts[0] if parts else sys.platform

    machine = uname_machine
    if machine is None:
        if len(parts) > 1:
            # Special case kept from upstream: on uname.machine=ppc64le,
            # _PYTHON_HOST_PLATFORM is linux-powerpc64le, so translate back.
            machine = "ppc64le" if parts[-1] == "powerpc64le" else parts[-1]
        else:
            machine = host_gnu_type.split("-")[0]

    release = "0.0.0"
    if macosx_deployment_target:
        try:
            major_str, minor_str = macosx_deployment_target.split(".")
            major, minor = int(major_str), int(minor_str)
        except ValueError:
            raise ValueError(
                f"Unexpected value {macosx_deployment_target} for MACOSX_DEPLOYMENT_TARGET"
            )
        # Map the deployment target to a Darwin kernel release number.
        if major == 10:
            release = "%s.0.0" % (minor + 4)
        elif major == 11:
            release = "%s.0.0" % (minor + 20)
        else:
            raise ValueError(
                f"Unexpected major version {major} for MACOSX_DEPLOYMENT_TARGET"
            )

    return Uname(machine=machine, release=release, sysname=sysname)
def determine_glibc_version(sysconfig_vars: Dict[str, Any]) -> Optional[str]:
    """Ask the target compiler which glibc version it would link against.

    Compiles GLIBC_TELLER with the configured CC/CFLAGS and scans the
    compiler's diagnostic output for the "GLIBC=X.Y" pragma message, so the
    (possibly foreign-architecture) binary never has to be executed.

    Args:
        sysconfig_vars: target sysconfig variables; "CC" and "CFLAGS" are read.

    Returns:
        The glibc version string (e.g. "2.17"), or None when the toolchain
        does not target glibc.
    """
    compile_cmd = sysconfig_vars["CC"].split() + sysconfig_vars["CFLAGS"].split()
    with tempfile.TemporaryDirectory() as scratch_dir:
        scratch = Path(scratch_dir)
        source_file = scratch / "teller.cc"
        object_file = scratch / "teller"

        source_file.write_text(GLIBC_TELLER)

        compile_cmd += ["-c", "-o", str(object_file), str(source_file)]
        # Echo the command into the build log for debuggability.
        print(" ".join(compile_cmd))
        raw = subprocess.check_output(
            compile_cmd, env=os.environ, stderr=subprocess.STDOUT
        )

        match = re.search("GLIBC=([0-9.]+)", raw.decode("utf-8"))
        if match:
            return match.group(1)
    return None
multiarch=sysconfig_vars.get("MULTIARCH"), + platform=target_platform, + project_base=str(project_base), + sysconfigdata_name=SYSCONFIG_DATA_NAME, + sysconfigdata_path=sysconfig_data_file, + sysconfig_platform=target_sysconfig_platform, + uname_machine=target_uname.machine, + uname_release=target_uname.release, + uname_sysname=target_uname.sysname, + ) + + context = Context( + exec_python_executable=os.path.abspath(sys.executable), + exec_stdlib=os.path.abspath(os.path.dirname(os.__file__)), + lib_path=lib_path, + target=target_context, + ) + + return context + + +def expand_manylinux_tags(tags: List[str]) -> List[str]: + """ + Convert legacy manylinux tags to PEP600, because pip only looks for one + or the other + """ + + manylinux_tags = set(tags) + extra_tags = set() + + # we'll be very strict here: don't assume that manylinux2014 implies + # manylinux1 and so on. + if "manylinux1" in manylinux_tags: + extra_tags.add("manylinux_2_5") + if "manylinux2010" in manylinux_tags: + extra_tags.add("manylinux_2_12") + if "manylinux2014" in manylinux_tags: + extra_tags.add("manylinux_2_17") + if "manylinux_2_5" in manylinux_tags: + extra_tags.add("manylinux1") + if "manylinux_2_12" in manylinux_tags: + extra_tags.add("manylinux2010") + if "manylinux_2_17" in manylinux_tags: + extra_tags.add("manylinux2014") + + manylinux_tags.update(extra_tags) + return sorted(manylinux_tags) + + +def write_sysconfig_data( + sysconfig_data_path: Path, sysconfig_vars: Dict[str, Any] +) -> None: + with open(sysconfig_data_path, "w") as f: + f.write("# Generated by rules_pycross\n") + f.write("build_time_vars = ") + pprint.pprint(sysconfig_vars, stream=f, compact=True) + + +def write_pyvenv_cfg(env_path: Path, target_bin: str) -> None: + with open(env_path / "pyvenv.cfg", "w") as f: + f.writelines( + [ + f"home = {target_bin}\n", + "include-system-site-packages = false\n", + f"version = {platform.python_version()}\n", + ] + ) + + +def build_env( + env_dir: str, + target_python_exe: str, + 
sysconfig_vars: Dict[str, Any], + manylinux_tags: List[str], +) -> Path: + pyver = "python" + sysconfig.get_config_var("py_version_short") + env_path = Path(env_dir) + lib_path = env_path / "lib" + + # resolve this because Python 3.11 seems to do so itself, and we need both of our representations to be the same. + lib_path = lib_path.resolve() + + site_path = lib_path / pyver / "site-packages" + bin_path = env_path / "bin" + exe = bin_path / pyver + sysconfig_data_file = site_path / (SYSCONFIG_DATA_NAME + ".py") + + bin_path.mkdir(parents=True) + lib_path.mkdir(parents=True) + site_path.mkdir(parents=True) + + write_sysconfig_data(sysconfig_data_file, sysconfig_vars) + context = build_context( + target_python_exe=target_python_exe, + lib_path=str(lib_path), + sysconfig_vars=sysconfig_vars, + sysconfig_data_file=str(sysconfig_data_file), + manylinux_tags=expand_manylinux_tags(manylinux_tags), + target_platform=None, # guess + uname_machine=None, # guess + ) + + write_pyvenv_cfg(env_path, str(context.target.project_base)) + + tmpl = utils.TemplateContext() + tmpl.update(context.__dict__) + utils.install_script("pywrapper.py.tmpl", str(exe), tmpl) + + # Everything in lib_path follows the same pattern + site_scripts = [ + "site.py", + "sys-patch.py", + "os-patch.py", + "platform-patch.py", + "sysconfig-patch.py", + "distutils-sysconfig-patch.py", + ] + + for script in site_scripts: + src = script + ".tmpl" + dst = os.path.join(context.lib_path, script) + utils.install_script(src, dst, tmpl) + + utils.install_script( + "_manylinux.py.tmpl", + os.path.join(str(site_path), "_manylinux.py"), + tmpl, + ) + + # Symlink alternate names to our wrapper + for link_name in ("python", "python3"): + link = bin_path / link_name + if not link.exists(): + link.symlink_to(pyver) + + return env_path + + +def main(): + import argparse + + parser = argparse.ArgumentParser() + + parser.add_argument( + "--sysconfig-json", + help="A JSON file containing sysconfig data.", + ) + 
parser.add_argument( + "--manylinux", + action="append", + default=[], + help="""Declare compatibility with the given manylinux platform tag to + enable pre-compiled wheels. This argument may be given multiple + times.""", + ) + parser.add_argument( + "--env-dir", + help="Path to the created environment.", + ) + parser.add_argument( + "--target-python", + help="Path to the target Python interpreter executable.", + ) + + args = parser.parse_args() + + with open(args.sysconfig_json, "r") as f: + sysconfig_vars = json.load(f) + + build_env( + env_dir=args.env_dir, + target_python_exe=args.target_python, + sysconfig_vars=sysconfig_vars, + manylinux_tags=args.manylinux, + ) diff --git a/python/private/pycross_staging/private/tools/crossenv/__main__.py b/python/private/pycross_staging/private/tools/crossenv/__main__.py new file mode 100644 index 0000000000..63c6e58a04 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/__main__.py @@ -0,0 +1,3 @@ +from pycross.private.tools.crossenv import main + +main() diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/BUILD.bazel b/python/private/pycross_staging/private/tools/crossenv/scripts/BUILD.bazel new file mode 100644 index 0000000000..1ee7686bd4 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/BUILD.bazel @@ -0,0 +1,7 @@ +package(default_visibility = ["//visibility:private"]) + +filegroup( + name = "scripts", + srcs = glob(["*.tmpl"]), + visibility = ["//pycross/private/tools/crossenv:__subpackages__"], +) diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/_manylinux.py.tmpl b/python/private/pycross_staging/private/tools/crossenv/scripts/_manylinux.py.tmpl new file mode 100644 index 0000000000..30b70cf482 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/_manylinux.py.tmpl @@ -0,0 +1,15 @@ +# Unconditionally disable manylinux support in a cross environment + +_tags = 
{{repr(target.manylinux_tags)}} + +def manylinux_compatible(tag_major, tag_minor, tag_arch): + import platform + if tag_arch != platform.machine(): + return False + + tag = 'manylinux_%s_%s' % (tag_major, tag_minor) + return tag in _tags + +manylinux1_compatible = {{'manylinux1' in target.manylinux_tags}} +manylinux2010_compatible = {{'manylinux2010' in target.manylinux_tags}} +manylinux2014_compatible = {{'manylinux2014' in target.manylinux_tags}} diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/distutils-sysconfig-patch.py.tmpl b/python/private/pycross_staging/private/tools/crossenv/scripts/distutils-sysconfig-patch.py.tmpl new file mode 100644 index 0000000000..51c52c1928 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/distutils-sysconfig-patch.py.tmpl @@ -0,0 +1,29 @@ +# Patch the things that depend on os.environ or sys +# This is very similar to the sysconfig patch + +# Only patch if get_config_vars was implemented in this module. Python 3.10 +# merged the implementations by importing from sysconfig, so we don't need to +# patch twice. +if get_config_vars.__module__ == __name__: + project_base = {{repr(target.project_base)}} + + try: + __real_init_posix = _init_posix + def _init_posix(): + old = os.environ.get('_PYTHON_SYSCONFIGDATA_NAME') + os.environ['_PYTHON_SYSCONFIGDATA_NAME'] = {{repr(target.sysconfigdata_name)}} + try: + return __real_init_posix() + finally: + if old is None: + del os.environ['_PYTHON_SYSCONFIGDATA_NAME'] + else: + os.environ['_PYTHON_SYSCONFIGDATA_NAME'] = old + except NameError: + # setuptools >=61, removed _init_posix and fixes distutils.sysconfig + # to use sysconfig which we patch in sysconfig-patch.py + pass + + assert not _config_vars, "distutils.sysconfig was set up prior to patching?" 
+ +#vi: ft=python diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/os-patch.py.tmpl b/python/private/pycross_staging/private/tools/crossenv/scripts/os-patch.py.tmpl new file mode 100644 index 0000000000..bef3b19998 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/os-patch.py.tmpl @@ -0,0 +1,31 @@ +from collections import namedtuple + +# Fixup os.uname, which should fix most of platform module +uname_result_type = namedtuple('uname_result', + 'sysname nodename release version machine') +_uname_result = uname_result_type( + {{repr(target.uname_sysname.title())}}, + 'build', + {{repr(target.uname_release)}}, + '', + {{repr(target.uname_machine)}}) + +def uname(): + return _uname_result + +# pip, packaging, and subprocess all use confstr to get the libc version. We do +# not want the host's glibc version to show up, as this affects things like +# manylinux determination. Always return something of the form "name version", +# or pip will fall back to querying ctypes, which I am not brave enough to +# patch. Subprocess will likely end up using less efficient implementations, +# but functionality shouldn't be affected. 
+_original_confstr = confstr +def confstr(name): + if name == 'CS_GNU_LIBC_VERSION': + version = {{repr(target.effective_glibc)}} + if version is None: + return 'unknown 0.0' + else: + return f'glibc {version}' + else: + return _original_confstr(name) diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/platform-patch.py.tmpl b/python/private/pycross_staging/private/tools/crossenv/scripts/platform-patch.py.tmpl new file mode 100644 index 0000000000..b8968fb80f --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/platform-patch.py.tmpl @@ -0,0 +1,23 @@ +from collections import namedtuple + +platform_uname_result_type = namedtuple('uname_result', + 'system node release version machine processor') +_uname_result = platform_uname_result_type( + {{repr(target.uname_sysname.title())}}, + 'build', + {{repr(target.uname_release)}}, + '', + {{repr(target.uname_machine)}}, + {{repr(target.uname_machine)}}) +def uname(): + return _uname_result + +def libc_ver(*args, **kwargs): + return ('', '') + +def mac_ver(release='', versioninfo=('', '', ''), machine=''): + if release == '': + release = {{repr(target.macosx_deployment_target)}} + if machine == '': + machine = _uname_result.machine + return release, versioninfo, machine diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/pywrapper.py.tmpl b/python/private/pycross_staging/private/tools/crossenv/scripts/pywrapper.py.tmpl new file mode 100644 index 0000000000..4c0976155a --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/pywrapper.py.tmpl @@ -0,0 +1,38 @@ +#!{{exec_python_executable}} -I + +import os +import sys + + +for name in ['_PYTHON_PROJECT_BASE', '_PYTHON_HOST_PLATFORM', + '_PYTHON_SYSCONFIGDATA_NAME', 'PYTHONHOME', 'PYTHONPATH']: + old = '_OLD_' + name + if old not in os.environ and name in os.environ: + os.environ[old] = os.environ[name] + +os.environ['_PYTHON_PROJECT_BASE']={{repr(target.project_base)}} 
+os.environ['_PYTHON_HOST_PLATFORM']={{repr(target.platform)}} +os.environ['_PYTHON_SYSCONFIGDATA_NAME']={{repr(target.sysconfigdata_name)}} +os.environ['PYTHONHOME']={{repr(target.home)}} + +# In python 3.11, the import machinery imports from math, which breaks +# in our cross environment.. so we inject lib-dynload to the path also +stdlib = {{repr(exec_stdlib)}} +lib_dynload = os.path.join(stdlib, "lib-dynload") + +oldpath = os.environ.get('PYTHONPATH') +newpath = os.pathsep.join([{{repr(lib_path)}}, stdlib, lib_dynload]) +if oldpath: + path = os.pathsep.join([newpath, oldpath]) +else: + path = newpath + +os.environ['PYTHONPATH'] = path + +# In python 3.11, several system packages are frozen by default, which +# prevents us from patching it. Disable it. +if sys.version_info >= (3,11): + sys.argv[1:1] = ["-X", "frozen_modules=off"] + +# This will fix up argv0 so that sys.executable will be correct +os.execv({{repr(exec_python_executable)}}, sys.argv) diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/site.py.tmpl b/python/private/pycross_staging/private/tools/crossenv/scripts/site.py.tmpl new file mode 100644 index 0000000000..6555bef4b6 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/site.py.tmpl @@ -0,0 +1,161 @@ +# Import only what we absolutely need before the path fixup + +# First a guard: If we left these environment variables in place we might +# have messed with another Python installation. It's broken beyond repair +# by this point in the startup process, but we can at least offer a helpful +# warning...and who knows, it might still work, mostly. +# There's also a chance that this is python2 we've messed with, so don't +# import os, which can cause a SyntaxError. 
+import sys +import posix + +import os +import importlib.machinery +import importlib.abc +import importlib.util +import traceback # used in sys-patch.py.tmpl; needs to be in sys.modules + +# To prevent the above scenario from playing out every time run a script that +# starts with #!/usr/bin/python, we need to remove the environment variables so +# subprocesses won't see them. +for name in ['_PYTHON_PROJECT_BASE', '_PYTHON_HOST_PLATFORM', + '_PYTHON_SYSCONFIGDATA_NAME', 'PYTHONHOME', 'PYTHONPATH']: + prev = '_OLD_' + name + try: + os.environ[name] = os.environ[prev] + except KeyError: + os.environ.pop(name, None) + os.environ.pop(prev, None) + +# Very little is imported right now, which gives us a chance to patch most +# things on import. A custom meta path finder will both patch things up and +# forcibly load sysconfigdata from where we want it. +# +# This is more flexible than manually implementing our patches here. +# Additionally, we need to patch something in distutils, but importing it in +# order to patch will subsequently cause setuptools to complain. + +def _patch_module(module, patch): + # add our patch as if it had been typed just after + # explicit encoding, because we know that utf-8 has been loaded already. + # The default causes an import to happen too early. 
+ with open(patch, 'r', encoding='utf-8') as fp: + src = fp.read() + exec(src, module.__dict__, module.__dict__) + module.__patched__ = True + +def make_loader(original, patch): + if hasattr(original, 'exec_module'): + return CrossenvPatchLoader(original, patch) + else: + return CrossenvPatchLegacyLoader(original, patch) + +class CrossenvPatchLoader(importlib.abc.Loader): + def __init__(self, original, patch): + self.original = original + self.patch = patch + + def create_module(self, spec): + return self.original.create_module(spec) + + def exec_module(self, module): + self.original.exec_module(module) + _patch_module(module, self.patch) + + # runpy module expects a source loader, should we try to run 'python -m + # sysconfig'. This has extra methods, so we'll keep it happy with whatever + # it wants. + def __getattr__(self, name): + return getattr(self.original, name) + +class CrossenvPatchLegacyLoader(importlib.abc.Loader): + def __init__(self, original, patch): + self.original = original + self.patch = patch + + def load_module(self, fullname): + module = self.original.load_module(fullname) + if not hasattr(module, '__patched__'): + _patch_module(module, self.patch) + return module + + def __getattr__(self, name): + return getattr(self.original, name) + +class CrossenvFinder(importlib.abc.MetaPathFinder): + """Mucks with import machinery in two ways: + + 1) loads sysconfigdata from our hard-coded path, regardless of sys.path + 2) intercepts and patches modules as they are loaded + """ + + PATCHES = { + 'sys': '{{lib_path}}/sys-patch.py', + 'os': '{{lib_path}}/os-patch.py', + 'sysconfig': '{{lib_path}}/sysconfig-patch.py', + 'distutils.sysconfig': '{{lib_path}}/distutils-sysconfig-patch.py', + 'distutils.sysconfig_pypy': '{{lib_path}}/distutils-sysconfig-patch.py', + 'platform': '{{lib_path}}/platform-patch.py', + } + + def __init__(self): + # At startup, manually patch things that have already been loaded. 
We + # can't re-load them because they might be used in many places already. + # This will be sys and os at the very least. + self.manually_patch_loaded() + + def find_spec(self, fullname, path, target=None): + spec = self._find_sysconfigdata(fullname, path, target) + if spec: + return spec + + return self._patch_spec(fullname, path, target) + + def _find_sysconfigdata(self, fullname, path, target): + if fullname == {{repr(target.sysconfigdata_name)}}: + return importlib.util.spec_from_file_location( + fullname, {{repr(target.sysconfigdata_path)}}) + else: + return None + + def _patch_spec(self, fullname, path, target): + """If necessary, set up to patch a module""" + if fullname not in self.PATCHES: + return None + + # query the next finders to see who really would have loaded it + try: + start = sys.meta_path.index(self) + 1 + except ValueError: + return None + + for finder in sys.meta_path[start:]: + spec = finder.find_spec(fullname, path, target) + if spec is not None: + break + else: + return None + + patch = self.PATCHES[fullname] + spec.loader = make_loader(spec.loader, patch) + return spec + + def manually_patch_loaded(self): + for name, module in sys.modules.items(): + if name in self.PATCHES: + _patch_module(module, self.PATCHES[name]) + +# add just before the real path finder +try: + _index = sys.meta_path.index(importlib.machinery.PathFinder) +except ValueError: + _index = 0 +sys.meta_path.insert(_index, CrossenvFinder()) + +# Re-import the real site module, so Python can continue booting. Crossenv is +# ready! We do want to remove sysconfig or any other module that relies on +# patching after site has messed with sys. 
+del sys.modules['site'] +sys.modules.pop('sysconfig', None) +sys.modules.pop('os', None) +import site diff --git a/python/private/pycross_staging/private/tools/crossenv/scripts/sys-patch.py.tmpl b/python/private/pycross_staging/private/tools/crossenv/scripts/sys-patch.py.tmpl new file mode 100644 index 0000000000..75cb8e54d8 --- /dev/null +++ b/python/private/pycross_staging/private/tools/crossenv/scripts/sys-patch.py.tmpl @@ -0,0 +1,42 @@ +cross_compiling = True + +abiflags = {{repr(target.abiflags)}} +if abiflags is None: + del abiflags + +implementation._multiarch = {{repr(target.multiarch)}} +if implementation._multiarch is None: + del implementation._multiarch + +# Remove cross-python from sys.path. It's not needed after startup. +path.remove({{repr(lib_path)}}) +path.remove({{repr(exec_stdlib)}}) + +# Patch sys.platform +exec_platform = platform +target_platform = {{repr(target.uname_sysname.lower())}} +del platform + +# TODO: improve the target vs. exec criteria in this method. +def __getattr__(name): + if name != 'platform': + raise AttributeError(name) + + import traceback + frames = traceback.extract_stack(limit=2) + assert len(frames) == 2 + frame = frames[0] + + # frozen modules - part of the interpreter itself? Report exec platform. + if frame.filename.startswith(" Dict[str, Any]: + pattern = "_sysconfigdata_*.py*" + maybe = [] + for path in paths: + maybe.extend(path.glob(pattern)) + + if given_file: + sysconfig_paths = [given_file] + else: + found = set() + for filename in maybe: + if os.path.isfile(filename) and os.path.splitext(filename)[1] in ( + ".py", + ".pyc", + ): + found.add(filename) + + # Multiples can happen, but so long as they all have the same + # info we should be okay. 
Seen in buildroot + # When choosing the correct one, prefer, in order: + # 1) The .py file + # 2) The .pyc file + # 3) Any .opt-*.pyc files + # so sort by the length of the longest extension + sysconfig_paths = sorted(found, key=lambda x: len(str(x).split(".", 1)[1])) + + target_sysconfigdata = None + target_sysconfigdata_file = None + for path in sysconfig_paths: + basename = os.path.basename(path) + name, _ = os.path.splitext(basename) + spec = importlib.util.spec_from_file_location(name, path) + assert spec and spec.loader + syscfg = importlib.util.module_from_spec(spec) + spec.loader.exec_module(syscfg) + if target_sysconfigdata is None: + target_sysconfigdata = syscfg + target_sysconfigdata_file = path + elif target_sysconfigdata.build_time_vars != syscfg.build_time_vars: + raise ValueError( + f"Malformed Python installation: Conflicting build info in {target_sysconfigdata_file} and {path}" + ) + if not target_sysconfigdata: + path_strs = [str(p) for p in sysconfig_paths] + raise FileNotFoundError( + f"No {pattern} found in target paths. Looked in {', '.join(path_strs)}" + ) + + return target_sysconfigdata.build_time_vars diff --git a/python/private/pycross_staging/private/tools/extract_lock_repos.py b/python/private/pycross_staging/private/tools/extract_lock_repos.py new file mode 100644 index 0000000000..264fb30659 --- /dev/null +++ b/python/private/pycross_staging/private/tools/extract_lock_repos.py @@ -0,0 +1,46 @@ +"""Extracts the repositories section from a Pycross-generated lock file. + +We extract the section by exec()ing the file as Python code. +""" +import json +import sys + + +def main(file): + with open(file) as f: + lock_text = f.read() + + # This is where we'll collect the repo definitions. + repos = [] + + # Mimics the maybe func which calls the first argument with *a and **kw. 
+ def maybe(func, *a, **kw): + func(*a, **kw) + + exec_globals = {"__builtins__": None, "maybe": maybe} + + def load(_file, *a, **kw): + # Mimic a load by creating functions named in *a and **kw. + # These functions simply store passed parameters in repos. + for type in list(a) + list(kw): + if type in exec_globals: + continue + + def fn(**kw): + repos.append({"type": type, "attrs": kw}) + + exec_globals[type] = fn + + exec_globals["load"] = load + + # We need to actually call the repositories function in our exec. + lock_text += "\n" + lock_text += "repositories()" + exec(lock_text, exec_globals) + + # Print the results to stdout. + print(json.dumps(repos, indent=2)) + + +if __name__ == "__main__": + main(sys.argv[1]) diff --git a/python/private/pycross_staging/private/tools/lock_model.py b/python/private/pycross_staging/private/tools/lock_model.py new file mode 100644 index 0000000000..cde518cf96 --- /dev/null +++ b/python/private/pycross_staging/private/tools/lock_model.py @@ -0,0 +1,295 @@ +from __future__ import annotations + +import dataclasses +import json +from dataclasses import dataclass, field +from functools import cached_property +from json import JSONEncoder +from typing import Any, Dict, Iterator, List, Optional, Tuple + +from dacite.config import Config +from dacite.core import from_dict +from packaging.utils import ( + NormalizedName, + canonicalize_name, + parse_sdist_filename, + parse_wheel_filename, +) +from packaging.version import Version +from pycross.private.tools.target_environment import TargetEnv + + +class _Encoder(JSONEncoder): + def default(self, o): + def _is_empty(val): + if val is None: + return True + if isinstance(val, (list, dict)): + return len(val) == 0 + return False + + if isinstance(o, (FileKey, PackageKey, Version)): + return str(o) + if dataclasses.is_dataclass(o): + # Omit None values from serialized output. 
+ return {k: v for k, v in o.__dict__.items() if not _is_empty(v)} + return super().default(o) + + +def _stringify_keys(original: Dict[Any, Any]) -> Dict[str, Any]: + """ + Return original with keys stringified. + + The json module's encoder does not support complex key types, such as + PackageKey and FileKey. We stringify these values before passing them to + json. + """ + return {str(key): val for key, val in original.items()} + + +def _dataclass_items(dc) -> Iterator[Tuple[str, Any]]: + for item in dataclasses.fields(dc): + yield item.name, getattr(dc, item.name) + + +@dataclass(frozen=True, order=True) +class PackageKey: + name: NormalizedName + version: Version + + def __init__(self, val) -> None: + name, version = val.split("@", maxsplit=1) + object.__setattr__(self, "name", package_canonical_name(name)) + object.__setattr__(self, "version", Version(version)) + + @staticmethod + def from_parts(name: NormalizedName, version: Version) -> PackageKey: + return PackageKey(f"{name}@{version}") + + def __str__(self) -> str: + return f"{self.name}@{self.version}" + + +@dataclass(frozen=True, order=True) +class FileKey: + name: str + hash_prefix: str + + def __init__(self, val: str) -> None: + name, hash_prefix = val.split("/", maxsplit=1) + object.__setattr__(self, "name", name) + object.__setattr__(self, "hash_prefix", hash_prefix) + + @staticmethod + def from_parts(name: str, hash_prefix: str) -> FileKey: + return FileKey(f"{name}/{hash_prefix}") + + @property + def is_wheel(self) -> bool: + return is_wheel(self.name) + + @property + def is_sdist(self) -> bool: + return not self.is_wheel + + def __str__(self) -> str: + return f"{self.name}/{self.hash_prefix}" + + +@dataclass(frozen=True) +class FileReference: + label: Optional[str] = None + key: Optional[FileKey] = None + + def __post_init__(self): + assert ( + int(self.label is not None) + int(self.key is not None) == 1 + ), "Exactly one of label or key must be specified." 
+ + +@dataclass +class ConfigSetting: + constraint_values: List[str] = field(default_factory=list) + flag_values: Dict[str, str] = field(default_factory=dict) + + +@dataclass(frozen=True) +class EnvironmentReference: + environment_label: str + config_setting: Optional[ConfigSetting] = None + config_setting_label: Optional[str] = None + + def __post_init__(self): + assert ( + int(self.config_setting is not None) + + int(self.config_setting_label is not None) + == 1 + ), "Exactly one of config_setting or config_setting_label must be specified." + + @classmethod + def from_target_env( + cls, environment_label: str, target_env: TargetEnv + ) -> EnvironmentReference: + if target_env.config_setting_target: + return cls( + environment_label=environment_label, + config_setting_label=target_env.config_setting_target, + ) + else: + return cls( + environment_label=environment_label, + config_setting=ConfigSetting( + constraint_values=target_env.python_compatible_with, + flag_values=target_env.flag_values, + ), + ) + + +@dataclass(frozen=True) +class PackageFile: + name: str + sha256: str + urls: Tuple[str, ...] = field(default_factory=tuple) + package_name: Optional[NormalizedName] = None + package_version: Optional[Version] = None + + def __post_init__(self): + assert self.name, "The name field must be specified." + assert self.sha256, "The sha256 field must be specified." 
+ if self.package_name is None or self.package_version is None: + # Derive package name + version from file name + if is_wheel(self.name): + name, version, _, _ = parse_wheel_filename(self.name) + else: + name, version = parse_sdist_filename(self.name) + if self.package_name is None: + object.__setattr__(self, "package_name", name) + if self.package_version is None: + object.__setattr__(self, "package_version", version) + + @property + def is_wheel(self) -> bool: + return is_wheel(self.name) + + @property + def is_sdist(self) -> bool: + return not self.is_wheel + + @cached_property + def key(self) -> FileKey: + return FileKey.from_parts(self.name, self.sha256[:8]) + + +@dataclass(frozen=True) +class PackageDependency: + name: NormalizedName + version: Version + marker: str + + def __post_init__(self): + assert self.name, "The name field must be specified." + assert self.version, "The version field must be specified." + assert ( + self.marker is not None + ), "The marker field must be specified, or an empty string." + + @property + def key(self) -> PackageKey: + return PackageKey.from_parts(self.name, self.version) + + +@dataclass(frozen=True) +class RawPackage: + name: NormalizedName + version: Version + python_versions: str + dependencies: List[PackageDependency] = field(default_factory=list) + files: List[PackageFile] = field(default_factory=list) + + def __post_init__(self): + normalized_name = package_canonical_name(self.name) + assert str(self.name) == str( + normalized_name + ), "The name field should be normalized per PEP 503." + object.__setattr__(self, "name", normalized_name) + + assert self.version, "The version field must be specified." + assert ( + self.python_versions is not None + ), "The python_versions field must be specified, or an empty string." + assert ( + self.dependencies is not None + ), "The dependencies field must be specified as a list." + assert self.files, "The files field must not be empty." 
+ + @property + def key(self) -> PackageKey: + return PackageKey.from_parts(self.name, self.version) + + +@dataclass +class ResolvedPackage: + key: PackageKey + build_dependencies: List[PackageKey] = field(default_factory=list) + common_dependencies: List[PackageKey] = field(default_factory=list) + environment_dependencies: Dict[str, List[PackageKey]] = field(default_factory=dict) + build_target: Optional[str] = None + environment_files: Dict[str, FileReference] = field(default_factory=dict) + sdist_file: Optional[FileReference] = None + install_exclude_globs: List[str] = field(default_factory=list) + + +@dataclass(frozen=True) +class RawLockSet: + packages: Dict[PackageKey, RawPackage] = field(default_factory=dict) + pins: Dict[NormalizedName, PackageKey] = field(default_factory=dict) + + @property + def __dict__(self) -> Dict[str, Any]: + return dict(_dataclass_items(self), packages=_stringify_keys(self.packages)) + + def to_json(self, indent=None) -> str: + return json.dumps(self, sort_keys=True, indent=indent, cls=_Encoder) + + @classmethod + def from_json(cls, data: str) -> RawLockSet: + parsed = json.loads(data) + return from_dict( + RawLockSet, parsed, config=Config(cast=[Tuple, Version, PackageKey]) + ) + + +@dataclass(frozen=True) +class ResolvedLockSet: + environments: Dict[str, EnvironmentReference] = field(default_factory=dict) + packages: Dict[PackageKey, ResolvedPackage] = field(default_factory=dict) + pins: Dict[NormalizedName, PackageKey] = field(default_factory=dict) + remote_files: Dict[FileKey, PackageFile] = field(default_factory=dict) + + @property + def __dict__(self) -> Dict[str, Any]: + return dict( + _dataclass_items(self), + packages=_stringify_keys(self.packages), + remote_files=_stringify_keys(self.remote_files), + ) + + def to_json(self, indent=None) -> str: + return json.dumps(self, sort_keys=True, indent=indent, cls=_Encoder) + + @classmethod + def from_json(cls, data: str) -> ResolvedLockSet: + parsed = json.loads(data) + return 
def package_canonical_name(name: str) -> "NormalizedName":
    """Returns the PEP 503 normalized form of a distribution name."""
    return canonicalize_name(name)


def is_wheel(filename: str) -> bool:
    """True when the file name denotes a wheel (case-insensitive .whl suffix)."""
    return filename.lower().endswith(".whl")


def implicit_namespace_packages(
    directory: str, ignored_dirnames: "Optional[List[str]]" = None
) -> "Set[Path]":
    """Discovers namespace packages implemented using the 'native namespace packages' method.

    AKA 'implicit namespace packages', which has been supported since Python 3.3.
    See: https://packaging.python.org/guides/packaging-namespace-packages/#native-namespace-packages

    Args:
        directory: The root directory to recursively find packages in.
        ignored_dirnames: A list of directories to exclude from the search

    Returns:
        The set of directories found under root to be packages using the native namespace method.
    """
    root = Path(directory)
    ignored: "List[Path]" = [Path(p) for p in (ignored_dirnames or ())]
    namespace_dirs: "Set[Path]" = set()
    regular_dirs: "Set[Path]" = set()

    # Walk bottom-up: a directory only becomes a namespace package once we know
    # whether any of its children contain module files or are packages.
    for raw_dir, child_names, file_names in os.walk(root, topdown=False):
        current = Path(raw_dir)

        if "__init__.py" in file_names:
            # A regular (non-namespace) package.
            regular_dirs.add(current)
            continue
        elif ignored:
            # Skip an ignored directory itself, or anything nested under one.
            within_ignored = current in ignored or any(
                p in current.parents for p in ignored
            )
            if within_ignored:
                continue

        has_modules = _includes_python_modules(file_names)
        parents_a_package = any(
            current / child in namespace_dirs or current / child in regular_dirs
            for child in child_names
        )
        # The root of the tree is never itself an implicit namespace package.
        if (has_modules or parents_a_package) and current != root:
            namespace_dirs.add(current)

    return namespace_dirs


def add_pkgutil_style_namespace_pkg_init(dir_path: Path) -> None:
    """Adds 'pkgutil-style namespace packages' init file to the given directory

    See: https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages

    Args:
        dir_path: The directory to create an __init__.py for.

    Raises:
        ValueError: If the directory already contains an __init__.py file
    """
    init_path = os.path.join(dir_path, "__init__.py")
    if os.path.isfile(init_path):
        raise ValueError("%s already contains an __init__.py file." % dir_path)

    # See https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
    content = textwrap.dedent(
        """\
        # __path__ manipulation added by bazelbuild/rules_python to support namespace pkgs.
        __path__ = __import__('pkgutil').extend_path(__path__, __name__)
        """
    )
    with open(init_path, "w") as out:
        out.write(content)


def _includes_python_modules(files: "List[str]") -> bool:
    """
    In order to only transform directories that Python actually considers namespace pkgs
    we need to detect if a directory includes Python modules.

    Which files are loadable as modules is extension based, and the particular set of extensions
    varies by platform.

    See:
    1. https://github.com/python/cpython/blob/7d9d25dbedfffce61fc76bc7ccbfa9ae901bf56f/Lib/importlib/machinery.py#L19
    2. PEP 420 -- Implicit Namespace Packages, Specification - https://www.python.org/dev/peps/pep-0420/#specification
    3. dynload_shlib.c and dynload_win.c in python/cpython.
    """
    loadable_suffixes = {
        ".py",  # Source modules
        ".pyc",  # Compiled bytecode modules
        ".so",  # Unix extension modules
        ".pyd",  # https://docs.python.org/3/faq/windows.html#is-a-pyd-file-the-same-as-a-dll
    }
    return any(Path(name).suffix in loadable_suffixes for name in files)
+ shutil.rmtree(self.dir) + + +class TestImplicitNamespacePackages(unittest.TestCase): + def assertPathsEqual(self, actual: Set[pathlib.Path], expected: Set[str]) -> None: + self.assertEqual(actual, {pathlib.Path(p) for p in expected}) + + def test_in_current_directory(self) -> None: + directory = TempDir() + directory.add_file("foo/bar/biz.py") + directory.add_file("foo/bee/boo.py") + directory.add_file("foo/buu/__init__.py") + directory.add_file("foo/buu/bii.py") + cwd = os.getcwd() + os.chdir(directory.root()) + expected = { + "foo", + "foo/bar", + "foo/bee", + } + try: + actual = namespace_pkgs.implicit_namespace_packages(".") + self.assertPathsEqual(actual, expected) + finally: + os.chdir(cwd) + directory.remove() + + def test_finds_correct_namespace_packages(self) -> None: + directory = TempDir() + directory.add_file("foo/bar/biz.py") + directory.add_file("foo/bee/boo.py") + directory.add_file("foo/buu/__init__.py") + directory.add_file("foo/buu/bii.py") + + expected = { + directory.root() + "/foo", + directory.root() + "/foo/bar", + directory.root() + "/foo/bee", + } + actual = namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertPathsEqual(actual, expected) + + def test_ignores_empty_directories(self) -> None: + directory = TempDir() + directory.add_file("foo/bar/biz.py") + directory.add_dir("foo/cat") + + expected = { + directory.root() + "/foo", + directory.root() + "/foo/bar", + } + actual = namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertPathsEqual(actual, expected) + + def test_empty_case(self) -> None: + directory = TempDir() + directory.add_file("foo/__init__.py") + directory.add_file("foo/bar/__init__.py") + directory.add_file("foo/bar/biz.py") + + actual = namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertEqual(actual, set()) + + def test_ignores_non_module_files_in_directories(self) -> None: + directory = TempDir() + directory.add_file("foo/__init__.pyi") + 
directory.add_file("foo/py.typed") + + actual = namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertEqual(actual, set()) + + def test_parent_child_relationship_of_namespace_pkgs(self): + directory = TempDir() + directory.add_file("foo/bar/biff/my_module.py") + directory.add_file("foo/bar/biff/another_module.py") + + expected = { + directory.root() + "/foo", + directory.root() + "/foo/bar", + directory.root() + "/foo/bar/biff", + } + actual = namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertPathsEqual(actual, expected) + + def test_parent_child_relationship_of_namespace_and_standard_pkgs(self): + directory = TempDir() + directory.add_file("foo/bar/biff/__init__.py") + directory.add_file("foo/bar/biff/another_module.py") + + expected = { + directory.root() + "/foo", + directory.root() + "/foo/bar", + } + actual = namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertPathsEqual(actual, expected) + + def test_parent_child_relationship_of_namespace_and_nested_standard_pkgs(self): + directory = TempDir() + directory.add_file("foo/bar/__init__.py") + directory.add_file("foo/bar/biff/another_module.py") + directory.add_file("foo/bar/biff/__init__.py") + directory.add_file("foo/bar/boof/big_module.py") + directory.add_file("foo/bar/boof/__init__.py") + directory.add_file("fim/in_a_ns_pkg.py") + + expected = { + directory.root() + "/foo", + directory.root() + "/fim", + } + actual = namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertPathsEqual(actual, expected) + + def test_recognized_all_nonstandard_module_types(self): + directory = TempDir() + directory.add_file("ayy/my_module.pyc") + directory.add_file("bee/ccc/dee/eee.so") + directory.add_file("eff/jee/aych.pyd") + + expected = { + directory.root() + "/ayy", + directory.root() + "/bee", + directory.root() + "/bee/ccc", + directory.root() + "/bee/ccc/dee", + directory.root() + "/eff", + directory.root() + "/eff/jee", + } + actual = 
namespace_pkgs.implicit_namespace_packages(directory.root()) + self.assertPathsEqual(actual, expected) + + def test_skips_ignored_directories(self): + directory = TempDir() + directory.add_file("foo/boo/my_module.py") + directory.add_file("foo/bar/another_module.py") + + expected = { + directory.root() + "/foo", + directory.root() + "/foo/bar", + } + actual = namespace_pkgs.implicit_namespace_packages( + directory.root(), + ignored_dirnames=[directory.root() + "/foo/boo"], + ) + self.assertPathsEqual(actual, expected) + + +if __name__ == "__main__": + unittest.main() diff --git a/python/private/pycross_staging/private/tools/pdm_translator.py b/python/private/pycross_staging/private/tools/pdm_translator.py new file mode 100644 index 0000000000..90146d9b7e --- /dev/null +++ b/python/private/pycross_staging/private/tools/pdm_translator.py @@ -0,0 +1,417 @@ +from __future__ import annotations + +import os +import re +from collections import defaultdict +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Dict, List, Set +from urllib.parse import unquote, urlparse + +import tomli +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet +from packaging.utils import NormalizedName +from packaging.version import Version +from pycross.private.tools.args import FlagFileArgumentParser +from pycross.private.tools.lock_model import ( + PackageDependency, + PackageFile, + PackageKey, + RawLockSet, + RawPackage, + package_canonical_name, +) + + +class LockfileIncompatibleException(Exception): + pass + + +class LockfileNotStaticException(Exception): + pass + + +class MismatchedVersionException(Exception): + pass + + +# We support anything in the 4.x range. At least that's the idea. 
# We support anything in the 4.x range. At least that's the idea.
SUPPORTED_LOCK_VERSIONS = SpecifierSet("~=4.0")
EDITABLE_PATTERN = re.compile("^ *-e +")


def get_default_dependencies(lock: "Dict[str, Any]") -> "List[Requirement]":
    """Returns the [project].dependencies entries parsed as requirements."""
    entries = lock.get("project", {}).get("dependencies", [])
    return [Requirement(entry) for entry in entries]


def get_optional_dependencies(lock: "Dict[str, Any]") -> "Dict[str, List[Requirement]]":
    """Returns [project].optional-dependencies groups parsed as requirements."""
    parsed: "Dict[str, List[Requirement]]" = {}
    for group, entries in lock.get("project", {}).get("optional-dependencies", {}).items():
        parsed[group] = [Requirement(entry) for entry in entries]
    return parsed


def get_development_dependencies(lock: "Dict[str, Any]") -> "Dict[str, List[Requirement]]":
    """Returns [tool.pdm].dev-dependencies groups parsed as requirements.

    Any leading editable marker ("-e ") is stripped before parsing.
    """
    parsed: "Dict[str, List[Requirement]]" = {}
    groups = lock.get("tool", {}).get("pdm", {}).get("dev-dependencies", {})
    for group, entries in groups.items():
        parsed[group] = [
            Requirement(EDITABLE_PATTERN.sub("", entry)) for entry in entries
        ]
    return parsed


def _print_warn(msg):
    # Plain stdout warning; this tool is a CLI, not a library.
    print("WARNING:", msg)
def parse_file_info(file_info: "Dict[str, Any]") -> "PackageFile":
    """Converts a pdm.lock ``[[package.files]]`` entry into a PackageFile.

    An entry carries either a bare file name (``file``) or a full URL
    (``url``), plus a ``hash`` of the form ``sha256:<hex digest>``.

    Args:
        file_info: A single file table from the lock file.

    Returns:
        A PackageFile with the file name, sha256 digest, and any URLs.

    Raises:
        AssertionError: If the entry has neither a ``file`` nor a ``url``
            member, or its hash is not a sha256 hash.
        KeyError: If the entry has no ``hash`` member.
    """
    if "file" in file_info:
        file_name = file_info["file"]
        urls = ()
    elif "url" in file_info:
        url = file_info["url"]
        # The file name is the (percent-decoded) last path segment of the URL.
        _, file_name = urlparse(url).path.rsplit("/", 1)
        file_name = unquote(file_name)
        urls = (url,)
    else:
        raise AssertionError("file entry has no file or url member")
    file_hash = file_info["hash"]
    # Explicit check instead of `assert` so validation survives `python -O`,
    # and so a failure names the offending entry.
    if not file_hash.startswith("sha256:"):
        raise AssertionError(f"file entry {file_name} has non-sha256 hash: {file_hash}")
    return PackageFile(name=file_name, sha256=file_hash[len("sha256:"):], urls=urls)
requirements.extend(development_dependencies[group_name]) + + pinned_package_specs: Dict[NormalizedName, Requirement] = {} + for req in requirements: + pin = package_canonical_name(req.name) + pinned_package_specs[pin] = req + + distinct_packages: Dict[PackageKey, PDMPackage] = {} + # Pull out all Package entries in a pdm-specific model. + for lock_pkg in lock_dict.get("package", []): + package_listed_name = lock_pkg["name"] + package_name = package_canonical_name(package_listed_name) + package_version = lock_pkg["version"] + package_requires_python = lock_pkg.get("requires_python", "") + package_extras = lock_pkg.get("extras", []) + + if package_requires_python == "*": + # Special case for all python versions + package_requires_python = "" + + dependencies = {Requirement(dep) for dep in lock_pkg.get("dependencies", [])} + files = {parse_file_info(f) for f in lock_pkg.get("files", [])} + is_local = "path" in lock_pkg and "files" not in lock_pkg + + package = PDMPackage( + name=package_name, + version=Version(package_version), + python_versions=SpecifierSet(package_requires_python), + dependencies=dependencies, + files=files, + is_local=is_local, + resolved_dependencies=set(), + extras=set(package_extras), + ) + if package.key in distinct_packages: + distinct_packages[package.key] = package.merge( + distinct_packages[package.key] + ) + else: + distinct_packages[package.key] = package + + all_packages = distinct_packages.values() + + # Next, group packages by their canonical name + packages_by_canonical_name: Dict[str, List[PDMPackage]] = defaultdict(list) + for package in all_packages: + packages_by_canonical_name[package.name].append(package) + + # And sort the packages by version in descending order (newest first) + for package_list in packages_by_canonical_name.values(): + package_list.sort(key=lambda p: p.version, reverse=True) + + # Next, iterate through each package's dependencies and find the newest one that matches. 
+ # Construct a PackageDependency and store it. + for package in all_packages: + for dep in package.dependencies: + dependency_packages = packages_by_canonical_name[ + package_canonical_name(dep.name) + ] + for dep_pkg in dependency_packages: + if dep_pkg.satisfies(dep): + resolved = PackageDependency( + name=dep_pkg.name, + version=dep_pkg.version, + marker=str(dep.marker or ""), + ) + package.resolved_dependencies.add(resolved) + break + else: + raise MismatchedVersionException( + f"Found no packages to satisfy dependency (name={dep.name}, spec={dep.specifier})" + ) + + pinned_keys: Dict[NormalizedName, PackageKey] = {} + for pin, pin_spec in pinned_package_specs.items(): + pin_packages = packages_by_canonical_name[pin] + for pin_pkg in pin_packages: + if pin_spec.specifier.contains(pin_pkg.version, prereleases=True): + pinned_keys[pin] = pin_pkg.key + break + else: + raise MismatchedVersionException( + f"Found no packages to satisfy pin (name={pin}, spec={pin_spec})" + ) + + # Replace pins of local packages with pins of their dependencies. + # We may need to loop multiple times if local packages depend on one another. + while local_pins := [ + key for key in pinned_keys.values() if distinct_packages[key].is_local + ]: + for pin_key in local_pins: + pin_pkg = distinct_packages[pin_key] + pinned_keys.update( + {dep.name: dep.key for dep in pin_pkg.resolved_dependencies} + ) + del pinned_keys[pin_key.name] + + lock_packages: Dict[PackageKey, RawPackage] = {} + for package in all_packages: + if package.is_local: + _print_warn( + "Local package {} elided from pycross repo. 
def main(args: Any) -> None:
    """Translates a pdm project/lock pair into a pycross raw lock set JSON file.

    Args:
        args: Parsed CLI flags (see parse_flags).

    Raises:
        LockfileNotStaticException: If --require-static-urls was given and
            some locked file has no download URL.
    """
    lock_set = translate(
        project_file=args.project_file,
        lock_file=args.lock_file,
        default_group=args.default_group,
        optional_groups=args.optional_group,
        all_optional_groups=args.all_optional_groups,
        development_groups=args.development_group,
        all_development_groups=args.all_development_groups,
    )

    if args.require_static_urls:
        # Every file of every locked package must carry at least one URL.
        for package in lock_set.packages.values():
            for package_file in package.files:
                if package_file.urls:
                    continue
                raise LockfileNotStaticException(
                    "Lock file does not contain static urls. Please use --static-urls when creating the lockfile."
                )

    with open(args.output, "w") as out:
        out.write(lock_set.to_json(indent=2))
parser.add_argument( + "--require-static-urls", + action="store_true", + help="Require that the lock file provide static URLs.", + ) + + parser.add_argument( + "--output", + type=Path, + required=True, + help="The path to the output bzl file.", + ) + + return parser.parse_args() + + +if __name__ == "__main__": + # When under `bazel run`, change to the actual working dir. + if "BUILD_WORKING_DIRECTORY" in os.environ: + os.chdir(os.environ["BUILD_WORKING_DIRECTORY"]) + + main(parse_flags()) diff --git a/python/private/pycross_staging/private/tools/poetry_translator.py b/python/private/pycross_staging/private/tools/poetry_translator.py new file mode 100644 index 0000000000..2fe7b5278b --- /dev/null +++ b/python/private/pycross_staging/private/tools/poetry_translator.py @@ -0,0 +1,296 @@ +import os +from collections import defaultdict +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Dict, List, Optional + +import tomli +from packaging.utils import ( + InvalidSdistFilename, + InvalidWheelFilename, + NormalizedName, + parse_sdist_filename, + parse_wheel_filename, +) +from packaging.version import Version +from poetry.core.constraints.version import Version as PoetryVersion +from poetry.core.constraints.version import parse_constraint +from poetry.core.version import markers +from pycross.private.tools.args import FlagFileArgumentParser +from pycross.private.tools.lock_model import ( + PackageDependency, + PackageFile, + PackageKey, + RawLockSet, + RawPackage, + package_canonical_name, +) + + +class MismatchedVersionException(Exception): + pass + + +@dataclass +class PoetryDependency: + name: str + spec: str + marker: Optional[str] + + @property + def constraint(self): + return parse_constraint(self.spec) + + @property + def marker_without_extra(self) -> Optional[str]: + parsed = markers.parse_marker(self.marker) + return str(parsed.without_extras()) + + def matches(self, other: "PoetryPackage") -> bool: + if 
def get_files_for_package(
    files: "List[PackageFile]",
    package_name: "NormalizedName",
    package_version: "PoetryVersion",
) -> "List[PackageFile]":
    """Filters a file list down to the files belonging to one package release.

    Each file name is parsed as a wheel or, failing that, as an sdist; names
    that parse as neither, or that belong to a different package or version,
    are dropped.

    Args:
        files: Candidate package files (possibly for several releases).
        package_name: Normalized name of the wanted package.
        package_version: Version of the wanted package.

    Returns:
        The subset of files whose name matches the package and version.
    """
    matching: "List[PackageFile]" = []
    for package_file in files:
        try:
            name, version, _, _ = parse_wheel_filename(package_file.name)
        except InvalidWheelFilename:
            try:
                name, version = parse_sdist_filename(package_file.name)
            except InvalidSdistFilename:
                # Not a recognizable wheel or sdist name; skip it.
                continue

        # The versions come from different libraries (packaging vs. poetry),
        # so compare their canonical string forms.
        if name == package_name and str(version) == str(package_version):
            matching.append(package_file)

    return matching
package_canonical_name(pin) + if pin == "python": + # Skip the special line indicating python version. + continue + if isinstance(pin_info, str): + pinned_package_specs[pin] = parse_constraint(pin_info) + else: + pinned_package_specs[pin] = parse_constraint(pin_info["version"]) + + def parse_file_info(file_info) -> PackageFile: + file_name = file_info["file"] + file_hash = file_info["hash"] + assert file_hash.startswith("sha256:") + return PackageFile(name=file_name, sha256=file_hash[7:]) + + # First, build a list of package files. + # There are scenarios when files for multiple versions of a package are present in the list. They'll be filtered + # later. + lock_files = lock_dict.get("metadata", {}).get("files", {}) + files_by_package_name = { + package_name: [parse_file_info(f) for f in files] + for package_name, files in lock_files.items() + } + + # Next, pull out all Package entries in a poetry-specific model. + poetry_packages: List[PoetryPackage] = [] + for lock_pkg in lock_dict.get("package", []): + package_listed_name = lock_pkg["name"] + package_name = package_canonical_name(package_listed_name) + package_version = lock_pkg["version"] + package_python_versions = lock_pkg["python-versions"] + + if package_python_versions == "*": + # Special case for all python versions + package_python_versions = "" + + dependencies = [] + for name, dep_list in lock_pkg.get("dependencies", {}).items(): + # In some cases the dependency is actually a list of alternatives, each with a different + # marker. Generally this is not the case, but we coerce a single entry into a list of 1. 
+ if not isinstance(dep_list, list): + dep_list = [dep_list] + for dep in dep_list: + if isinstance(dep, str): + marker = None + spec = dep + else: + marker = dep.get("markers") + spec = dep.get("version") + + dependencies.append( + PoetryDependency(name=name, spec=spec, marker=marker) + ) + + # In older versions of poetry the list of files was held in a metadata section at the bottom of the poetry.lock file + # The lock file format now (as of 2022-12-16), has the files specified local to each dependency as another field. + # Here we will check for the files being present in the new location, and if not there we fall back to the older one. + files = [parse_file_info(f) for f in lock_pkg.get("files", [])] + if len(files) == 0: + files = files_by_package_name[package_listed_name] + + poetry_packages.append( + PoetryPackage( + name=package_name, + version=PoetryVersion.parse(package_version), + python_versions=package_python_versions, + dependencies=dependencies, + files=get_files_for_package( + files, + package_name, + package_version, + ), + resolved_dependencies=[], + ) + ) + + # Next, group poetry packages by their canonical name + packages_by_canonical_name: Dict[str, List[PoetryPackage]] = defaultdict(list) + for package in poetry_packages: + packages_by_canonical_name[package.name].append(package) + + # And sort the packages by version in descending order (newest first) + for package_list in packages_by_canonical_name.values(): + package_list.sort(key=lambda p: p.version, reverse=True) + + # Next, iterate through each package's dependencies and find the newest one that matches. + # Construct a PackageDependency and store it. 
+ for package in poetry_packages: + for dep in package.dependencies: + dependency_packages = packages_by_canonical_name[ + package_canonical_name(dep.name) + ] + for dep_pkg in dependency_packages: + if dep.matches(dep_pkg): + resolved = PackageDependency( + name=dep_pkg.name, + version=dep_pkg.pypa_version, + marker=dep.marker_without_extra or "", + ) + package.resolved_dependencies.append(resolved) + break + else: + raise MismatchedVersionException( + f"Found no packages to satisfy dependency (name={dep.name}, spec={dep.spec})" + ) + + pinned_keys = {} + for pin, pin_spec in pinned_package_specs.items(): + pin_packages = packages_by_canonical_name[pin] + for pin_pkg in pin_packages: + if pin_spec.allows(pin_pkg.version): + pinned_keys[pin] = pin_pkg.key + break + else: + raise MismatchedVersionException( + f"Found no packages to satisfy pin (name={pin}, spec={pin_spec})" + ) + + lock_packages = {} + for package in poetry_packages: + lock_package = package.to_lock_package() + lock_packages[lock_package.key] = lock_package + + return RawLockSet( + packages=lock_packages, + pins=pinned_keys, + ) + + +def main(args: Any) -> None: + output = args.output + + lock_set = translate(args.project_file, args.lock_file) + + with open(output, "w") as f: + f.write(lock_set.to_json(indent=2)) + + +def parse_flags() -> Any: + parser = FlagFileArgumentParser(description="Generate pycross dependency bzl file.") + + parser.add_argument( + "--project-file", + type=Path, + required=True, + help="The path to pyproject.toml.", + ) + + parser.add_argument( + "--lock-file", + type=Path, + required=True, + help="The path to pdm.lock.", + ) + + parser.add_argument( + "--output", + type=Path, + required=True, + help="The path to the output bzl file.", + ) + + return parser.parse_args() + + +if __name__ == "__main__": + # When under `bazel run`, change to the actual working dir. 
+ if "BUILD_WORKING_DIRECTORY" in os.environ: + os.chdir(os.environ["BUILD_WORKING_DIRECTORY"]) + + main(parse_flags()) diff --git a/python/private/pycross_staging/private/tools/raw_lock_resolver.py b/python/private/pycross_staging/private/tools/raw_lock_resolver.py new file mode 100644 index 0000000000..275bf58461 --- /dev/null +++ b/python/private/pycross_staging/private/tools/raw_lock_resolver.py @@ -0,0 +1,585 @@ +import json +import operator +import os +from argparse import ArgumentParser +from collections import defaultdict +from dataclasses import dataclass, field +from functools import cached_property +from pathlib import Path +from typing import AbstractSet, Any, Dict, List, Optional, Set +from urllib.parse import urlparse + +from packaging.markers import Marker +from packaging.specifiers import SpecifierSet +from packaging.utils import NormalizedName, parse_wheel_filename +from packaging.version import Version +from pip._internal.index.package_finder import ( + CandidateEvaluator, + LinkEvaluator, + LinkType, +) +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.link import Link +from pycross.private.tools.args import FlagFileArgumentParser +from pycross.private.tools.lock_model import ( + EnvironmentReference, + FileKey, + FileReference, + PackageFile, + PackageKey, + RawLockSet, + RawPackage, + ResolvedLockSet, + ResolvedPackage, + is_wheel, + package_canonical_name, +) +from pycross.private.tools.target_environment import TargetEnv + + +@dataclass(frozen=True) +class PackageSource: + label: Optional[str] = None + file: Optional[PackageFile] = None + + def __post_init__(self): + assert ( + int(self.label is not None) + int(self.file is not None) == 1 + ), "Exactly one of label or file must be specified." 
+ + @property + def file_reference(self) -> FileReference: + return FileReference( + label=self.label, + key=self.file.key if self.file is not None else None, + ) + + +@dataclass +class LabelAndTargetEnv: + label: str + target_environment: TargetEnv + + def to_environment_reference(self) -> EnvironmentReference: + return EnvironmentReference.from_target_env(self.label, self.target_environment) + + +class GenerationContext: + def __init__( + self, + target_environments: List[TargetEnv], + local_wheels: Dict[str, str], + remote_wheels: Dict[str, PackageFile], + always_include_sdist: bool, + ): + self.target_environments = target_environments + self.local_wheels = local_wheels + self.remote_wheels = remote_wheels + self.target_environments_by_name = { + tenv.name: tenv for tenv in target_environments + } + self.always_include_sdist = always_include_sdist + + def check_package_compatibility(self, package: RawPackage) -> None: + """Sanity check to make sure the requires_python attribute on each package matches our environments.""" + spec = SpecifierSet(package.python_versions or "") + for environment in self.target_environments: + if not spec.contains(environment.version): + raise Exception( + f"Package {package.name} does not support Python version {environment.version} " + f"in environment {environment.name}" + ) + + def get_dependencies_by_environment( + self, package: RawPackage, ignore_dependency_names: Set[str] + ) -> Dict[Optional[str], Set[PackageKey]]: + env_deps = defaultdict(set) + # We sort deps by version in descending order. In case the list of dependencies + # has multiple entries for the same name that match an environment, we prefer the + # latest version. 
+ ordered_deps = sorted( + package.dependencies, key=operator.attrgetter("version"), reverse=True + ) + # Filter out dependencies that we've been told to ignore + if ignore_dependency_names: + ordered_deps = [ + d for d in ordered_deps if d.name not in ignore_dependency_names + ] + + for target in self.target_environments: + added_for_target = set() + for dep in ordered_deps: + # Only add each dependency once per target. + if dep.name in added_for_target: + continue + # If the dependency has no marker, just add it for each environment. + if not dep.marker: + env_deps[target.name].add(dep.key) + added_for_target.add(dep.name) + + # Otherwise, only add dependencies whose markers evaluate to the current target. + else: + marker = Marker(dep.marker) + if marker.evaluate(target.markers): + env_deps[target.name].add(dep.key) + added_for_target.add(dep.name) + + if env_deps: + # Pull out deps common to all environments + common_deps = set.intersection(*env_deps.values()) + env_deps_deduped = {} + for env, deps in env_deps.items(): + deps = deps - common_deps + if deps: + env_deps_deduped[env] = deps + + env_deps_deduped[None] = common_deps + return env_deps_deduped + + return {} + + def get_package_sources_by_environment( + self, package: RawPackage, source_only: bool = False + ) -> Dict[str, PackageSource]: + formats = ( + frozenset(["source"]) if source_only else frozenset(["source", "binary"]) + ) + environment_sources = {} + for environment in sorted( + self.target_environments, key=lambda tenv: tenv.name.lower() + ): + link_evaluator = LinkEvaluator( + project_name=package.name, + canonical_name=package.name, + formats=formats, + target_python=environment.target_python, + allow_yanked=True, + ignore_requires_python=True, + ) + + package_sources = {} + + # Start with the files defined in the input lock model + for file in package.files: + package_sources[file.name] = PackageSource(file=file) + + # Override per-file with given remote wheel URLs + for filename, 
remote_file in self.remote_wheels.items(): + name, version, _, _ = parse_wheel_filename(filename) + if (package.name, package.version) == (name, version): + package_sources[filename] = PackageSource(file=remote_file) + + # Override per-file with given local wheel labels + for filename, local_label in self.local_wheels.items(): + name, version, _, _ = parse_wheel_filename(filename) + if (package.name, package.version) == (name, version): + package_sources[filename] = PackageSource(label=local_label) + + candidates_to_package_sources = {} + for filename, package_source in package_sources.items(): + candidate = InstallationCandidate( + package.name, str(package.version), Link(filename) + ) + candidates_to_package_sources[candidate] = package_source + + candidates = [] + for candidate in candidates_to_package_sources: + link_type, _ = link_evaluator.evaluate_link(candidate.link) + if link_type == LinkType.candidate: + candidates.append(candidate) + + candidate_evaluator = CandidateEvaluator.create( + package.name, environment.target_python + ) + compute_result = candidate_evaluator.compute_best_candidate(candidates) + if compute_result.best_candidate: + environment_sources[environment.name] = candidates_to_package_sources[ + compute_result.best_candidate + ] + + return environment_sources + + +@dataclass +class PackageAnnotations: + build_dependencies: List[PackageKey] = field(default_factory=list) + build_target: Optional[str] = None + always_build: bool = False + ignore_dependencies: Set[str] = field(default_factory=set) + install_exclude_globs: Set[str] = field(default_factory=set) + + +class PackageResolver: + def __init__( + self, + package: RawPackage, + context: GenerationContext, + annotations: Optional[PackageAnnotations], + ): + annotations = annotations or PackageAnnotations() # Default to an empty set + + self.key = package.key + self.package_name = package.name + self.uses_sdist = False + + self._build_deps = annotations.build_dependencies + 
self._build_target = annotations.build_target + self._install_exclude_globs = annotations.install_exclude_globs + + deps_by_env = context.get_dependencies_by_environment( + package, + annotations.ignore_dependencies, + ) + self._common_deps = deps_by_env.get(None, set()) + self._env_deps = {k: v for k, v in deps_by_env.items() if k is not None} + + self._package_sources_by_env = context.get_package_sources_by_environment( + package, + annotations.always_build, + ) + + used_package_sources = set(self._package_sources_by_env.values()) + + # Figure out if environments require an sdist (build from source). + sdist_file_key = None + for package_source in used_package_sources: + if package_source.file and package_source.file.is_sdist: + sdist_file_key = package_source.file.key + self.uses_sdist = True + break + + # If we didn't find an sdist in environment sources but + # always_include_sdist is enabled, search all of the package's files. + if not sdist_file_key and context.always_include_sdist: + for file in package.files: + if file.is_sdist: + sdist_file_key = file.key + used_package_sources.add(PackageSource(file=file)) + + self.sdist_file = FileReference(key=sdist_file_key) if sdist_file_key else None + self.package_sources = frozenset(used_package_sources) + + @cached_property + def all_dependency_keys(self) -> Set[PackageKey]: + """Returns all package keys (name-version) that this target depends on, + including platform-specific and build dependencies.""" + keys = set(self._common_deps) + for env_deps in self._env_deps.values(): + keys |= env_deps + keys |= set(self._build_deps) + return keys + + def to_resolved_package(self) -> ResolvedPackage: + return ResolvedPackage( + key=self.key, + build_dependencies=sorted(self._build_deps), + common_dependencies=sorted(self._common_deps), + environment_dependencies={ + env: sorted(deps) for env, deps in sorted(self._env_deps.items()) + }, + environment_files={ + env: ps.file_reference + for env, ps in 
sorted(self._package_sources_by_env.items()) + }, + build_target=self._build_target, + sdist_file=self.sdist_file, + install_exclude_globs=list(self._install_exclude_globs), + ) + + +def url_wheel_name(url: str) -> str: + # Returns the wheel filename given a url. No magic here; just take the last component of the URL path. + parsed = urlparse(url) + filename = os.path.basename(parsed.path) + assert filename, f"Could not determine wheel filename from url: {url}" + assert is_wheel(filename), f"Filename is not a wheel: {url}" + return filename + + +def resolve_single_version( + name: str, + versions_by_name: Dict[NormalizedName, List[PackageKey]], + all_versions: AbstractSet[PackageKey], + attr_name: str, +) -> PackageKey: + # Handle the case of an exact version being specified. + if "@" in name: + name_part, version_part = name.split("@", maxsplit=1) + key = PackageKey.from_parts( + package_canonical_name(name_part), Version(version_part) + ) + if key not in all_versions: + raise Exception(f'{attr_name} entry "{name}" matches no packages') + return key + + options = versions_by_name.get(package_canonical_name(name)) + if not options: + raise Exception(f'{attr_name} entry "{name}" matches no packages') + + if len(options) > 1: + raise Exception( + f'{attr_name} entry "{name}" matches multiple packages (choose one): {sorted(options)}' + ) + + return options[0] + + +def collect_package_annotations( + args: Any, lock_model: RawLockSet +) -> Dict[PackageKey, PackageAnnotations]: + annotations: Dict[PackageKey, PackageAnnotations] = defaultdict(PackageAnnotations) + all_package_keys_by_canonical_name: Dict[ + NormalizedName, List[PackageKey] + ] = defaultdict(list) + for package in lock_model.packages.values(): + all_package_keys_by_canonical_name[package.name].append(package.key) + + with open(args.annotations_file, "r") as f: + annotations_data = json.load(f) + + for pkg, annotation in annotations_data.items(): + resolved_pkg = resolve_single_version( + pkg, + 
all_package_keys_by_canonical_name, + lock_model.packages.keys(), + "annotations", + ) + + for dep in annotation.get("build_dependencies", []): + resolved_dep = resolve_single_version( + dep, + all_package_keys_by_canonical_name, + lock_model.packages.keys(), + "build_dependencies", + ) + annotations[resolved_pkg].build_dependencies.append(resolved_dep) + + if annotation.get("build_target"): + annotations[resolved_pkg].build_target = annotation["build_target"] + + if annotation.get("always_build"): + annotations[resolved_pkg].always_build = True + + for dep in annotation.get("ignore_dependencies", []): + if ( + dep not in all_package_keys_by_canonical_name + and dep not in lock_model.packages.keys() + ): + raise Exception( + f'package_ignore_dependencies entry "{dep}" matches no packages' + ) + + # This dependency will be resolved to a single version later + annotations[resolved_pkg].ignore_dependencies.add(dep) + + for glob in annotation.get("install_exclude_globs", []): + annotations[resolved_pkg].install_exclude_globs.add(glob) + + # Return as a non-default dict + return dict(annotations) + + +def resolve(args: Any) -> ResolvedLockSet: + environment_pairs: List[LabelAndTargetEnv] = [] + for target_environment in args.target_environment or []: + target_file, target_label = target_environment + with open(target_file, "r") as f: + environment_pairs.append( + LabelAndTargetEnv( + label=target_label, + target_environment=TargetEnv.from_dict(json.load(f)), + ) + ) + environment_pairs.sort(key=lambda x: x.target_environment.name.lower()) + environments = [ep.target_environment for ep in environment_pairs] + + local_wheels = {} + for local_wheel in args.local_wheel or []: + filename, label = local_wheel + assert is_wheel(filename), f"Local label is not a wheel: {label}" + local_wheels[filename] = label + + remote_wheels = {} + for remote_wheel in args.remote_wheel or []: + url, sha256 = remote_wheel + filename = url_wheel_name(url) + remote_wheels[filename] = 
PackageFile(name=filename, sha256=sha256, urls=(url,))
+
+    context = GenerationContext(
+        target_environments=environments,
+        local_wheels=local_wheels,
+        remote_wheels=remote_wheels,
+        always_include_sdist=args.always_include_sdist,
+    )
+
+    with open(args.lock_model_file, "r") as f:
+        data = f.read()
+    lock_model = RawLockSet.from_json(data)
+
+    # Collect package "annotations"
+    annotations = collect_package_annotations(args, lock_model)
+
+    # Walk the dependency graph starting from the set of pinned packages (in pyproject.toml), computing the
+    # transitive closure.
+    work = list(lock_model.pins.values())
+    packages_by_package_key: Dict[PackageKey, PackageResolver] = {}
+
+    while work:
+        next_package_key = work.pop()
+        if next_package_key in packages_by_package_key:
+            continue
+        package = lock_model.packages[next_package_key]
+        context.check_package_compatibility(package)
+        entry = PackageResolver(
+            package,
+            context,
+            annotations.pop(next_package_key, None),
+        )
+        packages_by_package_key[next_package_key] = entry
+        work.extend(entry.all_dependency_keys)
+
+    # The annotations dict should be empty now; if not, annotations were specified
+    # for packages that are not actually part of our final set.
+ if annotations: + raise Exception( + f"Annotations specified for packages that are not part of the locked set: " + f'{", ".join([str(key) for key in sorted(annotations.keys())])}' + ) + + resolved_packages = sorted(packages_by_package_key.values(), key=lambda x: x.key) + + # If builds are disallowed, ensure that none of the targets include an sdist build + if args.disallow_builds: + builds = [] + for package in resolved_packages: + if package.uses_sdist: + builds.append(package.key) + if builds: + raise Exception( + "Builds are disallowed, but the following would include pycross_wheel_build targets: " + f"{', '.join(builds)}" + ) + + repos: Dict[FileKey, PackageFile] = {} + for package_target in resolved_packages: + for source in package_target.package_sources: + if not source.file: + continue + repos[source.file.key] = source.file + + repos = dict(sorted(repos.items())) + + def pin_name(name: str) -> NormalizedName: + return package_canonical_name(name) + + pins = {pin_name(k): v for k, v in lock_model.pins.items()} + if args.default_alias_single_version: + packages_by_pin_name = defaultdict(list) + for package_target in resolved_packages: + packages_by_pin_name[pin_name(package_target.package_name)].append( + package_target.key + ) + + for package_pin_name, packages in packages_by_pin_name.items(): + if package_pin_name in pins: + continue + if len(packages) > 1: + continue + pins[package_pin_name] = packages[0] + + resolved_environments = { + env.target_environment.name: env.to_environment_reference() + for env in environment_pairs + } + resolved_packages = { + pkg.key: pkg.to_resolved_package() for pkg in resolved_packages + } + + return ResolvedLockSet( + environments=resolved_environments, + packages=resolved_packages, + pins=pins, + remote_files=repos, + ) + + +def add_shared_flags(parser: ArgumentParser) -> None: + parser.add_argument( + "--lock-model-file", + type=Path, + required=True, + help="The path to the lock model JSON file.", + ) + + 
parser.add_argument( + "--target-environment", + nargs=2, + action="append", + help="A (file, label) parameter that maps a pycross_target_environment label to its JSON output file.", + ) + + parser.add_argument( + "--local-wheel", + nargs=2, + action="append", + help="A (file, label) parameter that points to a wheel file in the local repository.", + ) + + parser.add_argument( + "--remote-wheel", + nargs=2, + action="append", + help="A (url, sha256) parameter that points to a remote wheel.", + ) + + parser.add_argument( + "--default-alias-single-version", + action="store_true", + help="Generate aliases for all packages with single versions.", + ) + + parser.add_argument( + "--disallow-builds", + action="store_true", + help="If set, an error is raised if the generated lock contains wheel build targets.", + ) + + parser.add_argument( + "--always-include-sdist", + action="store_true", + help="If set, always include a package's sdist if one exists.", + ) + + parser.add_argument( + "--annotations-file", + type=Path, + help="The path to the annotations JSON file.", + ) + + +def parse_flags() -> Any: + parser = FlagFileArgumentParser(description="Generate a resolved lock structure.") + + add_shared_flags(parser) + parser.add_argument( + "--output", + type=Path, + required=True, + help="The path to the output JSON file.", + ) + + return parser.parse_args() + + +def main(args: Any) -> None: + result = resolve(args) + with open(args.output, "w") as f: + f.write(result.to_json(indent=2)) + + +if __name__ == "__main__": + # When under `bazel run`, change to the actual working dir. 
+ if "BUILD_WORKING_DIRECTORY" in os.environ: + os.chdir(os.environ["BUILD_WORKING_DIRECTORY"]) + + main(parse_flags()) diff --git a/python/private/pycross_staging/private/tools/repairwheel/BUILD.bazel b/python/private/pycross_staging/private/tools/repairwheel/BUILD.bazel new file mode 100644 index 0000000000..f2ac453d77 --- /dev/null +++ b/python/private/pycross_staging/private/tools/repairwheel/BUILD.bazel @@ -0,0 +1,14 @@ +load("@rules_pycross_internal//:python.bzl", "py_binary") + +package(default_visibility = ["//visibility:private"]) + +py_binary( + name = "repair_wheel_hook", + srcs = [ + "repair_wheel_hook.py", + ], + visibility = ["//pycross/hooks:__pkg__"], + deps = [ + "@rules_pycross_internal//deps:repairwheel", + ], +) diff --git a/python/private/pycross_staging/private/tools/repairwheel/repair_wheel_hook.py b/python/private/pycross_staging/private/tools/repairwheel/repair_wheel_hook.py new file mode 100644 index 0000000000..168abd30bd --- /dev/null +++ b/python/private/pycross_staging/private/tools/repairwheel/repair_wheel_hook.py @@ -0,0 +1,34 @@ +import os +import subprocess +import sys +from pathlib import Path + + +def main() -> None: + lib_path_env = os.environ["PYCROSS_LIBRARY_PATH"] + lib_path = [Path(p) for p in lib_path_env.split(os.pathsep)] + wheel_file = Path(os.environ["PYCROSS_WHEEL_FILE"]) + output_dir = Path(os.environ["PYCROSS_WHEEL_OUTPUT_ROOT"]) + + args = [ + sys.executable, + "-m", + "repairwheel", + str(wheel_file), + "--output-dir", + str(output_dir), + "--no-sys-paths", + ] + + for lp in lib_path: + args.extend(["--lib-dir", str(lp)]) + + subprocess.check_call(args, env=os.environ) + + +if __name__ == "__main__": + # When under `bazel run`, change to the actual working dir. 
+ if "BUILD_WORKING_DIRECTORY" in os.environ: + os.chdir(os.environ["BUILD_WORKING_DIRECTORY"]) + + main() diff --git a/python/private/pycross_staging/private/tools/resolved_lock_renderer.py b/python/private/pycross_staging/private/tools/resolved_lock_renderer.py new file mode 100644 index 0000000000..e97043dc92 --- /dev/null +++ b/python/private/pycross_staging/private/tools/resolved_lock_renderer.py @@ -0,0 +1,716 @@ +from __future__ import annotations + +import json +import os +import textwrap +from argparse import ArgumentParser +from collections import defaultdict +from dataclasses import dataclass +from functools import cached_property +from pathlib import Path +from typing import Any, Dict, Iterator, List, Optional, Set, TextIO, Union + +from pycross.private.tools.args import FlagFileArgumentParser +from pycross.private.tools.lock_model import ( + ConfigSetting, + FileKey, + FileReference, + PackageFile, + PackageKey, + ResolvedLockSet, + ResolvedPackage, + package_canonical_name, +) + + +def ind(text: str, tabs=1): + """Indent text with the given number of tabs.""" + return textwrap.indent(text, " " * tabs) + + +def quoted_str(text: str) -> str: + """Return text wrapped in double quotes.""" + return json.dumps(text) + + +def sanitized(name: str) -> str: + return name.lower().replace("-", "_").replace("@", "_").replace("+", "_") + + +def prefixed(name: str, prefix: Optional[str]): + if not prefix: + return name + # Strip any trailing underscores from the provided prefix, first, then add one of our own. 
+ return prefix.rstrip("_") + "_" + name + + +@dataclass(frozen=True) +class TargetRef: + """A reference to a target, able to generate a label.""" + + target: str + package: Optional[str] = None + repo: Optional[str] = None + + def __post_init__(self): + if self.repo is not None: + if self.package is None: + raise ValueError("package must be specified with repo") + + @cached_property + def label(self): + repo_part = f"@{self.repo}" if self.repo is not None else "" + package_part = f"//{self.package}" if self.package is not None else "" + target_part = f":{self.target}" + # Handle special case target shorthand + if package_part: + _, last_component = package_part.rsplit("/", 1) + if last_component == self.target: + target_part = "" + + return repo_part + package_part + target_part + + +@dataclass(frozen=True) +class QualifiedTargetRef(TargetRef): + """A TargetRef where all components are required.""" + + package: str + repo: str + + +class Naming: + def __init__( + self, + repo_prefix: Optional[str], + target_environment_select: str, + ): + self.repo_prefix = repo_prefix + self.target_environment_select = target_environment_select + + def package(self, package_key: PackageKey) -> TargetRef: + return TargetRef(str(package_key)) + + def environment(self, environment_name: str) -> TargetRef: + return TargetRef(prefixed(environment_name, "_env")) + + def wheel_build(self, package_key: PackageKey) -> TargetRef: + return TargetRef(prefixed(str(package_key), "_build")) + + def wheel(self, package_key: PackageKey) -> TargetRef: + return TargetRef(prefixed(str(package_key), "_wheel")) + + def sdist(self, package_key: PackageKey) -> TargetRef: + return TargetRef(prefixed(str(package_key), "_sdist")) + + def repo_file(self, file: PackageFile) -> QualifiedTargetRef: + name = file.name + for extension in [".tar.gz", ".zip", ".whl"]: + if name.endswith(extension): + name = name[: -len(extension)] + break + typ = "sdist" if file.is_sdist else "wheel" + repo = 
f"{self.repo_prefix}_{typ}_{sanitized(name)}" + return QualifiedTargetRef(repo=repo, package="file", target="file") + + +class EnvTarget: + def __init__(self, environment_name: str, setting: ConfigSetting, naming: Naming): + self.naming = naming + self.environment_name = environment_name + self.setting = setting + + def render(self) -> str: + lines = [ + "native.config_setting(", + ind(f'name = "{self.naming.environment(self.environment_name).target}",'), + ] + if self.setting.constraint_values: + lines.append(ind("constraint_values = [")) + for cv in self.setting.constraint_values: + lines.append(ind(f"{quoted_str(cv)},", 2)) + lines.append(ind("],")) + if self.setting.flag_values: + lines.append( + ind("flag_values = {"), + ) + for flag, value in self.setting.flag_values.items(): + lines.append(ind(f"{quoted_str(flag)}: {quoted_str(value)},", 2)) + lines.append(ind("},")) + lines.append(")") + + return "\n".join(lines) + + +class EnvAliasTarget: + def __init__( + self, environment_name: str, config_setting_target: str, naming: Naming + ): + self.naming = naming + self.environment_name = environment_name + self.config_setting_target = config_setting_target + + def render(self) -> str: + lines = [ + "native.alias(", + ind( + f"name = {quoted_str(self.naming.environment(self.environment_name).target)}," + ), + ind(f"actual = {quoted_str(self.config_setting_target)},"), + ")", + ] + return "\n".join(lines) + + +class PackageTarget: + def __init__( + self, + package: ResolvedPackage, + file_labels: Dict[FileKey, str], + naming: Naming, + ): + self.package = package + self.file_labels = file_labels + self.naming = naming + + @cached_property + def _sdist_label(self) -> Optional[str]: + if self.package.sdist_file: + key = self.package.sdist_file.key + if key is not None and key.is_sdist: + return self.file_labels[key] + + @property + def _has_runtime_deps(self) -> bool: + return bool( + self.package.common_dependencies or self.package.environment_dependencies + ) + + 
@property + def _has_build_deps(self) -> bool: + return bool(self.package.build_dependencies) + + @property + def _has_sdist(self) -> bool: + return self._sdist_label is not None + + @cached_property + def _needs_generated_build_target(self) -> bool: + if self.package.build_target: + return False + for f in self.package.environment_files.values(): + if f.key and f.key.is_sdist: + return True + return False + + @property + def imports(self) -> Set[str]: + if self._has_sdist and not self.package.build_target: + return {"pycross_wheel_build", "pycross_wheel_library"} + else: + return {"pycross_wheel_library"} + + def _common_entries(self, deps: List[PackageKey], indent: int) -> Iterator[str]: + for dep in deps: + yield ind(f'"{self.naming.package(dep).label}",', indent) + + def _select_entries( + self, env_deps: Dict[str, List[PackageKey]], indent + ) -> Iterator[str]: + for env_name, deps in env_deps.items(): + yield ind(f'"{self.naming.environment(env_name).label}": [', indent) + yield from self._common_entries(deps, indent + 1) + yield ind("],", indent) + yield ind('"//conditions:default": [],', indent) + + @cached_property + def _deps_name(self): + key_str = str(self.package.key) + sanitized = ( + key_str.replace("-", "_") + .replace(".", "_") + .replace("@", "_") + .replace("+", "_") + ) + return f"_{sanitized}_deps" + + @cached_property + def _build_deps_name(self): + key_str = str(self.package.key) + sanitized = ( + key_str.replace("-", "_") + .replace(".", "_") + .replace("@", "_") + .replace("+", "_") + ) + return f"_{sanitized}_build_deps" + + def _render_runtime_deps(self) -> str: + lines = [] + + if self.package.common_dependencies and self.package.environment_dependencies: + lines.append(f"{self._deps_name} = [") + lines.extend(self._common_entries(self.package.common_dependencies, 1)) + lines.append("] + select({") + lines.extend(self._select_entries(self.package.environment_dependencies, 1)) + lines.append("})") + + elif 
self.package.common_dependencies: + lines.append(f"{self._deps_name} = [") + lines.extend(self._common_entries(self.package.common_dependencies, 1)) + lines.append("]") + + elif self.package.environment_dependencies: + lines.append(self._deps_name + " = select({") + lines.extend(self._select_entries(self.package.environment_dependencies, 1)) + lines.append("})") + + return "\n".join(lines) + + def _render_build_deps(self) -> str: + lines = [f"{self._build_deps_name} = ["] + for dep in sorted( + self.package.build_dependencies, key=lambda k: self.naming.package(k).label + ): + lines.append(ind(f'"{self.naming.package(dep).label}",', 1)) + lines.append("]") + + return "\n".join(lines) + + def _render_sdist(self) -> str: + sdist_label = self._sdist_label + assert self._sdist_label + + lines = [ + "native.alias(", + ind(f'name = "{self.naming.sdist(self.package.key).target}",'), + ind(f'actual = "{sdist_label}",'), + ")", + ] + + return "\n".join(lines) + + def _render_build(self) -> str: + assert self._has_sdist + + lines = [ + "pycross_wheel_build(", + ind(f'name = "{self.naming.wheel_build(self.package.key).target}",'), + ind(f'sdist = "{self.naming.sdist(self.package.key).label}",'), + ind(f"target_environment = {self.naming.target_environment_select},"), + ] + + dep_names = [] + if self._has_runtime_deps: + dep_names.append(self._deps_name) + if self._has_build_deps: + dep_names.append(self._build_deps_name) + + if dep_names: + lines.append(ind(f"deps = {' + '.join(dep_names)},")) + lines.extend( + [ + ind('tags = ["manual"],'), + ")", + ] + ) + + return "\n".join(lines) + + def _render_wheel(self) -> str: + lines = [ + "native.alias(", + ind(f'name = "{self.naming.wheel(self.package.key).target}",'), + ] + # Add the wheel alias target. + # If all environments use the same wheel, don't use select. 
+ + def wheel_target(file_ref: FileReference) -> str: + if file_ref.label: + return file_ref.label + + assert file_ref.key + if file_ref.key.is_wheel: + return self.file_labels[file_ref.key] + elif self.package.build_target: + return self.package.build_target + else: + return self.naming.wheel_build(self.package.key).label + + distinct_file_refs = set(self.package.environment_files.values()) + if len(distinct_file_refs) == 1: + source = next(iter(distinct_file_refs)) + lines.append(ind(f'actual = "{wheel_target(source)}",')) + else: + lines.append(ind("actual = select({")) + for env_name, ref in self.package.environment_files.items(): + lines.append( + ind( + f'"{self.naming.environment(env_name).label}": "{wheel_target(ref)}",', + 2, + ) + ) + lines.append(ind("}),")) + + lines.append(")") + + return "\n".join(lines) + + def _render_pkg(self) -> str: + lines = [ + "pycross_wheel_library(", + ind(f'name = "{self.naming.package(self.package.key).target}",'), + ] + if self._has_runtime_deps: + lines.append(ind(f"deps = {self._deps_name},")) + + lines.append(ind(f'wheel = "{self.naming.wheel(self.package.key).label}",')) + + if self.package.install_exclude_globs: + lines.append(ind("install_exclude_globs = [")) + for install_exclude_glob in self.package.install_exclude_globs: + lines.append(ind(f'"{install_exclude_glob}",', 2)) + lines.append(ind("],")) + + lines.append(")") + + return "\n".join(lines) + + def render(self) -> str: + parts = [] + if self._has_runtime_deps: + parts.append(self._render_runtime_deps()) + parts.append("") + if self._has_sdist: + parts.append(self._render_sdist()) + parts.append("") + if self._needs_generated_build_target: + if self.package.build_dependencies: + parts.append(self._render_build_deps()) + parts.append("") + parts.append(self._render_build()) + parts.append("") + parts.append(self._render_wheel()) + parts.append("") + parts.append(self._render_pkg()) + return "\n".join(parts) + + +class UrlRepoTarget: + def __init__(self, 
name: str, file: PackageFile): + assert ( + file.urls + ), "UrlWheelRepoTarget requires a PackageFile with one or more URLs" + self.name = name + self.file = file + + @property + def imports(self) -> Set[str]: + return {"maybe", "http_file"} + + def render(self) -> str: + parts = [] + parts.extend( + [ + "maybe(", + ind("http_file,"), + ind(f'name = "{self.name}",'), + ind("urls = ["), + ] + ) + + urls = sorted(self.file.urls or []) + for url in urls: + parts.append(ind(f'"{url}",', 2)) + + parts.extend( + [ + ind("],"), + ind(f'sha256 = "{self.file.sha256}",'), + ind(f'downloaded_file_path = "{self.file.name}",'), + ")", + ] + ) + + return "\n".join(parts) + + +class PypiFileRepoTarget: + def __init__(self, name: str, file: PackageFile, pypi_index: Optional[str]): + self.name = name + self.file = file + self.pypi_index = pypi_index + + @property + def imports(self) -> Set[str]: + return {"maybe", "pypi_file"} + + def render(self) -> str: + lines = [ + "maybe(", + ind("pypi_file,"), + ind(f'name = "{self.name}",'), + ind(f'package_name = "{self.file.package_name}",'), + ind(f'package_version = "{self.file.package_version}",'), + ind(f'filename = "{self.file.name}",'), + ind(f'sha256 = "{self.file.sha256}",'), + ] + + if self.pypi_index: + lines.append(ind(f'index = "{self.pypi_index}",')) + + lines.append(")") + + return "\n".join(lines) + + +def gen_load_statements(imports: Set[str], pycross_repo: str) -> List[str]: + possible_imports = { + "http_file": "@bazel_tools//tools/build_defs/repo:http.bzl", + "maybe": "@bazel_tools//tools/build_defs/repo:utils.bzl", + "pycross_wheel_build": f"{pycross_repo}//pycross:defs.bzl", + "pycross_wheel_library": f"{pycross_repo}//pycross:defs.bzl", + "pypi_file": f"{pycross_repo}//pycross:defs.bzl", + } + + load_statement_groups = defaultdict(list) + for i in imports: + load_statement_groups[possible_imports[i]].append(i) + + # External repo loads come before local loads. 
def render(resolved_lock: "ResolvedLockSet", args: Any, output: "TextIO") -> None:
    """Write the generated .bzl content for a resolved lock set to `output`.

    Emits the PINS/FILES dicts plus `targets()` and `repositories()` macros.
    """
    naming = Naming(
        repo_prefix=args.repo_prefix,
        target_environment_select="_target",
    )

    pypi_index = args.pypi_index or None

    # Start from any caller-provided FileKey -> label mappings; anything not
    # covered gets a generated repo target below.
    file_labels = {FileKey(key): label for key, label in (args.repo or [])}
    file_repos: "List[Union[PypiFileRepoTarget, UrlRepoTarget]]" = []

    for file_key, remote_file in resolved_lock.remote_files.items():
        if file_key in file_labels:
            continue

        repo_target = naming.repo_file(remote_file)
        file_labels[file_key] = repo_target.label

        if remote_file.urls:
            file_repos.append(UrlRepoTarget(repo_target.repo, remote_file))
        else:
            file_repos.append(
                PypiFileRepoTarget(repo_target.repo, remote_file, pypi_index)
            )

    file_repos.sort(key=lambda rt: rt.name)

    package_targets = [
        PackageTarget(
            package=pkg,
            file_labels=file_labels,
            naming=naming,
        )
        for pkg in resolved_lock.packages.values()
    ]

    # pin aliases follow the standard package normalization rules.
    # https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization
    pins = {package_canonical_name(k): v for k, v in resolved_lock.pins.items()}

    # Figure out which load statements we need.
    needed_imports = set()
    for target in package_targets:
        needed_imports.update(target.imports)
    for repo in file_repos:
        needed_imports.update(repo.imports)
    load_statements = gen_load_statements(needed_imports, args.pycross_repo_name)

    def emit(*lines):
        # No arguments means "print one blank line".
        for line in lines or ("",):
            print(line, file=output)

    emit(
        "# This file is generated by rules_pycross.",
        "# It is not intended for manual editing.",
        '"""Pycross-generated dependency targets."""',
        "",
    )
    if load_statements:
        emit(*load_statements)
        emit()

    # Build PINS map
    if not args.no_pins:
        if pins:
            emit("PINS = {")
            for alias in sorted(pins):
                pinned_target = naming.package(pins[alias]).target
                emit(ind(f"{quoted_str(alias)}: {quoted_str(pinned_target)},"))
            emit("}")
        else:
            emit("PINS = {}")
        emit()

    if args.generate_file_map:
        if file_repos:
            emit("FILES = {")
            for repo in file_repos:
                label = f"@{repo.name}//file:{repo.file.name}"
                emit(ind(f"{quoted_str(repo.file.name)}: Label({quoted_str(label)}),"))
            emit("}")
        else:
            emit("FILES = {}")
        emit()

    # Build targets
    emit(
        "# buildifier: disable=unnamed-macro",
        "def targets():",
        ind('"""Generated package targets."""'),
        "",
    )

    if not args.no_pins:
        # Create pin aliases based on the PINS dict above.
        emit(
            ind("for pin_name, pin_target in PINS.items():", 1),
            ind("native.alias(", 2),
            ind("name = pin_name,", 3),
            ind('actual = ":" + pin_target,', 3),
            ind(")", 2),
        )
        emit()

    for env_name, env_ref in resolved_lock.environments.items():
        if env_ref.config_setting_label:
            env_target = EnvAliasTarget(env_name, env_ref.config_setting_label, naming)
        else:
            assert env_ref.config_setting
            env_target = EnvTarget(env_name, env_ref.config_setting, naming)
        emit(ind(env_target.render()))
        emit()

    emit(ind("# buildifier: disable=unused-variable"))
    emit(ind(f"{naming.target_environment_select} = select({{"))
    for env_name, env_ref in resolved_lock.environments.items():
        emit(
            ind(
                f'"{naming.environment(env_name).label}": "{env_ref.environment_label}",',
                2,
            )
        )
    emit(ind("})"))

    for package_target in package_targets:
        emit()
        emit(ind(package_target.render()))

    # Repos
    emit(
        "",
        "# buildifier: disable=unnamed-macro",
        "def repositories():",
        ind('"""Generated package repositories."""'),
    )
    for repo in file_repos:
        emit()
        emit(ind(repo.render()))


def add_shared_flags(parser: ArgumentParser) -> None:
    """Register the flags shared by all lock-rendering entry points."""
    parser.add_argument(
        "--repo",
        nargs=2,
        action="append",
        help="A (file_key, label) parameter that maps a FileKey to a label that provides it.",
    )

    parser.add_argument(
        "--repo-prefix",
        type=str,
        default="",
        help="The prefix to apply to repository targets.",
    )

    parser.add_argument(
        "--pypi-index",
        help="The PyPI-compatible index to use. Defaults to pypi.org.",
    )

    parser.add_argument(
        "--generate-file-map",
        action="store_true",
        help="Generate a FILES dict containing a mapping of filenames to repo labels.",
    )

    parser.add_argument(
        "--pycross-repo-name",
        default="@rules_pycross",
        help="Our own repo name.",
    )

    parser.add_argument(
        "--no-pins",
        action="store_true",
        help="Don't create pinned alias targets.",
    )


def parse_flags() -> Any:
    """Parse command-line flags for the bzl-file generator."""
    parser = FlagFileArgumentParser(description="Generate pycross dependency bzl file.")

    add_shared_flags(parser)
    parser.add_argument(
        "--resolved-lock",
        type=Path,
        required=True,
        help="The path to the resolved lock structure.",
    )
    parser.add_argument(
        "--output",
        type=Path,
        required=True,
        help="The path to the output bzl file.",
    )

    return parser.parse_args()


def main(args: Any) -> None:
    """Load the resolved lock JSON and render it to the output bzl file."""
    with open(args.resolved_lock, "r") as f:
        resolved_lock = ResolvedLockSet.from_json(f.read())
    with open(args.output, "w") as f:
        render(resolved_lock, args, f)


if __name__ == "__main__":
    # When under `bazel run`, change to the actual working dir.
    if "BUILD_WORKING_DIRECTORY" in os.environ:
        os.chdir(os.environ["BUILD_WORKING_DIRECTORY"])

    main(parse_flags())
"""
Stuff to define a target Python environment.

See https://peps.python.org/pep-0508/#environment-markers
"""
from dataclasses import asdict, dataclass
from typing import TYPE_CHECKING, Any, Dict, List, Optional

if TYPE_CHECKING:
    # pip._internal is a private API; import it lazily so that this module
    # (TargetEnv (de)serialization and the guess_* helpers) can be used
    # without pip installed. Annotations referencing TargetPython are
    # quoted for the same reason.
    from pip._internal.models.target_python import TargetPython


@dataclass(frozen=True)
class TargetEnv:
    """A serializable description of one target Python environment.

    Captures PEP 508 environment markers, PEP 425 compatibility tags, and
    the Bazel config_setting information for a single target platform.
    """

    name: str
    implementation: str
    version: str
    abis: List[str]
    platforms: List[str]
    compatibility_tags: List[str]
    markers: Dict[str, str]
    python_compatible_with: List[str]
    flag_values: Dict[str, str]
    config_setting_target: Optional[str] = None

    @staticmethod
    def from_target_python(
        name: str,
        target_python: "TargetPython",
        markers: Dict[str, str],
        python_compatible_with: List[str],
        flag_values: Dict[str, str],
        config_setting_target: Optional[str] = None,
    ) -> "TargetEnv":
        """Build a TargetEnv from a pip TargetPython, applying marker overrides.

        Raises:
            ValueError: if an override key is not a known environment marker.
        """
        all_markers = guess_environment_markers(target_python)
        for key, val in markers.items():
            if key not in all_markers:
                raise ValueError(f"Invalid marker: {key}")
            all_markers[key] = val

        return TargetEnv(
            name=name,
            implementation=target_python.implementation or "py",
            version=".".join(str(i) for i in target_python.py_version_info),
            abis=target_python.abis or [],
            platforms=target_python.platforms or [],
            compatibility_tags=[str(t) for t in target_python.get_sorted_tags()],
            markers=all_markers,
            python_compatible_with=python_compatible_with,
            flag_values=flag_values,
            config_setting_target=config_setting_target,
        )

    @property
    def target_python(self) -> "TargetPython":
        """Reconstruct a pip TargetPython from the stored fields."""
        # Deferred import: only this accessor needs pip at runtime.
        from pip._internal.models.target_python import TargetPython

        return TargetPython(
            platforms=self.platforms,
            py_version_info=tuple(int(p) for p in self.version.split(".")[:3]),
            abis=self.abis,
            implementation=self.implementation,
        )

    @staticmethod
    def from_dict(data: Dict[str, Any]) -> "TargetEnv":
        """Deserialize from a plain dict (inverse of `to_dict`)."""
        return TargetEnv(**data)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict suitable for JSON encoding."""
        return asdict(self)


def normalize_os(py: "TargetPython") -> str:
    """Map PEP 425 platform tags to "linux"/"darwin"/"windows" ("" if unknown)."""
    for platform in py.platforms or []:
        if platform.startswith("linux"):
            return "linux"
        elif platform.startswith("manylinux"):
            return "linux"
        elif platform.startswith("macos"):
            return "darwin"
        elif platform.startswith("win"):
            return "windows"
    return ""


def normalize_arch(py: "TargetPython") -> str:
    """Map PEP 425 platform tags to a canonical CPU name ("" if unknown)."""
    for platform in py.platforms or []:
        if "x86_64" in platform:
            return "x86_64"
        elif "amd64" in platform:
            return "x86_64"
        elif "aarch64" in platform:
            return "aarch64"
        elif "arm64" in platform:
            return "aarch64"
        elif "x86" in platform:
            return "x86"
        elif "i386" in platform:
            return "x86"
        elif "i686" in platform:
            return "x86"
        elif platform == "win32":
            return "x86"
    return ""


def guess_os_name(py: "TargetPython") -> str:
    """Guess the `os.name` marker value ("posix"/"nt")."""
    return {
        "linux": "posix",
        "darwin": "posix",
        "windows": "nt",
    }.get(normalize_os(py), "")


def guess_sys_platform(py: "TargetPython") -> str:
    """Guess the `sys.platform` marker value."""
    return {
        "linux": "linux",
        "darwin": "darwin",
        "windows": "win32",
    }.get(normalize_os(py), "")


def guess_platform_machine(py: "TargetPython") -> str:
    """Guess `platform.machine()` for the target OS/arch combination."""
    normal_os = normalize_os(py)
    if normal_os == "linux":
        return {
            "aarch64": "aarch64",
            "x86": "i386",
            "x86_64": "x86_64",
        }.get(normalize_arch(py), "")
    elif normal_os == "darwin":
        return {
            "aarch64": "arm64",
            "x86_64": "x86_64",
        }.get(normalize_arch(py), "")
    elif normal_os == "windows":
        return {
            "x86": "i386",
            "x86_64": "x86_64",
        }.get(normalize_arch(py), "")

    return ""


def guess_platform_python_implementation(py: "TargetPython") -> str:
    """Guess `platform.python_implementation()` from the PEP 425 python tag."""
    # See https://peps.python.org/pep-0425/#python-tag
    if not py.implementation:
        return ""
    abbrev = py.implementation[:2]
    return {
        "py": "Python",
        "cp": "CPython",
        "ip": "IronPython",
        "pp": "PyPy",
        "jy": "Jython",
    }.get(abbrev, "")


def guess_platform_release(py: "TargetPython") -> str:
    """Always "": the OS release is not derivable from a TargetPython."""
    return ""


def guess_platform_system(py: "TargetPython") -> str:
    """Guess `platform.system()` ("Linux"/"Darwin"/"Windows")."""
    return {
        "linux": "Linux",
        "darwin": "Darwin",
        "windows": "Windows",
    }.get(normalize_os(py), "")


def guess_platform_version(py: "TargetPython") -> str:
    """Always "": the OS version is not derivable from a TargetPython."""
    return ""


def guess_python_version(py: "TargetPython") -> str:
    """Return "major.minor" of the target interpreter."""
    return ".".join(str(i) for i in py.py_version_info[:2])


def guess_python_full_version(py: "TargetPython") -> str:
    """Return "major.minor.micro" of the target interpreter."""
    return ".".join(str(i) for i in py.py_version_info[:3])


def guess_implementation_name(py: "TargetPython") -> str:
    """Guess `sys.implementation.name` from the PEP 425 python tag."""
    # See https://peps.python.org/pep-0425/#python-tag
    if not py.implementation:
        return ""
    abbrev = py.implementation[:2]
    return {
        "py": "python",
        "cp": "cpython",
        "ip": "ironpython",
        "pp": "pypy",
        "jy": "jython",
    }.get(abbrev, "")


def guess_implementation_version(py: "TargetPython") -> str:
    """Guess `sys.implementation.version` (same as the full Python version)."""
    return guess_python_full_version(py)


def guess_environment_markers(py: "TargetPython") -> Dict[str, str]:
    """Return the full PEP 508 marker dict guessed from a TargetPython."""
    return {
        "os_name": guess_os_name(py),
        "sys_platform": guess_sys_platform(py),
        "platform_machine": guess_platform_machine(py),
        "platform_python_implementation": guess_platform_python_implementation(py),
        "platform_release": guess_platform_release(py),
        "platform_system": guess_platform_system(py),
        "platform_version": guess_platform_version(py),
        "python_version": guess_python_version(py),
        "python_full_version": guess_python_full_version(py),
        "implementation_name": guess_implementation_name(py),
        "implementation_version": guess_implementation_version(py),
    }
"""
A tool that takes an input PEP 425 tag and an optional list of environment
marker overrides and outputs the result of guessed markers with overrides.
"""
from __future__ import annotations

import json
import os
from argparse import Namespace
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional

from dacite.config import Config
from dacite.core import from_dict
from pip._internal.models.target_python import TargetPython
from pycross.private.tools.args import FlagFileArgumentParser
from pycross.private.tools.target_environment import TargetEnv

# Legacy <-> PEP 600 manylinux tag equivalences; the map is intentionally
# bidirectional so either spelling expands to the other.
_MANYLINUX_ALIASES = {
    "manylinux1_x86_64": "manylinux_2_5_x86_64",
    "manylinux1_i686": "manylinux_2_5_i686",
    "manylinux2010_x86_64": "manylinux_2_12_x86_64",
    "manylinux2010_i686": "manylinux_2_12_i686",
    "manylinux2014_x86_64": "manylinux_2_17_x86_64",
    "manylinux2014_i686": "manylinux_2_17_i686",
    "manylinux2014_aarch64": "manylinux_2_17_aarch64",
    "manylinux2014_armv7l": "manylinux_2_17_armv7l",
    "manylinux2014_ppc64": "manylinux_2_17_ppc64",
    "manylinux2014_ppc64le": "manylinux_2_17_ppc64le",
    "manylinux2014_s390x": "manylinux_2_17_s390x",
}
_MANYLINUX_ALIASES.update({v: k for k, v in _MANYLINUX_ALIASES.items()})


@dataclass
class Input:
    """One target environment specification (CLI flags or batch JSON entry)."""

    name: str
    implementation: str
    version: str
    output: Path
    abis: List[str] = field(default_factory=list)
    platforms: List[str] = field(default_factory=list)
    environment_markers: Dict[str, str] = field(default_factory=dict)
    python_compatible_with: List[str] = field(default_factory=list)
    flag_values: Dict[str, str] = field(default_factory=dict)
    config_setting_target: Optional[str] = None

    @staticmethod
    def from_dict(data: Dict[str, Any]) -> Input:
        return from_dict(Input, data, config=Config(cast=[Path]))


def _expand_manylinux_platforms(platforms: Iterable[str]) -> List[str]:
    """Return a sorted platform list with manylinux aliases added."""
    extra_platforms = set()
    platforms = set(platforms)
    for platform in platforms:
        if platform in _MANYLINUX_ALIASES:
            extra_platforms.add(_MANYLINUX_ALIASES[platform])
    platforms.update(extra_platforms)
    return sorted(platforms)


def create(input: Input) -> None:
    """Write the TargetEnv JSON described by `input` to `input.output`.

    Raises:
        ValueError: if `input.version` is not of the form a.b.c, or an
            environment-marker override key is invalid.
    """
    # BUG FIX: environment_markers is a dict; iterating it directly yields
    # only keys (and `for key, val in ...` would try to unpack each key
    # string). Use .items() to get (key, value) pairs.
    overrides = {}
    for key, val in input.environment_markers.items():
        overrides[key] = val

    version_info = tuple(int(part) for part in input.version.split("."))
    if len(version_info) != 3:
        raise ValueError("Version must be in the format a.b.c.")

    platforms = _expand_manylinux_platforms(input.platforms)
    target_python = TargetPython(
        platforms=platforms or ["any"],
        py_version_info=version_info,
        abis=input.abis or ["none"],
        implementation=input.implementation,
    )

    target = TargetEnv.from_target_python(
        input.name,
        target_python,
        overrides,
        input.python_compatible_with,
        input.flag_values,
        input.config_setting_target,
    )
    with open(input.output, "w") as f:
        json.dump(target.to_dict(), f, indent=2, sort_keys=True)
        f.write("\n")


def parse_flags() -> Namespace:
    """Build and run the `create` / `batch-create` argument parser."""
    root = FlagFileArgumentParser(description="Generate target python information.")

    subparsers = root.add_subparsers(dest="subparser_name")

    create_parser = subparsers.add_parser("create")
    create_parser.add_argument(
        "--name",
        required=True,
        help="The given platform name.",
    )

    create_parser.add_argument(
        "--implementation",
        required=True,
        help="The PEP 425 implementation abbreviation (e.g., cp for cpython).",
    )

    create_parser.add_argument(
        "--version",
        required=True,
        help="The Python version.",
    )

    create_parser.add_argument(
        "--abi",
        action="append",
        dest="abis",
        help="A list of PEP 425 abi tags.",
    )

    create_parser.add_argument(
        "--platform",
        action="append",
        dest="platforms",
        help="A list of PEP 425 platform tags.",
    )

    create_parser.add_argument(
        "--environment-marker",
        nargs=2,
        action="append",
        dest="environment_markers",
        help="Environment marker overrides in the format `marker override`.",
    )

    create_parser.add_argument(
        "--python-compatible-with",
        action="append",
        help="Name of the environment constraint label.",
    )

    create_parser.add_argument(
        "--flag-value",
        nargs=2,
        action="append",
        dest="flag_values",
        help="A config_setting flag value.",
    )

    create_parser.add_argument(
        "--config-setting-target",
        help="The config_setting target to use.",
    )

    create_parser.add_argument(
        "--output",
        type=Path,
        required=True,
        help="The output file.",
    )

    batch_create_parser = subparsers.add_parser("batch-create")
    batch_create_parser.add_argument(
        "--input",
        type=Path,
        required=True,
        help="The input file.",
    )

    return root.parse_args()


if __name__ == "__main__":
    # When under `bazel run`, change to the actual working dir.
    if "BUILD_WORKING_DIRECTORY" in os.environ:
        os.chdir(os.environ["BUILD_WORKING_DIRECTORY"])

    args = parse_flags()
    if args.subparser_name == "create":
        input_dict = {k: v for k, v in vars(args).items() if v is not None}

        # Some of the parsed values come as lists of tuples, but they should be dicts.
        for dict_key in ("environment_markers", "flag_values"):
            if dict_key in input_dict:
                input_dict[dict_key] = dict(input_dict[dict_key])

        input = Input.from_dict(input_dict)
        create(input)
    elif args.subparser_name == "batch-create":
        with open(args.input) as f:
            inputs = json.load(f)
        for input_dict in inputs:
            input = Input.from_dict(input_dict)
            create(input)
    else:
        raise AssertionError("Bad subparser_name: " + args.subparser_name)
+""" +import json +import os +import shutil +import subprocess +import sys +import tarfile +import tempfile +import textwrap +import traceback +import zipfile +from pathlib import Path +from typing import Any, Dict, List, Mapping, NoReturn, Optional, Sequence, Tuple, Union + +from build import ProjectBuilder +from packaging.utils import parse_wheel_filename +from pycross.private.tools.args import FlagFileArgumentParser +from pycross.private.tools.crossenv.utils import find_sysconfig_data +from pycross.private.tools.target_environment import TargetEnv + +_COLORS = { + "red": "\33[91m", + "green": "\33[92m", + "yellow": "\33[93m", + "bold": "\33[1m", + "dim": "\33[2m", + "underline": "\33[4m", + "reset": "\33[0m", +} +_NO_COLORS = {color: "" for color in _COLORS} + + +def _init_colors() -> Dict[str, str]: + if "NO_COLOR" in os.environ: + if "FORCE_COLOR" in os.environ: + return _NO_COLORS + elif "FORCE_COLOR" in os.environ or sys.stdout.isatty(): + return _COLORS + return _NO_COLORS + + +_STYLES = _init_colors() + + +def _warn(msg: str) -> None: # pragma: no cover + """ + Print a warning message. Will color the output when writing to a TTY. + :param msg: Warning message + """ + print("{yellow}WARNING{reset} {}".format(msg, **_STYLES)) + + +def _error(msg: str, code: int = 1) -> NoReturn: # pragma: no cover + """ + Print an error message and exit. Will color the output when writing to a TTY. 
+ :param msg: Error message + :param code: Error code + """ + print("{red}ERROR{reset} {}".format(msg, **_STYLES)) + raise SystemExit(code) + + +def relpath(path: Path, start: Path) -> Path: + return Path(os.path.relpath(path, start)) + + +def determine_target_path_from_exec( + exec_python_exe: Path, target_python_exe: Path +) -> List[Path]: + query_args = ( + exec_python_exe, + "-c", + "import json, sys; print(json.dumps(dict(exec=sys.executable, path=sys.path)))", + ) + try: + out_json = subprocess.check_output(args=query_args, env={}) + query_result = json.loads(out_json) + except subprocess.CalledProcessError as cpe: + print("Failed to query exec_python for target path") + print(cpe.output.decode(), file=sys.stderr) + raise + + exec_path = Path(query_result["exec"]).resolve() + sys_path = [Path(p).resolve() for p in query_result["path"]] + target_exec_resolved = target_python_exe.resolve() + + result = [] + for p in sys_path: + try: + # Get the ancestor common to both sys.executable and this path entry + common = Path(os.path.commonpath([exec_path, p])).absolute() + # Get the depth from sys.executable to that common ancestor + exec_depth = len(exec_path.relative_to(common).parents) + # Get the path entry relative to that common ancestor + rel = p.relative_to(common) + # Construct a path with the target executable + enough ".." entries + the relative path + up_path = Path(*[".."] * exec_depth) + path = (target_exec_resolved / up_path / rel).resolve() + result.append(path) + + except ValueError: + continue + + return result + + +def get_target_sysconfig( + target_sys_path: List[Path], + exec_python_exe: Path, + target_python_exe: Path, +) -> Dict[str, Any]: + if exec_python_exe == target_python_exe: + # No need to go searching if exec_python and target_python are the same. 
+ query_args = ( + exec_python_exe, + "-c", + textwrap.dedent( + """\ + import importlib, json, sysconfig + sysconfigdata_name = sysconfig._get_sysconfigdata_name() + if sysconfigdata_name: + vars = importlib.import_module(sysconfigdata_name).build_time_vars + print(json.dumps(vars)) + else: + print("{}") + """ + ), + ) + try: + vars_json = subprocess.check_output(args=query_args) + return json.loads(vars_json) + except subprocess.CalledProcessError as cpe: + print("Failed to query exec_python for sysconfig vars") + print(cpe.output.decode(), file=sys.stderr) + raise + + # Otherwise, search target_sys_path entries. + # If target_sys_path is empty, we try to determine it from the exec python's sys path. + + if not target_sys_path: + target_sys_path = determine_target_path_from_exec( + exec_python_exe, target_python_exe + ) + + return find_sysconfig_data(target_sys_path) + + +def set_or_append(env: Dict[str, Any], key: str, value: str) -> None: + if key == "PATH": + sep = os.pathsep + else: + sep = " " + if key in env: + env[key] += sep + value + else: + env[key] = value + + +def get_default_build_env_vars(path_dirs: List[Path]) -> Dict[str, str]: + env = os.environ.copy() + + # Pop off some environment variables that might affect our build venv. + env.pop("PYTHONHOME", None) + env.pop("PYTHONPATH", None) + env.pop("RUNFILES_DIR", None) + + # set SOURCE_DATE_EPOCH to 1980 so that we can use python wheels + # https://github.com/NixOS/nixpkgs/blob/master/doc/languages-frameworks/python.section.md#python-setuppy-bdist_wheel-cannot-create-whl + if "SOURCE_DATE_EPOCH" not in env: + env["SOURCE_DATE_EPOCH"] = "315532800" + + # Python wheel metadata files can be unstable. + # See https://bitbucket.org/pypa/wheel/pull-requests/74/make-the-output-of-metadata-files/diff + if "PYTHONHASHSEED" not in env: + env["PYTHONHASHSEED"] = "0" + + # Python 3.11+ supports PYTHONSAFEPATH which, when set, prevents adding unsafe entries to sys.path. 
+ # Ideally we would use isolated mode which is present in < 3.11, but that prevents us from specifying + # PYTHON* variables like PYTHONHASHSEED. + # + # https://docs.python.org/3/using/cmdline.html#envvar-PYTHONSAFEPATH + if "PYTHONSAFEPATH" not in env: + env["PYTHONSAFEPATH"] = "1" + + # Place our own directories, with possible overridden commands, at the beginning of PATH. + path_entries = [str(pd) for pd in path_dirs] + existing_path = env.get("PATH") + if existing_path: + path_entries.append(existing_path) + env["PATH"] = os.pathsep.join(path_entries) + + return env + + +def replace_path_placeholders( + data: Dict[str, Union[str, List[str]]], placeholder: str, replacement: Path +) -> Dict[str, Any]: + replacement_str = str(replacement) + if replacement_str.endswith("/"): + replacement_str = replacement_str[:-1] + result = {} + for k, v in data.items(): + if isinstance(v, list): + result[k] = [vi.replace(placeholder, replacement_str) for vi in v] + else: + result[k] = v.replace(placeholder, replacement_str) + + return result + + +def get_inherited_vars(target_sysconfig: Dict[str, Any]) -> Dict[str, Any]: + inherit_names = [ + "ABIFLAGS", + "ANDROID_API_LEVEL", + "EXE", + "EXT_SUFFIX", + "LDVERSION", + "MACHDEP", + "MACOSX_DEPLOYMENT_TARGET", + "Py_DEBUG", + "SHLIB_SUFFIX", + "VERSION", + "HOST_GNU_TYPE", + "MULTIARCH", + ] + + inherited = {name: target_sysconfig.get(name) for name in inherit_names} + + # Omitting Py_ENABLE_SHARED and LIBDIR. I'm not sure why these are needed or why we'd need to link + # to any Python shared libs. 
+ + return inherited + + +def get_wrapper_flags(cflags: str) -> List[str]: + """Returns flags that should be added to a cc wrapper.""" + possible_flags = ["-target", "--target"] + + result = [] + split_cflags = cflags.split() + for i, flag in enumerate(split_cflags): + for possible_flag in possible_flags: + if not (flag.startswith(possible_flag)): + continue + if "=" in flag: + flag, value = flag.split("=") + additions = [f"{flag}={value}"] + else: + flag, value = flag, split_cflags[i + 1] + additions = [flag, value] + + if not flag == possible_flag: + # This is something else, like --target-cpu + continue + + result.extend(additions) + + return result + + +def wrap_cc( + lang: str, cc_exe: Path, cflags: str, python_exe: Path, bin_dir: Path +) -> Path: + assert lang in ("cc", "cxx") + version_str = subprocess.check_output([cc_exe, "--version"]).decode("utf-8") + first_line = version_str.splitlines()[0] + + needs_wrap = True + if "clang" in first_line or "zig" in first_line: + wrapper_name = { + "cc": "clang", + "cxx": "clang++", + }[lang] + elif "gcc" in first_line: + wrapper_name = { + "cc": "gcc", + "cxx": "g++", + }[lang] + else: + needs_wrap = False + wrapper_name = os.path.basename(cc_exe) + + wrapper_flags = get_wrapper_flags(cflags) + if not needs_wrap and not wrapper_flags: + # No reason to generate a wrapper; just return the given cc location. 
+ return cc_exe + + wrapper_path = bin_dir / wrapper_name + + with open(wrapper_path, "w") as f: + f.write( + textwrap.dedent( + f"""\ + #!{python_exe.absolute()} + import os + import sys + + here = os.path.dirname(sys.argv[0]) + cc_exe = os.path.join(here, "{cc_exe}") + os.execv(cc_exe, [cc_exe] + {repr(wrapper_flags)} + sys.argv[1:]) + """ + ) + ) + + os.chmod(wrapper_path, 0o755) + return wrapper_path + + +def generate_cc_wrappers( + toolchain_vars: Dict[str, Any], python_exe: Path, bin_dir: Path +) -> Dict[str, str]: + orig_cc = toolchain_vars["CC"] + orig_cxx = toolchain_vars["CXX"] + cflags = toolchain_vars["CFLAGS"] + # Possibly generate wrappers around the CC and CXX executables. + wrapped_cc = wrap_cc("cc", orig_cc, cflags, python_exe, bin_dir) + wrapped_cxx = wrap_cc("cxx", orig_cxx, cflags, python_exe, bin_dir) + return { + "CC": str(wrapped_cc), + "CXX": str(wrapped_cxx), + } + + +def generate_cross_sysconfig_vars( + toolchain_vars: Dict[str, Any], + target_vars: Dict[str, Any], + wrapper_vars: Dict[str, Any], + lib_dir: Path, + include_paths: List[Path], +) -> Dict[str, Any]: + sysconfig_vars = toolchain_vars.copy() + sysconfig_vars.update(wrapper_vars) + sysconfig_vars.update(get_inherited_vars(target_vars)) + + # wheel_build.bzl gives us LDSHAREDFLAGS, but Python wants LDSHARED which is a combination of CC and LDSHAREDFLAGS + sysconfig_vars["LDSHARED"] = " ".join( + [sysconfig_vars["CC"], sysconfig_vars["LDSHAREDFLAGS"]] + ) + del sysconfig_vars["LDSHAREDFLAGS"] + + # Add search paths for listed native deps + for include_path in include_paths: + sysconfig_vars["CFLAGS"] += f" -I{include_path}" + sysconfig_vars["CFLAGS"] += f" -L{lib_dir}" + sysconfig_vars["LDSHARED"] += f" -L{lib_dir}" + + return sysconfig_vars + + +def generate_bin_tools(bin_dir: Path, toolchain_vars: Dict[str, str]) -> None: + # The bazel CC toolchains don't provide ranlib (as far as I can tell), and + # we don't want to use the host ranlib. So we place a no-op in PATH. 
+ ranlib = bin_dir / "ranlib" + ranlib.symlink_to("/bin/true") + + # Some packages execute ar from the path rather than looking at the AR var, so we add our AR to the path + # if it exists. + ar_path = toolchain_vars.get("AR") + if ar_path: + ar = bin_dir / "ar" + ar.symlink_to(ar_path) + + +def link_path_tools(tools_dir: Path, path_tools: List[Tuple[Path, Path]]) -> None: + for path_tool_name, relative_path_tool_path in path_tools: + if len(path_tool_name.parts) > 1: + _error("path_tool name must not contain path separators") + path_tool_in_bin = tools_dir / path_tool_name + path_tool_in_bin.symlink_to(relative_path_tool_path) + + +def link_native_headers(include_dir: Path, headers: List[Path]) -> None: + for header in headers: + path_in_include = include_dir / header.name + if path_in_include.exists(): + _warn( + f"Not linking {header} into include directory because {header.name} already exists." + ) + continue + path_in_include.symlink_to(relpath(header, include_dir)) + + +def link_native_libraries(lib_dir: Path, libraries: List[Path]) -> None: + for library in libraries: + path_in_lib = lib_dir / library.name + if path_in_lib.exists(): + _warn( + f"Not linking {library} into lib directory because {library.name} already exists." 
+ ) + continue + path_in_lib.symlink_to(relpath(library, lib_dir)) + + +def extract_sdist(sdist_path: Path, sdist_dir: Path) -> Path: + if sdist_path.name.endswith(".tar.gz"): + with tarfile.open(sdist_path, "r") as f: + f.extractall(sdist_dir) + elif sdist_path.name.endswith(".zip"): + with zipfile.ZipFile(sdist_path, "r") as f: + f.extractall(sdist_dir) + else: + _error(f"Unsupported sdist format: {sdist_path}") + + # After extraction, there should be a `packageName-version` directory + (extracted_dir,) = sdist_dir.glob("*") + return extracted_dir + + +def run_pre_build_hooks( + hooks: List[Path], + temp_dir: Path, + build_env: Dict[str, str], + config_settings: Dict[str, Any], +) -> Tuple[Dict[str, str], Dict[str, Any]]: + config_settings_file = temp_dir / "config_settings.json" + env_file = temp_dir / "build_env.json" + for hook in hooks: + hook_env = dict(build_env) + hook_env["PYCROSS_CONFIG_SETTINGS_FILE"] = str(config_settings_file) + hook_env["PYCROSS_ENV_VARS_FILE"] = str(env_file) + + # Write the current build env to a file. + with open(env_file, "w") as f: + json.dump(build_env, f) + + # Write current config settings to a file. + with open(config_settings_file, "w") as f: + json.dump(config_settings, f) + + try: + subprocess.check_output( + args=[hook], + env=hook_env, + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as cpe: + print("===== PRE-BUILD HOOK FAILED =====", file=sys.stderr) + print(cpe.output.decode(), file=sys.stderr) + raise + + # Read post-hook build.env and update our own environment variables. + with open(env_file, "r") as f: + build_env = json.load(f) + for k, v in build_env.items(): + if not (isinstance(k, str) and isinstance(v, str)): + _error( + "pre-build hook build_env.json must contain string keys and values" + ) + + # Read post-hook config_settings.json. 
+ with open(config_settings_file, "r") as f: + config_settings = json.load(f) + + return build_env, config_settings + + +def run_post_build_hooks( + hooks: List[Path], + temp_dir: Path, + build_env: Dict[str, str], + wheel_file: Path, +) -> Path: + wheel_in = temp_dir / "post_wheel_in" + wheel_out = temp_dir / "post_wheel_out" + wheel_in.mkdir() + wheel_out.mkdir() + + orig_wheel_file = wheel_file + wheel_file = wheel_in / wheel_file.name + shutil.move(orig_wheel_file, wheel_file) + + for hook in hooks: + hook_env = dict(build_env) + hook_env["PYCROSS_WHEEL_FILE"] = str(wheel_file) + hook_env["PYCROSS_WHEEL_OUTPUT_ROOT"] = str(wheel_out) + + try: + subprocess.check_output( + args=[hook], + env=hook_env, + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as cpe: + print("===== POST-BUILD HOOK FAILED =====", file=sys.stderr) + print(cpe.output.decode(), file=sys.stderr) + raise + + output_files = list(wheel_out.glob("*")) + if len(output_files) > 1: + _error("post-build hook wrote multiple files in PYCROSS_WHEEL_OUTPUT_ROOT") + if output_files: + hook_wheel_file = output_files[0] + if hook_wheel_file.suffix != ".whl": + _error(f"post-build hook wrote non-whl file: {hook_wheel_file.name}") + + # We shuffle the newly-written wheel into post_wheel_in/ and clear post_wheel_out/ + shutil.rmtree(wheel_in) + wheel_in.mkdir() + wheel_file = wheel_in / hook_wheel_file.name + shutil.move(hook_wheel_file, wheel_file) + shutil.rmtree(wheel_out) + wheel_out.mkdir() + + return wheel_file + + +def check_filename_against_target( + wheel_name: str, target_environment: TargetEnv +) -> None: + _, _, _, tags = parse_wheel_filename(wheel_name) + tag_names = {str(t) for t in tags} + if not tag_names.intersection(target_environment.compatibility_tags): + _error( + f"No tags in {wheel_name} match target environment {target_environment.name}" + ) + + +def find_site_dir(env_dir: Path) -> Path: + lib_dir = env_dir / "lib" + try: + return 
next(lib_dir.glob("python*/site-packages")) + except StopIteration: + raise ValueError(f"Cannot find site-packages under {env_dir}") + + +def build_cross_venv( + env_dir: Path, + exec_python_exe: Path, + target_python_exe: Path, + sysconfig_vars: Dict[str, Any], + target_env: Optional[TargetEnv], +) -> None: + sysconfig_json = env_dir / "sysconfig.json" + with open(sysconfig_json, "w") as f: + json.dump(sysconfig_vars, f, indent=2) + + crossenv_args = [ + exec_python_exe, + "-m", + "pycross.private.tools.crossenv", + "--env-dir", + str(env_dir), + "--sysconfig-json", + str(sysconfig_json), + "--target-python", + target_python_exe, + ] + + if target_env: + for tag in target_env.compatibility_tags: + if "manylinux" in tag: + crossenv_args.extend( + [ + "--manylinux", + tag, + ] + ) + + try: + subprocess.check_output( + args=crossenv_args, env=os.environ, stderr=subprocess.STDOUT + ) + except subprocess.CalledProcessError as cpe: + print("===== CROSSENV FAILED =====", file=sys.stderr) + print(cpe.output.decode(), file=sys.stderr) + raise + + +def build_standard_venv( + env_dir: Path, exec_python_exe: Path, sysconfig_vars: Dict[str, Any] +) -> None: + venv_args = [ + exec_python_exe, + "-m", + "venv", + "--symlinks", + "--without-pip", + str(env_dir), + ] + + try: + subprocess.check_output( + args=venv_args, env=os.environ, stderr=subprocess.STDOUT + ) + except subprocess.CalledProcessError as cpe: + print("===== VENV FAILED =====", file=sys.stderr) + print(cpe.output.decode(), file=sys.stderr) + raise + + # Setup our customized sysconfig vars + site_dir = find_site_dir(env_dir) + with open(site_dir / "_pycross_sysconfigdata.py", "w") as f: + f.write(f"build_time_vars = {repr(sysconfig_vars)}\n") + with open(site_dir / "_pycross_sysconfigdata.pth", "w") as f: + f.write( + 'import os; os.environ["_PYTHON_SYSCONFIGDATA_NAME"] = "_pycross_sysconfigdata"\n' + ) + + +def build_venv( + bazel_root: Path, + env_dir: Path, + exec_python_exe: Path, + target_python_exe: Path, + 
sysconfig_vars: Dict[str, Any], + path: List[Path], + target_env: Optional[TargetEnv], + always_use_crossenv: bool = False, +) -> None: + if exec_python_exe != target_python_exe or always_use_crossenv: + build_cross_venv( + env_dir, exec_python_exe, target_python_exe, sysconfig_vars, target_env + ) + else: + build_standard_venv(env_dir, exec_python_exe, sysconfig_vars) + + site_dir = find_site_dir(env_dir) + + # Add a pth file to override sys.prefix and sys.exec_prefix as paths relative to the sdist root. + with open(site_dir / "_pycross_sys_prefix.pth", "w") as f: + f.write(f'import sys; sys.prefix = sys.exec_prefix = "{env_dir}"\n') + + # If we're using a Bazel-provided python (i.e., not system python), set sys.base_prefix to a path + # relative to the sdist root in an attempt to keep non-reproducible paths out of binaries. + if bazel_root in target_python_exe.parents: + # base_prefix and base_exec_prefix are the grandparent directory of the executable. + # E.g., if the executable is at python310/bin/python3, python310 is base_prefix. + # target_python_exe should already be a relative path. + with open(site_dir / "_pycross_sys_base_prefix.pth", "w") as f: + f.write( + f'import sys; sys.base_prefix = sys.base_exec_prefix = "{target_python_exe.parent.parent}"\n' + ) + + # Add a pth file to include all of our build dependencies. 
+ with open(site_dir / "deps.pth", "w") as f: + f.write("\n".join(os.path.relpath(p, site_dir) for p in path) + "\n") + + +def build_wheel( + env_dir: Path, + wheel_dir: Path, + sdist_dir: Path, + build_env: Dict[str, str], + config_settings: Dict[str, str], + debug: bool = False, +) -> Path: + python_exe = env_dir / "bin" / "python" + + def _subprocess_runner( + cmd: Sequence[str], + cwd: Optional[str] = None, + extra_environ: Optional[Mapping[str, str]] = None, + ): + """The default method of calling the wrapper subprocess.""" + cmd = list(cmd) + env = build_env.copy() + + if extra_environ: + env.update(extra_environ) + + if debug: + try: + site = subprocess.check_output( + [cmd[0], "-m", "site"], cwd=cwd, env=env, stderr=subprocess.STDOUT + ) + print("===== BUILD SITE =====", file=sys.stdout) + print(site.decode(), file=sys.stdout) + except subprocess.CalledProcessError as cpe: + print("Warning: failed to collect site output", file=sys.stderr) + print(cpe.output.decode(), file=sys.stderr) + + try: + output = subprocess.check_output( + cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT + ) + except subprocess.CalledProcessError as cpe: + print("===== BUILD FAILED =====", file=sys.stderr) + print(cpe.output.decode(), file=sys.stderr) + raise + + if debug: + print(output.decode(), file=sys.stdout) + + builder = ProjectBuilder( + source_dir=sdist_dir, + python_executable=str(python_exe), + runner=_subprocess_runner, + ) + + try: + # TODO: Verify requirements in environment. + + wheel_file = builder.build( + distribution="wheel", + output_directory=wheel_dir, + config_settings=config_settings, + ) + + except Exception as e: # pragma: no cover + tb = traceback.format_exc().strip("\n") + print("\n{dim}{}{reset}\n".format(tb, **_STYLES)) + _error(str(e)) + raise # Won't happen because _error exits, but it makes static analyzers happy. 
+ + return Path(wheel_file) + + +def init_build_env_vars( + args: Any, + temp_dir: Path, + path_dirs: List[Path], + include_dirs: List[Path], + lib_dirs: List[Path], + bazel_root: Path, +) -> Dict[str, str]: + vars = get_default_build_env_vars(path_dirs) + if args.build_env: + with open(args.build_env, "r") as f: + additional_build_env = replace_path_placeholders( + json.load(f), + "$$EXT_BUILD_ROOT$$", + bazel_root, + ) + for key, val in additional_build_env.items(): + set_or_append(vars, key, val) + + vars["PYCROSS_INCLUDE_PATH"] = os.pathsep.join(map(str, include_dirs)) + vars["PYCROSS_LIBRARY_PATH"] = os.pathsep.join(map(str, lib_dirs)) + vars["PYCROSS_BAZEL_ROOT"] = str(bazel_root) + vars["PYCROSS_BUILD_ROOT"] = str(temp_dir) + + return vars + + +def init_config_settings(args: Any, bazel_root: Path) -> Dict[str, Any]: + if not args.config_settings: + return {} + + with open(args.config_settings, "r") as f: + config_settings = replace_path_placeholders( + json.load(f), + "$$EXT_BUILD_ROOT$$", + bazel_root, + ) + + return config_settings + + +def load_target_environment(args: Any) -> Optional[TargetEnv]: + if args.target_environment_file: + with open(args.target_environment_file, "r") as f: + return TargetEnv.from_dict(json.load(f)) + + +def load_sysconfig_vars(args: Any, bazel_root: Path) -> Dict[str, Any]: + with open(args.sysconfig_vars, "r") as f: + vars = json.load(f) + return replace_path_placeholders( + vars, + "$$EXT_BUILD_ROOT$$", + bazel_root, + ) + + +def execroot_prefix(workspace_name: str) -> Path: + return Path("..") / "bazel-execroot" / workspace_name + + +def main(args: Any, temp_dir: Path, is_debug: bool) -> None: + # Paths passed into this action will be relative to bazel's execroot. + # But we need to build the wheel from within the extracted sdist directory. + # So here's the plan: + # * Build a temp area. In here we'll have sdist, env (virtual environment) and some + # other stuff. + # * Extract the sdist. 
+ # * Link the bazel execroot to the temp area as `bazel_execroot`. + # * Change this process' directory to the sdist directory. + # * Prefix all input paths with `../bazel_execroot/`. + cwd = Path.cwd() + + # Extract the sdist and rename it to 'sdist' + sdist_dir = temp_dir / "sdist" + _sdist_extracted_dir = extract_sdist(args.sdist, temp_dir) + _sdist_extracted_dir.rename(sdist_dir) + + # Change into the new directory + os.chdir(sdist_dir) + sdist_dir = Path(".") + temp_dir = Path("..") + + # Add the execroot symlink into our temp area. We link to the parent of current cwd since + # current cwd is something like /execroot/ + (temp_dir / "bazel-execroot").symlink_to(cwd.parent) + + # This is the prefix relative to the sdist directory that we'll prepend to everything + prefix = execroot_prefix(cwd.name) + + def mktmpdir(name: str) -> Path: + d = temp_dir / name + d.mkdir() + # Return as relative from the sdist directory + return Path("..") / name + + wheel_dir = mktmpdir("wheel") + bin_dir = mktmpdir("bin") + tools_dir = mktmpdir("tools") + build_env_dir = mktmpdir("env") + include_dir = mktmpdir("include") + lib_dir = mktmpdir("lib") + + config_settings = init_config_settings(args, prefix) + toolchain_sysconfig_vars = load_sysconfig_vars(args, prefix) + target_environment = load_target_environment(args) + + include_paths = list(args.native_include_path) + include_paths.append(include_dir) + + build_env_vars = init_build_env_vars( + args=args, + temp_dir=temp_dir, + path_dirs=[tools_dir, bin_dir], + include_dirs=include_paths, + lib_dirs=[lib_dir], + bazel_root=prefix, + ) + + wrapper_sysconfig_vars = generate_cc_wrappers( + toolchain_vars=toolchain_sysconfig_vars, + python_exe=args.exec_python_executable, + bin_dir=bin_dir, + ) + target_sysconfig_vars = get_target_sysconfig( + target_sys_path=args.target_sys_path, + exec_python_exe=args.exec_python_executable, + target_python_exe=args.target_python_executable, + ) + sysconfig_vars = generate_cross_sysconfig_vars( 
+ toolchain_vars=toolchain_sysconfig_vars, + target_vars=target_sysconfig_vars, + wrapper_vars=wrapper_sysconfig_vars, + lib_dir=lib_dir, + include_paths=include_paths, + ) + + build_venv( + bazel_root=prefix, + env_dir=build_env_dir, + exec_python_exe=args.exec_python_executable, + target_python_exe=args.target_python_executable, + sysconfig_vars=sysconfig_vars, + path=args.python_path, + target_env=target_environment, + always_use_crossenv=args.always_use_crossenv, + ) + + generate_bin_tools(bin_dir, toolchain_sysconfig_vars) + link_path_tools(tools_dir, args.path_tool) + link_native_headers(include_dir, args.native_header) + link_native_libraries(lib_dir, args.native_library) + + if is_debug: + print(f"Build environment: {build_env_dir.absolute()}") + + build_env_vars, config_settings = run_pre_build_hooks( + hooks=args.pre_build_hook, + temp_dir=temp_dir, + build_env=build_env_vars, + config_settings=config_settings, + ) + + wheel_file = build_wheel( + env_dir=build_env_dir, + wheel_dir=wheel_dir, + sdist_dir=sdist_dir, + build_env=build_env_vars, + config_settings=config_settings, + debug=is_debug, + ) + + wheel_file = run_post_build_hooks( + hooks=args.post_build_hook, + temp_dir=temp_dir, + build_env=build_env_vars, + wheel_file=wheel_file, + ) + + if target_environment: + check_filename_against_target(os.path.basename(wheel_file), target_environment) + + shutil.move(wheel_file, args.wheel_file) + with open(args.wheel_name_file, "w") as f: + f.write(os.path.basename(wheel_file)) + + +def parse_flags() -> Any: + # At the time of flags parsing, we should be within .../execroot/ + workspace_name = Path.cwd().name + prefix = execroot_prefix(workspace_name) + + def sdist_rel_path(val): + return prefix / val + + parser = FlagFileArgumentParser(description="Generate target python information.") + + parser.add_argument( + "--always-use-crossenv", + action="store_true", + ) + + parser.add_argument( + "--build-env", + type=sdist_rel_path, + help="A JSON file 
containing build environment variables.", + ) + + parser.add_argument( + "--config-settings", + type=sdist_rel_path, + help="A JSON file containing PEP 517 build config settings.", + ) + + parser.add_argument( + "--exec-python-executable", + type=sdist_rel_path, + required=True, + ) + + parser.add_argument( + "--native-header", + type=sdist_rel_path, + action="append", + default=[], + help="Header file (or directory of files) to link into our include directory.", + ) + + parser.add_argument( + "--native-include-path", + type=sdist_rel_path, + action="append", + default=[], + help="Include search path to add to CFLAGS.", + ) + + parser.add_argument( + "--native-library", + type=sdist_rel_path, + action="append", + default=[], + help="Library to link into our lib directory.", + ) + + parser.add_argument( + "--path-tool", + type=Path, + nargs=2, + action="append", + default=[], + help="A tool to made available in PATH when building the sdist.", + ) + + parser.add_argument( + "--post-build-hook", + type=sdist_rel_path, + action="append", + default=[], + help="A tool to run after building the wheel.", + ) + + parser.add_argument( + "--pre-build-hook", + type=sdist_rel_path, + action="append", + default=[], + help="A tool to run before building the sdist.", + ) + + parser.add_argument( + "--python-path", + type=sdist_rel_path, + action="append", + default=[], + help="An entry to add to sys.path", + ) + + parser.add_argument( + "--sdist", + type=Path, + required=True, + help="The sdist path.", + ) + + parser.add_argument( + "--sysconfig-vars", + type=sdist_rel_path, + required=True, + help="A JSON file containing variable to add to sysconfig.", + ) + + parser.add_argument( + "--target-environment-file", + type=sdist_rel_path, + help="A JSON file containing the target Python environment details.", + ) + + parser.add_argument( + "--target-python-executable", + type=sdist_rel_path, + required=True, + ) + + parser.add_argument( + "--target-sys-path", + type=sdist_rel_path, + 
action="append", + default=[], + ) + + parser.add_argument( + "--wheel-file", + type=sdist_rel_path, + required=True, + help="The wheel output path.", + ) + + parser.add_argument( + "--wheel-name-file", + type=sdist_rel_path, + required=True, + help="The wheel name output path.", + ) + + args = parser.parse_args() + + # Fix up path_tool; the second entry in each tuple should be sdist_rel_path, but the first should not. + if args.path_tool: + args.path_tool = [(p1, sdist_rel_path(p2)) for p1, p2 in args.path_tool] + + return args + + +def main_wrapper(args: Any) -> None: + # Some older versions of Python on MacOS leak __PYVENV_LAUNCHER__ through to subprocesses. + # When this is set, a created virtualenv will link to this value rather than sys.argv[0], which we don't want. + # So just clear it if it exists. + os.environ.pop("__PYVENV_LAUNCHER__", None) + + _is_debug = "RULES_PYCROSS_DEBUG" in os.environ + _temp_dir = Path(tempfile.mkdtemp(prefix="wheelbuild")) + + try: + main(args, _temp_dir, _is_debug) + finally: + if not _is_debug: + shutil.rmtree(_temp_dir, ignore_errors=True) + + +if __name__ == "__main__": + # When under `bazel run`, change to the actual working dir. + if "BUILD_WORKING_DIRECTORY" in os.environ: + os.chdir(os.environ["BUILD_WORKING_DIRECTORY"]) + + main_wrapper(parse_flags()) diff --git a/python/private/pycross_staging/private/tools/wheel_installer.py b/python/private/pycross_staging/private/tools/wheel_installer.py new file mode 100644 index 0000000000..a6d7eacdae --- /dev/null +++ b/python/private/pycross_staging/private/tools/wheel_installer.py @@ -0,0 +1,161 @@ +""" +A tool that invokes pypa/build to build the given sdist tarball. 
+""" +from __future__ import annotations + +import fnmatch +import os +import shutil +import tempfile +import zipfile +from contextlib import contextmanager +from pathlib import Path +from typing import Any, Iterator, List, Union + +from installer import install +from installer.destinations import SchemeDictionaryDestination +from installer.sources import WheelContentElement, WheelFile +from pycross.private.tools import namespace_pkgs +from pycross.private.tools.args import FlagFileArgumentParser + + +def setup_namespace_pkg_compatibility(wheel_dir: Path) -> None: + """Converts native namespace packages to pkgutil-style packages + + Namespace packages can be created in one of three ways. They are detailed here: + https://packaging.python.org/guides/packaging-namespace-packages/#creating-a-namespace-package + + 'pkgutil-style namespace packages' (2) and 'pkg_resources-style namespace packages' (3) works in Bazel, but + 'native namespace packages' (1) do not. + + We ensure compatibility with Bazel of method 1 by converting them into method 2. 
+ + Args: + wheel_dir: the directory of the wheel to convert + """ + + namespace_pkg_dirs = namespace_pkgs.implicit_namespace_packages( + str(wheel_dir), + ignored_dirnames=["%s/bin" % wheel_dir], + ) + + for ns_pkg_dir in namespace_pkg_dirs: + namespace_pkgs.add_pkgutil_style_namespace_pkg_init(ns_pkg_dir) + + +class FilteredWheelFile(WheelFile): + def __init__(self, f: zipfile.ZipFile, install_exclude_globs: List[str]) -> None: + super().__init__(f) + self._install_exclude_globs = install_exclude_globs + + @classmethod + @contextmanager + def open_filtered( + cls, path: Union[os.PathLike, str], install_exclude_globs: List[str] + ) -> Iterator[FilteredWheelFile]: + with zipfile.ZipFile(path) as f: + yield cls(f, install_exclude_globs) + + def get_contents(self) -> Iterator[WheelContentElement]: + for record_elements, stream, is_executable in super().get_contents(): + if not self.should_install(stream.name): + continue + yield record_elements, stream, is_executable + + def should_install(self, filename: str) -> bool: + for install_exclude_glob in self._install_exclude_globs: + if fnmatch.fnmatch(filename, install_exclude_glob): + return False + return True + + +def main(args: Any) -> None: + dest_dir = args.directory + lib_dir = dest_dir / "site-packages" + destination = SchemeDictionaryDestination( + scheme_dict={ + "platlib": str(lib_dir), + "purelib": str(lib_dir), + "headers": str(dest_dir / "include"), + "scripts": str(dest_dir / "bin"), + "data": str(dest_dir / "data"), + }, + interpreter="/usr/bin/env python3", # Generic; it's not feasible to run these scripts directly. + script_kind="posix", + bytecode_optimization_levels=[], # Setting to empty list to disable generation of .pyc files. 
+ ) + + link_dir = Path(tempfile.mkdtemp()) + if args.wheel_name_file: + with open(args.wheel_name_file, "r") as f: + wheel_name = f.read().strip() + else: + wheel_name = os.path.basename(args.wheel) + + link_path = link_dir / wheel_name + os.symlink(os.path.join(os.getcwd(), args.wheel), link_path) + + try: + with FilteredWheelFile.open_filtered( + link_path, args.install_exclude_globs + ) as source: + install( + source=source, + destination=destination, + # Additional metadata that is generated by the installation tool. + additional_metadata={ + "INSTALLER": b"https://github.com/jvolkman/rules_pycross", + }, + ) + finally: + shutil.rmtree(link_dir, ignore_errors=True) + + setup_namespace_pkg_compatibility(lib_dir) + + +def parse_flags() -> Any: + parser = FlagFileArgumentParser(description="Extract a Python wheel.") + + parser.add_argument( + "--wheel", + type=Path, + required=True, + help="The wheel file path.", + ) + + parser.add_argument( + "--wheel-name-file", + type=Path, + required=False, + help="A file containing the canonical name of the wheel.", + ) + + parser.add_argument( + "--enable-implicit-namespace-pkgs", + action="store_true", + help="If true, disables conversion of implicit namespace packages and will unzip as-is.", + ) + + parser.add_argument( + "--install-exclude-glob", + action="append", + dest="install_exclude_globs", + default=[], + help="A glob for files to exclude during installation.", + ) + + parser.add_argument( + "--directory", + type=Path, + help="The output path.", + ) + + return parser.parse_args() + + +if __name__ == "__main__": + # When under `bazel run`, change to the actual working dir. 
+ if "BUILD_WORKING_DIRECTORY" in os.environ: + os.chdir(os.environ["BUILD_WORKING_DIRECTORY"]) + + main(parse_flags()) diff --git a/python/private/pycross_staging/private/util.bzl b/python/private/pycross_staging/private/util.bzl new file mode 100644 index 0000000000..5bb400ae02 --- /dev/null +++ b/python/private/pycross_staging/private/util.bzl @@ -0,0 +1,43 @@ +"""Shared utilities""" + +# Whether we're using at least Bazel 7 +IS_BAZEL_7_OR_HIGHER = hasattr(native, "starlark_doc_extract") + +# Whether we're using bzlmod +BZLMOD = str(Label("//:invalid")).startswith("@@") + +# The http library seems to depend on cache.bzl as of Bazel 7 +REPO_HTTP_DEPS = [ + "@bazel_tools//tools/build_defs/repo:http.bzl", +] + [ + "@bazel_tools//tools/build_defs/repo:cache.bzl", +] if IS_BAZEL_7_OR_HIGHER else [] + +def trace_ctx(ctx, display_name = "ctx"): + """Wraps a context object so that method calls are printed with their arguments. + + Usage example: + def _my_module_impl(module_ctx): + module_ctx = trace_context(module_ctx, "module_ctx") + ... 
+ """ + + def wrap(field_name): + field = getattr(ctx, field_name) + + if type(field) != "builtin_function_or_method": + return field + + def _wrapper(*a, **kw): + args = [repr(arg) for arg in a] + for k, v in kw.items(): + args.append("{}={}".format(k, repr(v))) + + # buildifier: disable=print + print("{}.{}({})".format(display_name, field_name, ", ".join(args))) + + return field(*a, **kw) + + return _wrapper + + return struct(**{field_name: wrap(field_name) for field_name in dir(ctx)}) diff --git a/python/private/pycross_staging/private/wheel_build.bzl b/python/private/pycross_staging/private/wheel_build.bzl new file mode 100644 index 0000000000..f2e895debe --- /dev/null +++ b/python/private/pycross_staging/private/wheel_build.bzl @@ -0,0 +1,447 @@ +"""Implementation of the pycross_wheel_build rule.""" + +load("@bazel_skylib//lib:paths.bzl", "paths") +load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain") +load("@rules_python//python:py_info.bzl", "PyInfo") +load( + ":cc_toolchain_util.bzl", + "absolutize_path_in_str", + "get_env_vars", + "get_flags_info", + "get_headers", + "get_libraries", + "get_tools_info", +) +load(":providers.bzl", "PycrossWheelInfo") + +PYTHON_TOOLCHAIN_TYPE = Label("@rules_python//python:toolchain_type") +PYCROSS_TOOLCHAIN_TYPE = Label("//pycross:toolchain_type") + +def _absolute_tool_value(workspace_name, value): + if value: + tool_value_absolute = absolutize_path_in_str(workspace_name, "$$EXT_BUILD_ROOT$$/", value, True) + + # If the tool path contains whitespaces (e.g. 
C:\Program Files\...), + # MSYS2 requires that the path is wrapped in double quotes + if " " in tool_value_absolute: + tool_value_absolute = "\\\"" + tool_value_absolute + "\\\"" + + return tool_value_absolute + return value + +def _join_flags_list(workspace_name, flags): + return " ".join([absolutize_path_in_str(workspace_name, "$$EXT_BUILD_ROOT$$/", flag) for flag in flags]) + +def _get_sysconfig_data(workspace_name, tools, flags): + cc = _absolute_tool_value(workspace_name, tools.cc) + cxx = _absolute_tool_value(workspace_name, tools.cxx) + ar = _absolute_tool_value(workspace_name, tools.cxx_linker_static) + ar_flags = flags.cxx_linker_static + + # If libtool is used as AR, the output file has to be prefixed with + # "-o". + if ar == "libtool" or ar.endswith("/libtool"): + ar_flags = ar_flags + ["-o"] + + vars = { + "AR": ar, + "ARFLAGS": _join_flags_list(workspace_name, ar_flags), + "CC": cc, + "CCSHARED": "-fPIC" if flags.needs_pic_for_dynamic_libraries else "", + "CFLAGS": _join_flags_list(workspace_name, flags.cc), + "CUSTOMIZED_OSX_COMPILER": "True", + "CXX": cxx, + "GNULD": "yes" if "gcc" in cc else "no", # is there a better way? + "LDSHAREDFLAGS": _join_flags_list(workspace_name, flags.cxx_linker_shared), + } + + return vars + +def _is_sibling_repository_layout_enabled(): + # It's possible to determine if --experimental_sibling_repository_layout is enabled by looking at + # Label(@foo).workspace_root. If it's enabled, this value will start with `../`. By default it'll + # start with `external/`. + # Use rules_pycross_internal, which we know is external to this rule and always available. + test = Label("@rules_pycross_internal//:BUILD.bazel") + return test.workspace_root.startswith("..") + +def _resolve_import_path_fn(ctx): + # Call the inner function with simple values so the closure it returns doesn't hold onto a large + # amount of state. 
+ return _resolve_import_path_fn_inner( + ctx.workspace_name, + ctx.bin_dir.path, + _is_sibling_repository_layout_enabled(), + ) + +def _executable(target): + exe = target[DefaultInfo].files_to_run.executable + if not exe: + fail("%s is not executable" % target.label) + return exe.path + +def _resolve_import_path_fn_inner(workspace_name, bin_dir, sibling_layout): + # The PyInfo import names assume a runfiles-type structure. E.g.: + # mytool.runfiles/ + # main_repo/ + # my_package/ + # external_repo_1/ + # some_package/ + # external_repo_2/ + # ... + # + # An example PyInfo import name might be "external_repo_1/some_package", which maps nicely to the structure + # above. However, our wheel builder isn't consuming these dependencies as runfiles, but as inputs. And so + # for whatever reason the structure is different: + # + # sandbox/main_repo/ + # bazel-out/ + # k8-fastbuild/ + # bin/ + # my_package/ + # external/ + # external_repo_1/ + # some_package/ + # external_repo_2/ + # ... + # + # And to complicate the matter even further, the --experimental_sibling_repository_layout flag changes this + # structure to be: + # + # sandbox/main_repo/ + # bazel-out/ + # k8-fastbuild/ + # bin/ + # my_package/ + # external_repo_1/ + # k8-fastbuild/ + # bin/ + # some_package/ + # external_repo_2/ + # ... + # + + # ctx.bin_dir returns something like bazel-out/k8-fastbuild/bin in legacy mode, or + # bazel-out/my_external_repo/k8-fastbuild/bin in sibling layout when the target is within an external repo. + # We really just want the first part and the last two parts. The repo name that's added with sibling mode isn't + # useful for our case. + bin_dir_parts = bin_dir.split("/") + output_dir = bin_dir_parts[0] + bin_dir = paths.join(*bin_dir_parts[-2:]) + + def fn(import_name): + # Split the import name into its repo and path. + import_repo, import_path = import_name.split("/", 1) + + # Packages within the workspace are always the same regardless of sibling layout. 
+ if import_repo == workspace_name: + return paths.join(output_dir, bin_dir, import_path) + + # Otherwise, if sibling layout is enabled... + if sibling_layout: + return paths.join(output_dir, import_repo, bin_dir, import_path) + + # And lastly, just use the traditional layout. + return paths.join(output_dir, bin_dir, "external", import_repo, import_path) + + return fn + +def _expand_locations_and_vars(attribute_name, ctx, val): + rule_dir = paths.join( + ctx.bin_dir.path, + ctx.label.workspace_root, + ctx.label.package, + ) + + additional_substitutions = { + "BUILD_FILE_PATH": ctx.build_file_path, + "INFO_FILE": ctx.info_file.path, + "RULEDIR": rule_dir, + "TARGET": "{}//{}:{}".format( + "@" + ctx.label.workspace_name if ctx.label.workspace_name else "", + ctx.label.package, + ctx.label.name, + ), + "VERSION_FILE": ctx.version_file.path, + "WORKSPACE": ctx.workspace_name, + } + + # We import $(abspath :foo) by replacing it with $(execpath :foo) prefixed by + # $$EXT_BUILD_ROOT$$/, which is replaced in our build action. Note that "$$$$" + # turns into "$$" after passing through ctx.expand_location. + val = val.replace("$(abspath ", "$$$$EXT_BUILD_ROOT$$$$/$(execpath ") + val = ctx.expand_location(val, ctx.attr.deps + ctx.attr.native_deps + ctx.attr.data) + val = ctx.expand_make_variables(attribute_name, val, additional_substitutions) + return val + +def _handle_toolchains(ctx, args, tools): + py_toolchain = ctx.toolchains[PYTHON_TOOLCHAIN_TYPE].py3_runtime + cpp_toolchain = find_cpp_toolchain(ctx) + + if cpp_toolchain.all_files: + tools.append(cpp_toolchain.all_files) + if py_toolchain.files: + tools.append(py_toolchain.files) + + # If a pycross toolchain is configured, we use that to get the exec and target Python. 
+ if PYCROSS_TOOLCHAIN_TYPE in ctx.toolchains and ctx.toolchains[PYCROSS_TOOLCHAIN_TYPE]: + pycross_info = ctx.toolchains[PYCROSS_TOOLCHAIN_TYPE].pycross_info + args.add("--exec-python-executable", pycross_info.exec_python_executable) + args.add("--target-python-executable", pycross_info.target_python_executable) + if pycross_info.target_sys_path: + args.add_all(pycross_info.target_sys_path, before_each = "--target-sys-path") + if pycross_info.exec_python_files: + tools.append(pycross_info.exec_python_files) + if pycross_info.target_python_files: + tools.append(pycross_info.target_python_files) + + # Otherwise we use the configured Python toolchain. + else: + executable = py_toolchain.interpreter_path + if not executable: + executable = py_toolchain.interpreter.path + args.add("--exec-python-executable", executable) + args.add("--target-python-executable", executable) + +def _handle_sdist(ctx, args, inputs): # -> PycrossWheelInfo + inputs.append(ctx.file.sdist) + args.add("--sdist", ctx.file.sdist) + + sdist_name = ctx.file.sdist.basename + if sdist_name.lower().endswith(".tar.gz"): + wheel_name = sdist_name[:-7] + else: + wheel_name = sdist_name.rsplit(".", 1)[0] # Also includes .zip + + out_wheel = ctx.actions.declare_file(paths.join(ctx.attr.name, wheel_name + ".whl")) + out_wheel_name = ctx.actions.declare_file(paths.join(ctx.attr.name, wheel_name + ".whl.name")) + + args.add("--wheel-file", out_wheel) + args.add("--wheel-name-file", out_wheel_name) + + return PycrossWheelInfo( + wheel_file = out_wheel, + name_file = out_wheel_name, + ) + +def _handle_sysconfig_data(ctx, args, inputs): # -> cc_vars + cc_sysconfig_data = ctx.actions.declare_file(paths.join(ctx.attr.name, "cc_sysconfig.json")) + cc_vars = get_env_vars(ctx) + copts = [_expand_locations_and_vars("copts", ctx, copt) for copt in ctx.attr.copts] + linkopts = [_expand_locations_and_vars("linkopts", ctx, linkopt) for linkopt in ctx.attr.linkopts] + flags = get_flags_info(ctx, copts, linkopts) + tools = 
get_tools_info(ctx) + sysconfig_vars = _get_sysconfig_data(ctx.workspace_name, tools, flags) + ctx.actions.write(cc_sysconfig_data, json.encode(sysconfig_vars)) + + inputs.append(cc_sysconfig_data) + args.add("--sysconfig-vars", cc_sysconfig_data) + + return cc_vars + +def _handle_py_deps(ctx, args, tools): + imports = depset(transitive = [d[PyInfo].imports for d in ctx.attr.deps]) + args.add_all(imports, before_each = "--python-path", map_each = _resolve_import_path_fn(ctx), allow_closure = True) + tools.extend([dep[PyInfo].transitive_sources for dep in ctx.attr.deps]) + +def _handle_native_deps(ctx, args, tools): + for dep in ctx.attr.native_deps: + if CcInfo not in dep: + continue + ccinfo = dep[CcInfo] + + headers_and_includes = get_headers(ccinfo) + tools.append(ccinfo.compilation_context.headers) + args.add_all(headers_and_includes.include_dirs, before_each = "--native-include-path") + args.add_all(headers_and_includes.headers, before_each = "--native-header", expand_directories = False) + + libraries = get_libraries(ccinfo) + tools.append(depset(libraries)) + args.add_all(libraries, before_each = "--native-library") + +def _handle_target_environment(ctx, args, inputs): + if not ctx.attr.target_environment: + return + target_environment_file = ctx.file.target_environment + args.add("--target-environment-file", target_environment_file.path) + inputs.append(target_environment_file) + +def _handle_build_env(ctx, args, inputs): + if not ctx.attr.build_env: + return + build_env_data = ctx.actions.declare_file(paths.join(ctx.attr.name, "build_env.json")) + args.add("--build-env", build_env_data) + inputs.append(build_env_data) + vals = {} + for key, value in ctx.attr.build_env.items(): + vals[key] = _expand_locations_and_vars("build_env", ctx, value) + ctx.actions.write(build_env_data, json.encode(vals)) + +def _handle_config_settings(ctx, args, inputs): + if not ctx.attr.config_settings: + return + config_settings_data = 
ctx.actions.declare_file(paths.join(ctx.attr.name, "config_settings.json")) + args.add("--config-settings", config_settings_data) + inputs.append(config_settings_data) + vals = {} + for key, value in ctx.attr.config_settings.items(): + vals[key] = [_expand_locations_and_vars("config_settings", ctx, vi) for vi in value] + ctx.actions.write(config_settings_data, json.encode(vals)) + +def _handle_tools_and_data(ctx, args, tools, input_manifests): + tools.extend([data[DefaultInfo].files for data in ctx.attr.data]) + + if ctx.attr.pre_build_hooks: + args.add_all(ctx.attr.pre_build_hooks, before_each = "--pre-build-hook", map_each = _executable) + tool_inputs, tool_manifests = ctx.resolve_tools(tools = ctx.attr.pre_build_hooks) + tools.extend([tool_inputs]) + input_manifests.extend(tool_manifests) + + if ctx.attr.post_build_hooks: + args.add_all(ctx.attr.post_build_hooks, before_each = "--post-build-hook", map_each = _executable) + tool_inputs, tool_manifests = ctx.resolve_tools(tools = ctx.attr.post_build_hooks) + tools.extend([tool_inputs]) + input_manifests.extend(tool_manifests) + + if ctx.attr.path_tools: + for tool, name in ctx.attr.path_tools.items(): + args.add_all("--path-tool", [name, _executable(tool)]) + tool_inputs, tool_manifests = ctx.resolve_tools(tools = ctx.attr.path_tools.keys()) + tools.extend([tool_inputs]) + input_manifests.extend(tool_manifests) + +def _pycross_wheel_build_impl(ctx): + args = ctx.actions.args().use_param_file("--flagfile=%s") + inputs = [] + tools = [] + input_manifests = [] + + pycross_wheel_info = _handle_sdist(ctx, args, inputs) + cc_vars = _handle_sysconfig_data(ctx, args, inputs) + _handle_toolchains(ctx, args, tools) + _handle_py_deps(ctx, args, tools) + _handle_native_deps(ctx, args, tools) + _handle_target_environment(ctx, args, inputs) + + _handle_build_env(ctx, args, inputs) + _handle_config_settings(ctx, args, inputs) + + _handle_tools_and_data(ctx, args, tools, input_manifests) + + env = dict(cc_vars) + 
# ---- wheel_build.bzl (continued): tail of _pycross_wheel_build_impl ----
# The function header is above this chunk; these lines register the wheel
# build action and return the rule's providers.
    # Layer the default shell environment on top of the explicitly built env.
    env.update(ctx.configuration.default_shell_env)

    ctx.actions.run(
        inputs = inputs,
        outputs = [pycross_wheel_info.wheel_file, pycross_wheel_info.name_file],
        tools = depset(transitive = tools),
        input_manifests = input_manifests,
        executable = ctx.executable._tool,
        use_default_shell_env = False,
        env = env,
        arguments = [args],
        mnemonic = "WheelBuild",
        progress_message = "Building %s" % ctx.file.sdist.basename,
    )

    return [
        pycross_wheel_info,
        DefaultInfo(
            files = depset(
                direct = [pycross_wheel_info.wheel_file],
            ),
        ),
        OutputGroupInfo(
            all_files = depset([
                pycross_wheel_info.wheel_file,
                pycross_wheel_info.name_file,
            ]),
        ),
    ]

def _pycross_toolchains():
    """Return the toolchain requirements for pycross_wheel_build.

    When this Bazel version supports optional toolchains (i.e.
    `config_common.toolchain_type` exists), the pycross toolchain is
    requested as optional; otherwise only the Python toolchain type is
    requested. The C++ toolchain is always appended.
    """
    if hasattr(config_common, "toolchain_type"):
        # Optional toolchains are supported
        return [
            config_common.toolchain_type(PYTHON_TOOLCHAIN_TYPE, mandatory = True),
            config_common.toolchain_type(PYCROSS_TOOLCHAIN_TYPE, mandatory = False),
        ] + use_cpp_toolchain()
    else:
        return [PYTHON_TOOLCHAIN_TYPE] + use_cpp_toolchain()

pycross_wheel_build = rule(
    implementation = _pycross_wheel_build_impl,
    attrs = {
        "build_env": attr.string_dict(
            doc = (
                "Environment variables passed to the sdist build. " +
                "Values are subject to 'Make variable', location, and build_cwd_token expansion."
            ),
        ),
        "config_settings": attr.string_list_dict(
            doc = (
                "PEP 517 config settings passed to the sdist build. " +
                "Values are subject to 'Make variable', location, and build_cwd_token expansion."
            ),
        ),
        "copts": attr.string_list(
            doc = "Additional C compiler options.",
            default = [],
        ),
        "data": attr.label_list(
            doc = "Additional data and dependencies used by the build.",
            providers = [DefaultInfo],
            allow_files = True,
        ),
        "deps": attr.label_list(
            doc = "A list of Python build dependencies for the wheel.",
            providers = [PyInfo],
        ),
        "linkopts": attr.string_list(
            doc = "Additional C linker options.",
            default = [],
        ),
        "native_deps": attr.label_list(
            doc = "A list of native build dependencies (CcInfo) for the wheel.",
            providers = [CcInfo],
        ),
        "path_tools": attr.label_keyed_string_dict(
            doc = (
                "A mapping of binaries to names that are placed in PATH when building the sdist."
            ),
            cfg = "exec",
        ),
        "post_build_hooks": attr.label_list(
            doc = (
                "A list of binaries that are executed after the wheel is built."
            ),
            cfg = "exec",
        ),
        "pre_build_hooks": attr.label_list(
            doc = (
                "A list of binaries that are executed prior to building the sdist."
            ),
            cfg = "exec",
        ),
        "sdist": attr.label(
            doc = "The sdist file.",
            allow_single_file = [".tar.gz", ".zip"],
            mandatory = True,
        ),
        "target_environment": attr.label(
            doc = "The target environment to build for.",
            allow_single_file = [".json"],
        ),
        "_cc_toolchain": attr.label(
            default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"),
        ),
        "_tool": attr.label(
            default = Label("//pycross/private/tools:wheel_builder"),
            cfg = "exec",
            executable = True,
        ),
    },
    toolchains = _pycross_toolchains(),
    fragments = ["cpp"],
    host_fragments = ["cpp"],
)

# ---- wheel_library.bzl ----
"""Implementation of the pycross_wheel_library rule."""

load("@bazel_skylib//lib:paths.bzl", "paths")
load("@rules_python//python:py_info.bzl", "PyInfo")
load(":providers.bzl", "PycrossWheelInfo")

def _pycross_wheel_library_impl(ctx):
    """Install a wheel into an output directory and expose it via PyInfo."""
    out = ctx.actions.declare_directory(ctx.attr.name)

    # Prefer the PycrossWheelInfo provider (built wheels) over a plain file.
    wheel_target = ctx.attr.wheel
    if PycrossWheelInfo in wheel_target:
        wheel_file = wheel_target[PycrossWheelInfo].wheel_file
        name_file = wheel_target[PycrossWheelInfo].name_file
    else:
        wheel_file = ctx.file.wheel
        name_file = None

    args = ctx.actions.args().use_param_file("--flagfile=%s")
    args.add("--wheel", wheel_file)
    args.add("--directory", out.path)

    inputs = [wheel_file]
    if name_file:
        inputs.append(name_file)
        args.add("--wheel-name-file", name_file)

    if ctx.attr.enable_implicit_namespace_pkgs:
        args.add("--enable-implicit-namespace-pkgs")

    for install_exclude_glob in ctx.attr.install_exclude_globs:
        args.add("--install-exclude-glob", install_exclude_glob)

    ctx.actions.run(
        inputs = inputs,
        outputs = [out],
        executable = ctx.executable._tool,
        arguments = [args],
        # Set environment variables to make generated .pyc files reproducible.
        env = {
            "PYTHONHASHSEED": "0",
            "SOURCE_DATE_EPOCH": "315532800",
        },
        mnemonic = "WheelInstall",
        progress_message = "Installing %s" % ctx.file.wheel.basename,
    )

    # Compute py2/py3-only flags from this wheel's declared version plus deps.
    has_py2_only_sources = ctx.attr.python_version == "PY2"
    has_py3_only_sources = ctx.attr.python_version == "PY3"
    if not has_py2_only_sources:
        for d in ctx.attr.deps:
            if d[PyInfo].has_py2_only_sources:
                has_py2_only_sources = True
                break
    if not has_py3_only_sources:
        for d in ctx.attr.deps:
            if d[PyInfo].has_py3_only_sources:
                has_py3_only_sources = True
                break

    # TODO: Is there a more correct way to get this runfiles-relative import path?
    imp = paths.join(
        ctx.label.workspace_name or ctx.workspace_name,  # Default to the local workspace.
        ctx.label.package,
        ctx.label.name,
        "site-packages",  # we put lib files in this subdirectory.
    )

    imports = depset(
        direct = [imp],
        transitive = [d[PyInfo].imports for d in ctx.attr.deps],
    )
    transitive_sources = depset(
        direct = [out],
        transitive = [dep[PyInfo].transitive_sources for dep in ctx.attr.deps if PyInfo in dep],
    )
    runfiles = ctx.runfiles(files = [out])
    for d in ctx.attr.deps:
        runfiles = runfiles.merge(d[DefaultInfo].default_runfiles)

    return [
        DefaultInfo(
            files = depset(direct = [out]),
            runfiles = runfiles,
        ),
        PyInfo(
            has_py2_only_sources = has_py2_only_sources,
            has_py3_only_sources = has_py3_only_sources,
            imports = imports,
            transitive_sources = transitive_sources,
            uses_shared_libraries = True,  # Docs say this is unused
        ),
    ]

pycross_wheel_library = rule(
    implementation = _pycross_wheel_library_impl,
    attrs = {
        "deps": attr.label_list(
            doc = "A list of this wheel's Python library dependencies.",
            providers = [DefaultInfo, PyInfo],
        ),
        "enable_implicit_namespace_pkgs": attr.bool(
            default = True,
            doc = """
If true, disables conversion of native namespace packages into pkg-util style namespace packages. When set all py_binary
and py_test targets must specify either `legacy_create_init=False` or the global Bazel option
`--incompatible_default_to_explicit_init_py` to prevent `__init__.py` being automatically generated in every directory.
This option is required to support some packages which cannot handle the conversion to pkg-util style.
    """,
        ),
        "install_exclude_globs": attr.string_list(
            doc = "A list of globs for files to exclude during installation.",
        ),
        "python_version": attr.string(
            doc = "The python version required for this wheel ('PY2' or 'PY3')",
            values = ["PY2", "PY3", ""],
        ),
        "wheel": attr.label(
            doc = "The wheel file.",
            allow_single_file = [".whl"],
            mandatory = True,
        ),
        "_tool": attr.label(
            default = Label("//pycross/private/tools:wheel_installer"),
            cfg = "exec",
            executable = True,
        ),
    },
)

# ---- wheel_zipimport_library.bzl ----
"""Implementation of the pycross_wheel_zipimport_library rule."""

load("@bazel_skylib//lib:paths.bzl", "paths")
load("@rules_python//python:py_info.bzl", "PyInfo")

def _pycross_wheel_zipimport_library_impl(ctx):
    """Expose a wheel file directly on the import path (zipimport-style)."""
    wheel_label = ctx.file.wheel.owner or ctx.attr.wheel.label
    wheel_file = ctx.file.wheel

    # has_py3_only_sources is fixed True for this rule, so only deps need to
    # be scanned for py2-only sources. (A second scan guarded by
    # `if not has_py3_only_sources:` was unreachable and has been removed.)
    has_py2_only_sources = False
    has_py3_only_sources = True
    for d in ctx.attr.deps:
        if d[PyInfo].has_py2_only_sources:
            has_py2_only_sources = True
            break

    # TODO: Is there a more correct way to get this runfiles-relative import path?
    imp = paths.join(
        wheel_label.workspace_name or ctx.workspace_name,  # Default to the local workspace.
        wheel_label.package,
        wheel_label.name,
    )

    imports = depset(
        direct = [imp],
        transitive = [d[PyInfo].imports for d in ctx.attr.deps],
    )
    transitive_sources = depset(
        direct = [wheel_file],
        transitive = [dep[PyInfo].transitive_sources for dep in ctx.attr.deps if PyInfo in dep],
    )
    runfiles = ctx.runfiles(files = [wheel_file])
    for d in ctx.attr.deps:
        runfiles = runfiles.merge(d[DefaultInfo].default_runfiles)

    return [
        DefaultInfo(
            files = depset(direct = [wheel_file]),
            runfiles = runfiles,
        ),
        PyInfo(
            has_py2_only_sources = has_py2_only_sources,
            has_py3_only_sources = has_py3_only_sources,
            imports = imports,
            transitive_sources = transitive_sources,
            uses_shared_libraries = True,  # Docs say this is unused
        ),
    ]

pycross_wheel_zipimport_library = rule(
    implementation = _pycross_wheel_zipimport_library_impl,
    attrs = {
        "deps": attr.label_list(
            doc = "A list of this wheel's Python library dependencies.",
            providers = [DefaultInfo, PyInfo],
        ),
        "wheel": attr.label(
            doc = "The wheel file.",
            allow_single_file = [".whl"],
            mandatory = True,
        ),
    },
)

# ---- repositories.bzl ----
"""Declare runtime dependencies"""

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("//pycross/private:internal_repo.bzl", "create_internal_repo")
load("//pycross/private:pycross_deps.lock.bzl", pypi_all_repositories = "repositories")
load("//pycross/private:pycross_deps_core.lock.bzl", core_files = "FILES")

# The python_interpreter_target was previously used when pip_install was used for pycross' own
# dependencies. Leaving it here in case we need it in the future.
# buildifier: disable=unused-variable
def rules_pycross_dependencies(python_interpreter_target = None, python_interpreter = None):
    """Declare the repositories that rules_pycross itself depends on.

    Args:
        python_interpreter_target: retained for API compatibility (see note above).
        python_interpreter: retained for API compatibility (see note above).
    """
    # The minimal version of bazel_skylib we require
    maybe(
        http_archive,
        name = "bazel_skylib",
        urls = [
            "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz",
            "https://github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz",
        ],
        sha256 = "f7be3474d42aae265405a592bb7da8e171919d74c16f082a5457840f06054728",
    )

    pypi_all_repositories()
    create_internal_repo(
        python_interpreter_target = python_interpreter_target,
        python_interpreter = python_interpreter,
        wheels = core_files,
    )

# ---- toolchain.bzl ----
"""This module implements the language-specific toolchain rule.
"""

PycrossBuildExecRuntimeInfo = provider(
    doc = "Extended information about a (exec, target) Python interpreter pair.",
    fields = {
        "exec_python_executable": "The path to the exec Python interpreter, either absolute or relative to execroot.",
        "exec_python_files": "A depset containing all files for the exec interpreter.",
        "target_python_executable": "The path to the target Python interpreter, either absolute or relative to execroot.",
        "target_python_files": "A depset containing all files for the target interpreter.",
        "target_sys_path": "An array of system path directories (i.e., the value of sys.path from `python -m site`).",
    },
)

def _pycross_hermetic_toolchain_impl(ctx):
    """Build the ToolchainInfo pairing an exec and a target Python runtime."""
    exec_runtime = ctx.attr.exec_interpreter[PyRuntimeInfo]
    target_runtime = ctx.attr.target_interpreter[PyRuntimeInfo]

    # NOTE(review): PyRuntimeInfo.interpreter is only set for in-build
    # (hermetic) runtimes; presumably callers never pass a platform runtime
    # here — confirm.
    info = PycrossBuildExecRuntimeInfo(
        exec_python_files = exec_runtime.files,
        exec_python_executable = exec_runtime.interpreter.path,
        target_python_files = target_runtime.files,
        target_python_executable = target_runtime.interpreter.path,
        target_sys_path = None,
    )

    return [
        platform_common.ToolchainInfo(
            pycross_info = info,
        ),
    ]

pycross_hermetic_toolchain = rule(
    implementation = _pycross_hermetic_toolchain_impl,
    attrs = {
        "exec_interpreter": attr.label(
            doc = "The execution Python interpreter (PyRuntimeInfo).",
            mandatory = True,
            providers = [PyRuntimeInfo],
            cfg = "exec",
        ),
        "target_interpreter": attr.label(
            doc = "The target Python interpreter (PyRuntimeInfo).",
            mandatory = True,
            providers = [PyRuntimeInfo],
            cfg = "target",
        ),
    },
)

def config_compatible(config_setting_target):
    """Return a target_compatible_with list gated on a config setting.

    Evaluates to an empty constraint list when config_setting_target matches,
    and to `@platforms//:incompatible` otherwise.
    """
    return select(
        {
            config_setting_target: [],
            "//conditions:default": ["@platforms//:incompatible"],
        },
    )
# ---- workspace.bzl ----
"Public repository rule API re-exports"

load("//pycross/private:lock_file_repo.bzl", _pycross_lock_file_repo = "pycross_lock_file_repo")
load("//pycross/private:lock_repo.bzl", _pycross_lock_repo = "pycross_lock_repo")
load(
    "//pycross/private:pdm_lock_model.bzl",
    _lock_repo_model_pdm = "lock_repo_model_pdm",
)
load(
    "//pycross/private:poetry_lock_model.bzl",
    _lock_repo_model_poetry = "lock_repo_model_poetry",
)
load("//pycross/private:toolchain_helpers.bzl", _pycross_register_for_python_toolchains = "pycross_register_for_python_toolchains")

# Re-export the private implementations under their stable public names so
# workspace users load only this file rather than //pycross/private:*.
lock_repo_model_pdm = _lock_repo_model_pdm
lock_repo_model_poetry = _lock_repo_model_poetry
pycross_lock_file_repo = _pycross_lock_file_repo
pycross_lock_repo = _pycross_lock_repo
pycross_register_for_python_toolchains = _pycross_register_for_python_toolchains