diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml index 84df9b7005..d9dd618908 100644 --- a/.bazelci/presubmit.yml +++ b/.bazelci/presubmit.yml @@ -21,8 +21,8 @@ buildifier: .minimum_supported_version: &minimum_supported_version # For testing minimum supported version. # NOTE: Keep in sync with //:version.bzl - bazel: 5.4.0 - skip_in_bazel_downstream_pipeline: "Bazel 5 required" + bazel: 6.2.0 + skip_in_bazel_downstream_pipeline: "Bazel 6 required" .minimum_supported_bzlmod_version: &minimum_supported_bzlmod_version bazel: 6.2.0 # test minimum supported version of bazel for bzlmod tests .reusable_config: &reusable_config @@ -64,10 +64,7 @@ buildifier: - //tests:version_3_9_test - //tests:version_default_test .pystar_base: &pystar_base - # TODO: Change to "7.x" once Bazel 7 is available - # https://github.com/bazelbuild/bazel/commit/f3aafea59ae021c6a12086cb2cd34c5fa782faf1 - # is available in rolling. - bazel: "last_rc" + bazel: "7.x" environment: RULES_PYTHON_ENABLE_PYSTAR: "1" test_flags: diff --git a/.bazelignore b/.bazelignore index 564eb06195..9104609b02 100644 --- a/.bazelignore +++ b/.bazelignore @@ -13,6 +13,10 @@ bazel-testlogs examples/bzlmod/bazel-bzlmod examples/bzlmod/other_module/bazel-other_module examples/bzlmod_build_file_generation/bazel-bzlmod_build_file_generation +examples/multi_python_versions/bazel-multi_python_versions examples/pip_parse/bazel-pip_parse +examples/pip_parse_vendored/bazel-pip_parse_vendored examples/py_proto_library/bazel-py_proto_library +tests/compile_pip_requirements/bazel-compile_pip_requirements tests/ignore_root_user_error/bazel-ignore_root_user_error +tests/pip_repository_entry_points/bazel-pip_repository_entry_points diff --git a/.bazelrc b/.bazelrc index 631bd10a0f..fd2e442d48 100644 --- a/.bazelrc +++ b/.bazelrc @@ -3,8 +3,8 @@ # This lets us glob() up all the files inside the examples to make them inputs to tests # (Note, we cannot use `common --deleted_packages` because the bazel version command doesn't 
support it) # To update these lines, run tools/bazel_integration_test/update_deleted_packages.sh -build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,tests/compile_pip_requirements,tests/compile_pip_requirements_test_from_external_workspace,tests/ignore_root_user_error,tests/pip_repository_entry_points -query 
--deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,tests/compile_pip_requirements,tests/compile_pip_requirements_test_from_external_workspace,tests/ignore_root_user_error,tests/pip_repository_entry_points +build 
--deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/dupe_requirements,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,tests/compile_pip_requirements,tests/compile_pip_requirements_test_from_external_workspace,tests/ignore_root_user_error,tests/pip_repository_entry_points +query 
--deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/dupe_requirements,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,tests/compile_pip_requirements,tests/compile_pip_requirements_test_from_external_workspace,tests/ignore_root_user_error,tests/pip_repository_entry_points test --test_output=errors @@ -25,5 +25,8 @@ startup --windows_enable_symlinks common --noexperimental_enable_bzlmod # Additional config to use for readthedocs builds. -# See .readthedocs.yml for additional flags +# See .readthedocs.yml for additional flags that can only be determined from +# the runtime environment. 
build:rtd --stamp +# Some bzl files contain repos only available under bzlmod +build:rtd --enable_bzlmod diff --git a/.bazelversion b/.bazelversion index 6abaeb2f90..66ce77b7ea 100644 --- a/.bazelversion +++ b/.bazelversion @@ -1 +1 @@ -6.2.0 +7.0.0 diff --git a/.github/workflows/create_archive_and_notes.sh b/.github/workflows/create_archive_and_notes.sh index f7a291a6be..ffeecd5800 100755 --- a/.github/workflows/create_archive_and_notes.sh +++ b/.github/workflows/create_archive_and_notes.sh @@ -37,7 +37,8 @@ bazel_dep(name = "rules_python", version = "${TAG}") pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip") pip.parse( - name = "pip", + hub_name = "pip", + python_version = "3.11", requirements_lock = "//:requirements_lock.txt", ) diff --git a/BUILD.bazel b/BUILD.bazel index 8dd2242dcf..007e524146 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -24,6 +24,11 @@ exports_files([ "version.bzl", ]) +exports_files( + glob(["*.md"]), + visibility = ["//docs:__subpackages__"], +) + filegroup( name = "distribution", srcs = [ diff --git a/CHANGELOG.md b/CHANGELOG.md index 1937fe549d..b032f4e427 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,6 +36,21 @@ A brief description of the categories of changes: * (gazelle) The gazelle plugin helper was not working with Python toolchains 3.11 and above due to a bug in the helper components not being on PYTHONPATH. +* (pip_parse) The repositories created by `whl_library` can now parse the `whl` + METADATA and generate dependency closures irrespective of the host platform + the generation is executed on. This can be turned on by supplying + `experimental_target_platforms = ["all"]` to the `pip_parse` or the `bzlmod` + equivalent. This may help in cases where fetching wheels for a different + platform using the `download_only = True` feature is needed. +* (bzlmod pip.parse) The `pip.parse(python_interpreter)` arg now works for + specifying a local system interpreter.
+* (bzlmod pip.parse) Requirements files with duplicate entries for the same + package (e.g. one for the package, one for an extra) now work. + +### Added + +* (docs) bzlmod extensions are now documented on rules-python.readthedocs.io + [0.XX.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.XX.0 ## [0.27.0] - 2023-11-16 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bf3a496158..c6532d9ac5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -65,10 +65,6 @@ and setup. Subsequent runs will be faster, but there are many tests, and some of them are slow. If you're working on a particular area of code, you can run just the tests in those directories instead, which can speed up your edit-run cycle. -Note that there are tests to verify generated documentation is correct -- if -you're modifying the signature of a public function, these tests will likely -fail and you'll need to [regenerate the api docs](#documentation). - ## Formatting Starlark files should be formatted by @@ -150,17 +146,6 @@ For the full details of types, see Some checked-in files are generated and need to be updated when a new PR is merged. -### Documentation - -To regenerate the content under the `docs/` directory, run this command: - -```shell -bazel run //docs:update -``` - -This needs to be done whenever the docstrings in the corresponding .bzl files -are changed; a test failure will remind you to run this command when needed. - ## Core rules The bulk of this repo is owned and maintained by the Bazel Python community. 
diff --git a/MODULE.bazel b/MODULE.bazel index 9aaeaf62dd..f53815c72e 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -56,12 +56,12 @@ use_repo(python, "pythons_hub") register_toolchains("@pythons_hub//:all") # ===== DEV ONLY SETUP ===== -docs_pip = use_extension( +dev_pip = use_extension( "//python/extensions:pip.bzl", "pip", dev_dependency = True, ) -docs_pip.parse( +dev_pip.parse( experimental_requirement_cycles = { "sphinx": [ "sphinx", @@ -72,7 +72,7 @@ docs_pip.parse( "sphinxcontrib-applehelp", ], }, - hub_name = "docs_deps", + hub_name = "dev_pip", python_version = "3.11", requirements_darwin = "//docs/sphinx:requirements_darwin.txt", requirements_lock = "//docs/sphinx:requirements_linux.txt", diff --git a/WORKSPACE b/WORKSPACE index 074a7b9ed2..b8e778e4f6 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -109,7 +109,7 @@ install_deps() # Install sphinx for doc generation. pip_parse( - name = "docs_deps", + name = "dev_pip", experimental_requirement_cycles = { "sphinx": [ "sphinx", @@ -126,7 +126,7 @@ pip_parse( requirements_lock = "//docs/sphinx:requirements_linux.txt", ) -load("@docs_deps//:requirements.bzl", docs_install_deps = "install_deps") +load("@dev_pip//:requirements.bzl", docs_install_deps = "install_deps") docs_install_deps() @@ -140,3 +140,9 @@ http_file( "https://files.pythonhosted.org/packages/50/67/3e966d99a07d60a21a21d7ec016e9e4c2642a86fea251ec68677daf71d4d/numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", ], ) + +# rules_proto expects //external:python_headers to point at the python headers. +bind( + name = "python_headers", + actual = "//python/cc:current_py_cc_headers", +) diff --git a/docs/sphinx/BUILD.bazel b/docs/sphinx/BUILD.bazel index 832a58ec9e..4c14aee99f 100644 --- a/docs/sphinx/BUILD.bazel +++ b/docs/sphinx/BUILD.bazel @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-load("@docs_deps//:requirements.bzl", "requirement") -load("@rules_python//python:pip.bzl", "compile_pip_requirements") +load("@dev_pip//:requirements.bzl", "requirement") +load("//python:pip.bzl", "compile_pip_requirements") +load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED") # buildifier: disable=bzl-visibility +load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER") # buildifier: disable=bzl-visibility load("//sphinxdocs:readthedocs.bzl", "readthedocs_install") load("//sphinxdocs:sphinx.bzl", "sphinx_build_binary", "sphinx_docs", "sphinx_inventory") load("//sphinxdocs:sphinx_stardoc.bzl", "sphinx_stardocs") @@ -55,6 +57,10 @@ sphinx_docs( formats = [ "html", ], + renamed_srcs = { + "//:CHANGELOG.md": "changelog.md", + "//:CONTRIBUTING.md": "contributing.md", + }, sphinx = ":sphinx-build", strip_prefix = package_name() + "/", tags = ["docs"], @@ -79,7 +85,13 @@ sphinx_stardocs( "api/entry_points/py_console_script_binary.md": "//python/entry_points:py_console_script_binary_bzl", "api/packaging.md": "//python:packaging_bzl", "api/pip.md": "//python:pip_bzl", - }, + } | ({ + # Bazel 6 + Stardoc isn't able to parse something about the python bzlmod extension + "api/extensions/python.md": "//python/extensions:python_bzl", + } if IS_BAZEL_7_OR_HIGHER else {}) | ({ + # This depends on @pythons_hub, which is only created under bzlmod + "api/extensions/pip.md": "//python/extensions:pip_bzl", + } if BZLMOD_ENABLED else {}), footer = "_stardoc_footer.md", tags = ["docs"], target_compatible_with = _TARGET_COMPATIBLE_WITH, diff --git a/docs/sphinx/_stardoc_footer.md b/docs/sphinx/_stardoc_footer.md index 65d74f4d5e..7aa33f778f 100644 --- a/docs/sphinx/_stardoc_footer.md +++ b/docs/sphinx/_stardoc_footer.md @@ -7,6 +7,8 @@ [`Label`]: https://bazel.build/rules/lib/Label [`list`]: https://bazel.build/rules/lib/list [`str`]: https://bazel.build/rules/lib/string +[str]: https://bazel.build/rules/lib/string +[`int`]: https://bazel.build/rules/lib/int [`struct`]: 
https://bazel.build/rules/lib/builtins/struct [`Target`]: https://bazel.build/rules/lib/Target [target-name]: https://bazel.build/concepts/labels#target-names diff --git a/docs/sphinx/_static/css/custom.css b/docs/sphinx/_static/css/custom.css index c97d2f525c..4b073d4cd2 100644 --- a/docs/sphinx/_static/css/custom.css +++ b/docs/sphinx/_static/css/custom.css @@ -12,8 +12,17 @@ border-bottom: thin solid grey; padding-left: 0.5ex; } +.starlark-object h3 { + background-color: #e7f2fa; + padding-left: 0.5ex; +} + +.starlark-module-extension-tag-class h3 { + background-color: #add8e6; + padding-left: 0.5ex; +} -.starlark-object>p, .starlark-object>dl { +.starlark-object>p, .starlark-object>dl, .starlark-object>section>* { /* Prevent the words from touching the border line */ padding-left: 0.5ex; } diff --git a/docs/sphinx/index.md b/docs/sphinx/index.md index a84dab50b3..bec652a0ca 100644 --- a/docs/sphinx/index.md +++ b/docs/sphinx/index.md @@ -60,6 +60,8 @@ pypi-dependencies pip coverage gazelle +Contributing +Changelog api/index glossary genindex diff --git a/docs/sphinx/pyproject.toml b/docs/sphinx/pyproject.toml index 02e0f36496..d36c9f269c 100644 --- a/docs/sphinx/pyproject.toml +++ b/docs/sphinx/pyproject.toml @@ -9,4 +9,5 @@ dependencies = [ "myst-parser", "sphinx_rtd_theme", "readthedocs-sphinx-ext", + "absl-py", ] diff --git a/docs/sphinx/readthedocs_build.sh b/docs/sphinx/readthedocs_build.sh index e6908a3ca4..c611b7c4fb 100755 --- a/docs/sphinx/readthedocs_build.sh +++ b/docs/sphinx/readthedocs_build.sh @@ -14,6 +14,7 @@ extra_env+=("--//sphinxdocs:extra_env=HOSTNAME=$HOSTNAME") set -x bazel run \ + --config=rtd \ "--//sphinxdocs:extra_defines=version=$READTHEDOCS_VERSION" \ "${extra_env[@]}" \ //docs/sphinx:readthedocs_install diff --git a/docs/sphinx/requirements_linux.txt b/docs/sphinx/requirements_linux.txt index 429ddd46ca..85c61f37b2 100644 --- a/docs/sphinx/requirements_linux.txt +++ b/docs/sphinx/requirements_linux.txt @@ -4,6 +4,10 @@ # # bazel run 
//docs/sphinx:requirements.update # +absl-py==2.0.0 \ + --hash=sha256:9a28abb62774ae4e8edbe2dd4c49ffcd45a6a848952a5eccc6a49f3f0fc1e2f3 \ + --hash=sha256:d9690211c5fcfefcdd1a45470ac2b5c5acd45241c3af71eed96bc5441746c0d5 + # via rules-python-docs (docs/sphinx/pyproject.toml) alabaster==0.7.13 \ --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \ --hash=sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2 diff --git a/examples/BUILD.bazel b/examples/BUILD.bazel index 35c88cc3fd..e11728ed28 100644 --- a/examples/BUILD.bazel +++ b/examples/BUILD.bazel @@ -11,57 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -load("//tools/bazel_integration_test:bazel_integration_test.bzl", "bazel_integration_test") package(default_visibility = ["//visibility:public"]) licenses(["notice"]) # Apache 2.0 - -bazel_integration_test( - name = "build_file_generation_example", - timeout = "long", -) - -bazel_integration_test( - name = "bzlmod_build_file_generation_example", - timeout = "long", -) - -bazel_integration_test( - name = "pip_parse_example", - timeout = "long", -) - -bazel_integration_test( - name = "pip_parse_vendored_example", - timeout = "long", - tags = ["fix-windows"], -) - -bazel_integration_test( - name = "pip_repository_annotations_example", - timeout = "long", -) - -bazel_integration_test( - name = "py_proto_library_example", - timeout = "long", -) - -bazel_integration_test( - name = "py_proto_library_example_bzlmod", - timeout = "long", - bzlmod = True, - dirname = "py_proto_library", -) - -bazel_integration_test( - name = "multi_python_versions_example", - timeout = "long", -) - -bazel_integration_test( - name = "bzlmod_example", - bzlmod = True, - override_bazel_version = "6.2.0", -) diff --git a/examples/bzlmod/BUILD.bazel b/examples/bzlmod/BUILD.bazel index 6a4fdb8c4f..bb16f98a6f 100644 
--- a/examples/bzlmod/BUILD.bazel +++ b/examples/bzlmod/BUILD.bazel @@ -25,6 +25,7 @@ compile_pip_requirements_3_9( # with pip-compile. compile_pip_requirements_3_10( name = "requirements_3_10", + timeout = "moderate", src = "requirements.in", requirements_txt = "requirements_lock_3_10.txt", requirements_windows = "requirements_windows_3_10.txt", diff --git a/examples/bzlmod/MODULE.bazel b/examples/bzlmod/MODULE.bazel index 44d686e3dc..240bb28022 100644 --- a/examples/bzlmod/MODULE.bazel +++ b/examples/bzlmod/MODULE.bazel @@ -98,6 +98,13 @@ pip.parse( "sphinxcontrib-serializinghtml", ], }, + # You can use one of the values below to specify the target platform + # to generate the dependency graph for. + experimental_target_platforms = [ + "all", + "linux_*", + "host", + ], hub_name = "pip", python_version = "3.9", requirements_lock = "//:requirements_lock_3_9.txt", @@ -121,6 +128,13 @@ pip.parse( "sphinxcontrib-serializinghtml", ], }, + # You can use one of the values below to specify the target platform + # to generate the dependency graph for. 
+ experimental_target_platforms = [ + "all", + "linux_*", + "host", + ], hub_name = "pip", python_version = "3.10", requirements_lock = "//:requirements_lock_3_10.txt", @@ -153,3 +167,14 @@ local_path_override( module_name = "other_module", path = "other_module", ) + +# ===== +# Config for testing duplicate packages in requirements +# ===== +# +pip.parse( + hub_name = "dupe_requirements", + python_version = "3.9", # Must match whatever is marked is_default=True + requirements_lock = "//tests/dupe_requirements:requirements.txt", +) +use_repo(pip, "dupe_requirements") diff --git a/examples/bzlmod/requirements.in b/examples/bzlmod/requirements.in index ed177755ab..a713577f55 100644 --- a/examples/bzlmod/requirements.in +++ b/examples/bzlmod/requirements.in @@ -1,4 +1,4 @@ ---extra-index-url https://pypi.python.org/simple/ +--extra-index-url https://pypi.org/simple/ wheel websockets diff --git a/examples/bzlmod/requirements_lock_3_10.txt b/examples/bzlmod/requirements_lock_3_10.txt index a10d02e589..525fa3e962 100644 --- a/examples/bzlmod/requirements_lock_3_10.txt +++ b/examples/bzlmod/requirements_lock_3_10.txt @@ -4,7 +4,7 @@ # # bazel run //:requirements_3_10.update # ---extra-index-url https://pypi.python.org/simple/ +--extra-index-url https://pypi.org/simple/ alabaster==0.7.13 \ --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \ diff --git a/examples/bzlmod/requirements_lock_3_9.txt b/examples/bzlmod/requirements_lock_3_9.txt index 6fb57e11ca..e78562f5c2 100644 --- a/examples/bzlmod/requirements_lock_3_9.txt +++ b/examples/bzlmod/requirements_lock_3_9.txt @@ -4,7 +4,7 @@ # # bazel run //:requirements_3_9.update # ---extra-index-url https://pypi.python.org/simple/ +--extra-index-url https://pypi.org/simple/ alabaster==0.7.13 \ --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \ diff --git a/examples/bzlmod/requirements_windows_3_10.txt b/examples/bzlmod/requirements_windows_3_10.txt index 
60d4980ed7..05905e545f 100644 --- a/examples/bzlmod/requirements_windows_3_10.txt +++ b/examples/bzlmod/requirements_windows_3_10.txt @@ -4,7 +4,7 @@ # # bazel run //:requirements_3_10.update # ---extra-index-url https://pypi.python.org/simple/ +--extra-index-url https://pypi.org/simple/ alabaster==0.7.13 \ --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \ diff --git a/examples/bzlmod/requirements_windows_3_9.txt b/examples/bzlmod/requirements_windows_3_9.txt index 4d5c31e46e..a325101ba0 100644 --- a/examples/bzlmod/requirements_windows_3_9.txt +++ b/examples/bzlmod/requirements_windows_3_9.txt @@ -4,7 +4,7 @@ # # bazel run //:requirements_3_9.update # ---extra-index-url https://pypi.python.org/simple/ +--extra-index-url https://pypi.org/simple/ alabaster==0.7.13 \ --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \ diff --git a/examples/bzlmod/tests/dupe_requirements/BUILD.bazel b/examples/bzlmod/tests/dupe_requirements/BUILD.bazel new file mode 100644 index 0000000000..47eb7ca0fb --- /dev/null +++ b/examples/bzlmod/tests/dupe_requirements/BUILD.bazel @@ -0,0 +1,19 @@ +load("@rules_python//python:pip.bzl", "compile_pip_requirements") +load("@rules_python//python:py_test.bzl", "py_test") + +py_test( + name = "dupe_requirements_test", + srcs = ["dupe_requirements_test.py"], + deps = [ + "@dupe_requirements//pyjwt", + ], +) + +compile_pip_requirements( + name = "requirements", + src = "requirements.in", + requirements_txt = "requirements.txt", + # This is to make the requirements diff test not run on CI. The content we + # need in requirements.txt isn't exactly what will be generated. 
+ tags = ["manual"], +) diff --git a/examples/bzlmod/tests/dupe_requirements/dupe_requirements_test.py b/examples/bzlmod/tests/dupe_requirements/dupe_requirements_test.py new file mode 100644 index 0000000000..1139dc5252 --- /dev/null +++ b/examples/bzlmod/tests/dupe_requirements/dupe_requirements_test.py @@ -0,0 +1,4 @@ +# There's nothing to test at runtime. Building indicates success. +# Just import the relevant modules as a basic check. +import cryptography +import jwt diff --git a/examples/bzlmod/tests/dupe_requirements/requirements.in b/examples/bzlmod/tests/dupe_requirements/requirements.in new file mode 100644 index 0000000000..b1f623395a --- /dev/null +++ b/examples/bzlmod/tests/dupe_requirements/requirements.in @@ -0,0 +1,2 @@ +pyjwt +pyjwt[crypto] diff --git a/examples/bzlmod/tests/dupe_requirements/requirements.txt b/examples/bzlmod/tests/dupe_requirements/requirements.txt new file mode 100644 index 0000000000..785f556624 --- /dev/null +++ b/examples/bzlmod/tests/dupe_requirements/requirements.txt @@ -0,0 +1,97 @@ +# +# This file is manually tweaked output from the automatic generation. +# To generate: +# 1. bazel run //tests/dupe_requirements:requirements.update +# 2. 
Then copy/paste the pyjwt lines so there are duplicates +# +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 + # via -r tests/dupe_requirements/requirements.in +pyjwt[crypto]==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 + # via -r tests/dupe_requirements/requirements.in +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ 
--hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + 
--hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 + # via cryptography +cryptography==41.0.7 \ + --hash=sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960 \ + --hash=sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a \ + --hash=sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc \ + --hash=sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a \ + --hash=sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf \ + --hash=sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1 \ + --hash=sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39 \ + --hash=sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406 \ + --hash=sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a \ + --hash=sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a \ + --hash=sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c \ + --hash=sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be \ + 
--hash=sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15 \ + --hash=sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2 \ + --hash=sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d \ + --hash=sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157 \ + --hash=sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003 \ + --hash=sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248 \ + --hash=sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a \ + --hash=sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec \ + --hash=sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309 \ + --hash=sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7 \ + --hash=sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d + # via pyjwt +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi diff --git a/examples/bzlmod/whl_mods/BUILD.bazel b/examples/bzlmod/whl_mods/BUILD.bazel index 6ca07dd2d1..241d9c1073 100644 --- a/examples/bzlmod/whl_mods/BUILD.bazel +++ b/examples/bzlmod/whl_mods/BUILD.bazel @@ -9,8 +9,8 @@ py_test( name = "pip_whl_mods_test", srcs = ["pip_whl_mods_test.py"], env = { - "REQUESTS_PKG_DIR": "pip_39_requests", - "WHEEL_PKG_DIR": "pip_39_wheel", + "REQUESTS_PKG": "$(rlocationpaths @pip//requests:pkg)", + "WHEEL_PKG": "$(rlocationpaths @pip//wheel:pkg)", }, main = "pip_whl_mods_test.py", deps = [ diff --git a/examples/bzlmod/whl_mods/pip_whl_mods_test.py b/examples/bzlmod/whl_mods/pip_whl_mods_test.py index c739b805bd..a88134b150 100644 --- a/examples/bzlmod/whl_mods/pip_whl_mods_test.py +++ b/examples/bzlmod/whl_mods/pip_whl_mods_test.py @@ -27,22 +27,30 @@ class PipWhlModsTest(unittest.TestCase): maxDiff = None - def 
package_path(self) -> str: -        return "rules_python~override~pip~" +    @staticmethod +    def _get_bazel_pkg_dir_name(env_var: str) -> str: +        a_file = Path(os.environ.get(env_var).split(" ")[0]) +        head = a_file +        while head.parent.name: +            head = head.parent - -    def wheel_pkg_dir(self) -> str: -        env = os.environ.get("WHEEL_PKG_DIR") -        self.assertIsNotNone(env) -        return env +        return head.name + +    @classmethod +    def setUpClass(cls): +        cls._wheel_pkg_dir = cls._get_bazel_pkg_dir_name("WHEEL_PKG") +        cls._requests_pkg_dir = cls._get_bazel_pkg_dir_name("REQUESTS_PKG") + +    def wheel_pkg_dir(self) -> Path: +        return self._wheel_pkg_dir  def test_build_content_and_data(self): r = runfiles.Create() rpath = r.Rlocation( -            "{}{}/generated_file.txt".format( -                self.package_path(), -                self.wheel_pkg_dir(), -            ), -        ) +            "{}/generated_file.txt".format( +                self._wheel_pkg_dir, +            ), +        ) generated_file = Path(rpath) self.assertTrue(generated_file.exists()) @@ -52,11 +60,10 @@ def test_build_content_and_data(self): def test_copy_files(self): r = runfiles.Create() rpath = r.Rlocation( -            "{}{}/copied_content/file.txt".format( -                self.package_path(), -                self.wheel_pkg_dir(), -            ) -        ) +            "{}/copied_content/file.txt".format( +                self._wheel_pkg_dir, +            ) +        ) copied_file = Path(rpath) self.assertTrue(copied_file.exists()) @@ -64,14 +71,17 @@ def test_copy_files(self): self.assertEqual(content, "Hello world from copied file") def test_copy_executables(self): +        executable_name = ( +            "executable.exe" if platform.system() == "windows" else "executable.py" +        ) + r = runfiles.Create() rpath = r.Rlocation( -            "{}{}/copied_content/executable{}".format( -                self.package_path(), -                self.wheel_pkg_dir(), -                ".exe" if platform.system() == "windows" else ".py", -            ) -        ) +            "{}/copied_content/{}".format( +                self._wheel_pkg_dir, +                executable_name, +            ) +        ) executable = Path(rpath) self.assertTrue(executable.exists()) @@ -88,11 +98,10 @@ def test_data_exclude_glob(self): current_wheel_version = "0.40.0" r = runfiles.Create() -        dist_info_dir = 
"{}{}/site-packages/wheel-{}.dist-info".format( - self.package_path(), - self.wheel_pkg_dir(), - current_wheel_version, - ) + dist_info_dir = "{}/site-packages/wheel-{}.dist-info".format( + self._wheel_pkg_dir, + current_wheel_version, + ) # Note: `METADATA` is important as it's consumed by https://docs.python.org/3/library/importlib.metadata.html # `METADATA` is expected to be there to show dist-info files are included in the runfiles. @@ -101,24 +110,20 @@ def test_data_exclude_glob(self): # However, `WHEEL` was explicitly excluded, so it should be missing wheel_path = r.Rlocation("{}/WHEEL".format(dist_info_dir)) - self.assertTrue(Path(metadata_path).exists()) - self.assertFalse(Path(wheel_path).exists()) - - def requests_pkg_dir(self) -> str: - env = os.environ.get("REQUESTS_PKG_DIR") - self.assertIsNotNone(env) - return env + self.assertTrue(Path(metadata_path).exists(), f"Could not find {metadata_path}") + self.assertFalse( + Path(wheel_path).exists(), f"Expected to not find {wheel_path}" + ) def test_extra(self): # This test verifies that annotations work correctly for pip packages with extras # specified, in this case requests[security]. 
r = runfiles.Create() rpath = r.Rlocation( - "{}{}/generated_file.txt".format( - self.package_path(), - self.requests_pkg_dir(), - ), - ) + "{}/generated_file.txt".format( + self._requests_pkg_dir, + ), + ) generated_file = Path(rpath) self.assertTrue(generated_file.exists()) diff --git a/examples/pip_parse/MODULE.bazel b/examples/pip_parse/MODULE.bazel index 308a97efac..3977f8aa16 100644 --- a/examples/pip_parse/MODULE.bazel +++ b/examples/pip_parse/MODULE.bazel @@ -26,5 +26,6 @@ pip.parse( hub_name = "pypi", python_version = "3.9", requirements_lock = "//:requirements_lock.txt", + requirements_windows = "//:requirements_windows.txt", ) use_repo(pip, "pypi") diff --git a/examples/pip_repository_annotations/requirements.in b/examples/pip_repository_annotations/requirements.in index 29419a216c..c9afafc6f5 100644 --- a/examples/pip_repository_annotations/requirements.in +++ b/examples/pip_repository_annotations/requirements.in @@ -1,6 +1,6 @@ # This flag allows for regression testing requirements arguments in # `pip_repository` rules. 
---extra-index-url https://pypi.python.org/simple/ +--extra-index-url https://pypi.org/simple/ certifi>=2023.7.22 # https://security.snyk.io/vuln/SNYK-PYTHON-CERTIFI-5805047 wheel diff --git a/examples/pip_repository_annotations/requirements.txt b/examples/pip_repository_annotations/requirements.txt index 507d099c56..290d93e15c 100644 --- a/examples/pip_repository_annotations/requirements.txt +++ b/examples/pip_repository_annotations/requirements.txt @@ -4,7 +4,7 @@ # # bazel run //:requirements.update # ---extra-index-url https://pypi.python.org/simple/ +--extra-index-url https://pypi.org/simple/ certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ diff --git a/gazelle/python/BUILD.bazel b/gazelle/python/BUILD.bazel index 1d9460c347..fd051ebda6 100644 --- a/gazelle/python/BUILD.bazel +++ b/gazelle/python/BUILD.bazel @@ -1,6 +1,6 @@ load("@bazel_gazelle//:def.bzl", "gazelle_binary") load("@io_bazel_rules_go//go:def.bzl", "go_library") -load("@rules_python//python:defs.bzl", "py_binary") +load("@rules_python//python:defs.bzl", "py_binary", "py_test") load(":gazelle_test.bzl", "gazelle_test") go_library( @@ -58,6 +58,15 @@ py_binary( visibility = ["//visibility:public"], ) +py_test( + name = "parse_test", + srcs = [ + "parse.py", + "parse_test.py", + ], + imports = ["."], +) + filegroup( name = "helper.zip", srcs = [":helper"], diff --git a/gazelle/python/__main__.py b/gazelle/python/__main__.py index 18bc1ca37f..9974c66d13 100644 --- a/gazelle/python/__main__.py +++ b/gazelle/python/__main__.py @@ -23,7 +23,7 @@ if __name__ == "__main__": if len(sys.argv) < 2: - sys.exit("Please provide subcommand, either print or std_modules") + sys.exit("Please provide subcommand, either parse or std_modules") if sys.argv[1] == "parse": sys.exit(parse.main(sys.stdin, sys.stdout)) elif sys.argv[1] == "std_modules": diff --git a/gazelle/python/generate.go b/gazelle/python/generate.go index b28dc8000a..8d9b1699b5 100644 --- 
a/gazelle/python/generate.go +++ b/gazelle/python/generate.go @@ -20,6 +20,7 @@ import ( "log" "os" "path/filepath" + "sort" "strings" "github.com/bazelbuild/bazel-gazelle/config" @@ -89,9 +90,9 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes pyTestFilenames := treeset.NewWith(godsutils.StringComparator) pyFileNames := treeset.NewWith(godsutils.StringComparator) - // hasPyBinary controls whether a py_binary target should be generated for + // hasPyBinaryEntryPointFile controls whether a single py_binary target should be generated for // this package or not. - hasPyBinary := false + hasPyBinaryEntryPointFile := false // hasPyTestEntryPointFile and hasPyTestEntryPointTarget control whether a py_test target should // be generated for this package or not. @@ -106,8 +107,8 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes ext := filepath.Ext(f) if ext == ".py" { pyFileNames.Add(f) - if !hasPyBinary && f == pyBinaryEntrypointFilename { - hasPyBinary = true + if !hasPyBinaryEntryPointFile && f == pyBinaryEntrypointFilename { + hasPyBinaryEntryPointFile = true } else if !hasPyTestEntryPointFile && f == pyTestEntrypointFilename { hasPyTestEntryPointFile = true } else if f == conftestFilename { @@ -219,7 +220,7 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes collisionErrors := singlylinkedlist.New() appendPyLibrary := func(srcs *treeset.Set, pyLibraryTargetName string) { - deps, err := parser.parse(srcs) + allDeps, mainModules, err := parser.parse(srcs) if err != nil { log.Fatalf("ERROR: %v\n", err) } @@ -228,23 +229,42 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes // exists, and if it is of a different kind from the one we are // generating. If so, we have to throw an error since Gazelle won't // generate it correctly. 
- if args.File != nil { - for _, t := range args.File.Rules { - if t.Name() == pyLibraryTargetName && t.Kind() != actualPyLibraryKind { - fqTarget := label.New("", args.Rel, pyLibraryTargetName) - err := fmt.Errorf("failed to generate target %q of kind %q: "+ - "a target of kind %q with the same name already exists. "+ - "Use the '# gazelle:%s' directive to change the naming convention.", - fqTarget.String(), actualPyLibraryKind, t.Kind(), pythonconfig.LibraryNamingConvention) - collisionErrors.Add(err) + if err := ensureNoCollision(args.File, pyLibraryTargetName, actualPyLibraryKind); err != nil { + fqTarget := label.New("", args.Rel, pyLibraryTargetName) + err := fmt.Errorf("failed to generate target %q of kind %q: %w. "+ + "Use the '# gazelle:%s' directive to change the naming convention.", + fqTarget.String(), actualPyLibraryKind, err, pythonconfig.LibraryNamingConvention) + collisionErrors.Add(err) + } + + if !hasPyBinaryEntryPointFile { + sort.Strings(mainModules) + // Creating one py_binary target per main module when __main__.py doesn't exist. + for _, filename := range mainModules { + pyBinaryTargetName := strings.TrimSuffix(filepath.Base(filename), ".py") + if err := ensureNoCollision(args.File, pyBinaryTargetName, actualPyBinaryKind); err != nil { + fqTarget := label.New("", args.Rel, pyBinaryTargetName) + log.Printf("failed to generate target %q of kind %q: %v", + fqTarget.String(), actualPyBinaryKind, err) + continue } + binaryDeps := allDeps.Select(func(index int, value interface{}) bool { + return value.(module).Filepath == filepath.Join(args.Rel, filename) + }) + pyBinary := newTargetBuilder(pyBinaryKind, pyBinaryTargetName, pythonProjectRoot, args.Rel, pyFileNames). + addVisibility(visibility). + addSrc(filename). + addModuleDependencies(binaryDeps). 
+ generateImportsAttribute().build() + result.Gen = append(result.Gen, pyBinary) + result.Imports = append(result.Imports, pyBinary.PrivateAttr(config.GazelleImportsKey)) } } pyLibrary := newTargetBuilder(pyLibraryKind, pyLibraryTargetName, pythonProjectRoot, args.Rel, pyFileNames). addVisibility(visibility). addSrcs(srcs). - addModuleDependencies(deps). + addModuleDependencies(allDeps). generateImportsAttribute(). build() @@ -268,8 +288,8 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes appendPyLibrary(pyLibraryFilenames, cfg.RenderLibraryName(packageName)) } - if hasPyBinary { - deps, err := parser.parseSingle(pyBinaryEntrypointFilename) + if hasPyBinaryEntryPointFile { + deps, _, err := parser.parseSingle(pyBinaryEntrypointFilename) if err != nil { log.Fatalf("ERROR: %v\n", err) } @@ -280,17 +300,12 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes // exists, and if it is of a different kind from the one we are // generating. If so, we have to throw an error since Gazelle won't // generate it correctly. - if args.File != nil { - for _, t := range args.File.Rules { - if t.Name() == pyBinaryTargetName && t.Kind() != actualPyBinaryKind { - fqTarget := label.New("", args.Rel, pyBinaryTargetName) - err := fmt.Errorf("failed to generate target %q of kind %q: "+ - "a target of kind %q with the same name already exists. "+ - "Use the '# gazelle:%s' directive to change the naming convention.", - fqTarget.String(), actualPyBinaryKind, t.Kind(), pythonconfig.BinaryNamingConvention) - collisionErrors.Add(err) - } - } + if err := ensureNoCollision(args.File, pyBinaryTargetName, actualPyBinaryKind); err != nil { + fqTarget := label.New("", args.Rel, pyBinaryTargetName) + err := fmt.Errorf("failed to generate target %q of kind %q: %w. 
"+ + "Use the '# gazelle:%s' directive to change the naming convention.", + fqTarget.String(), actualPyBinaryKind, err, pythonconfig.BinaryNamingConvention) + collisionErrors.Add(err) } pyBinaryTarget := newTargetBuilder(pyBinaryKind, pyBinaryTargetName, pythonProjectRoot, args.Rel, pyFileNames). @@ -308,7 +323,7 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes var conftest *rule.Rule if hasConftestFile { - deps, err := parser.parseSingle(conftestFilename) + deps, _, err := parser.parseSingle(conftestFilename) if err != nil { log.Fatalf("ERROR: %v\n", err) } @@ -317,16 +332,11 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes // exists, and if it is of a different kind from the one we are // generating. If so, we have to throw an error since Gazelle won't // generate it correctly. - if args.File != nil { - for _, t := range args.File.Rules { - if t.Name() == conftestTargetname && t.Kind() != actualPyLibraryKind { - fqTarget := label.New("", args.Rel, conftestTargetname) - err := fmt.Errorf("failed to generate target %q of kind %q: "+ - "a target of kind %q with the same name already exists.", - fqTarget.String(), actualPyLibraryKind, t.Kind()) - collisionErrors.Add(err) - } - } + if err := ensureNoCollision(args.File, conftestTargetname, actualPyLibraryKind); err != nil { + fqTarget := label.New("", args.Rel, conftestTargetname) + err := fmt.Errorf("failed to generate target %q of kind %q: %w. ", + fqTarget.String(), actualPyLibraryKind, err) + collisionErrors.Add(err) } conftestTarget := newTargetBuilder(pyLibraryKind, conftestTargetname, pythonProjectRoot, args.Rel, pyFileNames). 
@@ -344,7 +354,7 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes var pyTestTargets []*targetBuilder newPyTestTargetBuilder := func(srcs *treeset.Set, pyTestTargetName string) *targetBuilder { - deps, err := parser.parse(srcs) + deps, _, err := parser.parse(srcs) if err != nil { log.Fatalf("ERROR: %v\n", err) } @@ -352,17 +362,12 @@ func (py *Python) GenerateRules(args language.GenerateArgs) language.GenerateRes // exists, and if it is of a different kind from the one we are // generating. If so, we have to throw an error since Gazelle won't // generate it correctly. - if args.File != nil { - for _, t := range args.File.Rules { - if t.Name() == pyTestTargetName && t.Kind() != actualPyTestKind { - fqTarget := label.New("", args.Rel, pyTestTargetName) - err := fmt.Errorf("failed to generate target %q of kind %q: "+ - "a target of kind %q with the same name already exists. "+ - "Use the '# gazelle:%s' directive to change the naming convention.", - fqTarget.String(), actualPyTestKind, t.Kind(), pythonconfig.TestNamingConvention) - collisionErrors.Add(err) - } - } + if err := ensureNoCollision(args.File, pyTestTargetName, actualPyTestKind); err != nil { + fqTarget := label.New("", args.Rel, pyTestTargetName) + err := fmt.Errorf("failed to generate target %q of kind %q: %w. "+ + "Use the '# gazelle:%s' directive to change the naming convention.", + fqTarget.String(), actualPyTestKind, err, pythonconfig.TestNamingConvention) + collisionErrors.Add(err) } return newTargetBuilder(pyTestKind, pyTestTargetName, pythonProjectRoot, args.Rel, pyFileNames). addSrcs(srcs). 
@@ -487,3 +492,15 @@ func isEntrypointFile(path string) bool { return false } } + +func ensureNoCollision(file *rule.File, targetName, kind string) error { + if file == nil { + return nil + } + for _, t := range file.Rules { + if t.Name() == targetName && t.Kind() != kind { + return fmt.Errorf("a target of kind %q with the same name already exists", t.Kind()) + } + } + return nil +} diff --git a/gazelle/python/parse.py b/gazelle/python/parse.py index 6c0ef69598..daa6d2b47c 100644 --- a/gazelle/python/parse.py +++ b/gazelle/python/parse.py @@ -22,7 +22,7 @@ import os import sys from io import BytesIO -from tokenize import COMMENT, tokenize +from tokenize import COMMENT, NAME, OP, STRING, tokenize def parse_import_statements(content, filepath): @@ -59,6 +59,30 @@ def parse_comments(content): return comments +def parse_main(content): + g = tokenize(BytesIO(content.encode("utf-8")).readline) + for token_type, token_val, start, _, _ in g: + if token_type != NAME or token_val != "if" or start[1] != 0: + continue + try: + token_type, token_val, start, _, _ = next(g) + if token_type != NAME or token_val != "__name__": + continue + token_type, token_val, start, _, _ = next(g) + if token_type != OP or token_val != "==": + continue + token_type, token_val, start, _, _ = next(g) + if token_type != STRING or token_val.strip("\"'") != '__main__': + continue + token_type, token_val, start, _, _ = next(g) + if token_type != OP or token_val != ":": + continue + return True + except StopIteration: + break + return False + + def parse(repo_root, rel_package_path, filename): rel_filepath = os.path.join(rel_package_path, filename) abs_filepath = os.path.join(repo_root, rel_filepath) @@ -70,11 +94,16 @@ def parse(repo_root, rel_package_path, filename): parse_import_statements, content, rel_filepath ) comments_future = executor.submit(parse_comments, content) + main_future = executor.submit(parse_main, content) modules = modules_future.result() comments = comments_future.result() + 
has_main = main_future.result() + output = { + "filename": filename, "modules": modules, "comments": comments, + "has_main": has_main, } return output diff --git a/gazelle/python/parse_test.py b/gazelle/python/parse_test.py new file mode 100644 index 0000000000..3ebded44b3 --- /dev/null +++ b/gazelle/python/parse_test.py @@ -0,0 +1,39 @@ +import unittest +import parse + +class TestParse(unittest.TestCase): + def test_not_has_main(self): + content = "a = 1\nb = 2" + self.assertFalse(parse.parse_main(content)) + + def test_has_main_in_function(self): + content = """ +def foo(): + if __name__ == "__main__": + a = 3 +""" + self.assertFalse(parse.parse_main(content)) + + def test_has_main(self): + content = """ +import unittest + +from lib import main + + +class ExampleTest(unittest.TestCase): + def test_main(self): + self.assertEqual( + "", + main([["A", 1], ["B", 2]]), + ) + + +if __name__ == "__main__": + unittest.main() +""" + self.assertTrue(parse.parse_main(content)) + + +if __name__ == "__main__": + unittest.main() diff --git a/gazelle/python/parser.go b/gazelle/python/parser.go index 89310267c3..d22850b6a9 100644 --- a/gazelle/python/parser.go +++ b/gazelle/python/parser.go @@ -101,7 +101,7 @@ func newPython3Parser( // parseSingle parses a single Python file and returns the extracted modules // from the import statements as well as the parsed comments. -func (p *python3Parser) parseSingle(pyFilename string) (*treeset.Set, error) { +func (p *python3Parser) parseSingle(pyFilename string) (*treeset.Set, []string, error) { pyFilenames := treeset.NewWith(godsutils.StringComparator) pyFilenames.Add(pyFilename) return p.parse(pyFilenames) @@ -109,7 +109,7 @@ func (p *python3Parser) parseSingle(pyFilename string) (*treeset.Set, error) { // parse parses multiple Python files and returns the extracted modules from // the import statements as well as the parsed comments. 
-func (p *python3Parser) parse(pyFilenames *treeset.Set) (*treeset.Set, error) { +func (p *python3Parser) parse(pyFilenames *treeset.Set) (*treeset.Set, []string, error) { parserMutex.Lock() defer parserMutex.Unlock() @@ -122,24 +122,28 @@ func (p *python3Parser) parse(pyFilenames *treeset.Set) (*treeset.Set, error) { } encoder := json.NewEncoder(parserStdin) if err := encoder.Encode(&req); err != nil { - return nil, fmt.Errorf("failed to parse: %w", err) + return nil, nil, fmt.Errorf("failed to parse: %w", err) } reader := bufio.NewReader(parserStdout) data, err := reader.ReadBytes(0) if err != nil { - return nil, fmt.Errorf("failed to parse: %w", err) + return nil, nil, fmt.Errorf("failed to parse: %w", err) } data = data[:len(data)-1] var allRes []parserResponse if err := json.Unmarshal(data, &allRes); err != nil { - return nil, fmt.Errorf("failed to parse: %w", err) + return nil, nil, fmt.Errorf("failed to parse: %w", err) } + var mainModules []string for _, res := range allRes { + if res.HasMain { + mainModules = append(mainModules, res.FileName) + } annotations, err := annotationsFromComments(res.Comments) if err != nil { - return nil, fmt.Errorf("failed to parse annotations: %w", err) + return nil, nil, fmt.Errorf("failed to parse annotations: %w", err) } for _, m := range res.Modules { @@ -159,17 +163,22 @@ func (p *python3Parser) parse(pyFilenames *treeset.Set) (*treeset.Set, error) { } } - return modules, nil + return modules, mainModules, nil } // parserResponse represents a response returned by the parser.py for a given // parsed Python module. type parserResponse struct { + // FileName of the parsed module + FileName string // The modules depended by the parsed module. Modules []module `json:"modules"` // The comments contained in the parsed module. This contains the // annotations as they are comments in the Python module. 
Comments []comment `json:"comments"` + // HasMain indicates whether the Python module has `if __name__ == "__main__"` + // at the top level + HasMain bool `json:"has_main"` } // module represents a fully-qualified, dot-separated, Python module as seen on diff --git a/gazelle/python/testdata/binary_without_entrypoint/BUILD.in b/gazelle/python/testdata/binary_without_entrypoint/BUILD.in new file mode 100644 index 0000000000..1177dce580 --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/BUILD.in @@ -0,0 +1,8 @@ +# gazelle:python_library_naming_convention py_default_library +# gazelle:resolve py numpy @pip//:numpy +# gazelle:resolve py pandas @pip//:pandas + +filegroup( + name = "collided_main", + srcs = ["collided_main.py"], +) diff --git a/gazelle/python/testdata/binary_without_entrypoint/BUILD.out b/gazelle/python/testdata/binary_without_entrypoint/BUILD.out new file mode 100644 index 0000000000..72fc670b7c --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/BUILD.out @@ -0,0 +1,44 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library", "py_test") + +# gazelle:python_library_naming_convention py_default_library +# gazelle:resolve py numpy @pip//:numpy +# gazelle:resolve py pandas @pip//:pandas + +filegroup( + name = "collided_main", + srcs = ["collided_main.py"], +) + +py_binary( + name = "main", + srcs = ["main.py"], + visibility = ["//:__subpackages__"], + deps = ["@pip//:pandas"], +) + +py_binary( + name = "main2", + srcs = ["main2.py"], + visibility = ["//:__subpackages__"], + deps = [":py_default_library"], +) + +py_library( + name = "py_default_library", + srcs = [ + "__init__.py", + "collided_main.py", + "main.py", + "main2.py", + ], + visibility = ["//:__subpackages__"], + deps = [ + "@pip//:numpy", + "@pip//:pandas", + ], +) + +py_test( + name = "main_test", + srcs = ["main_test.py"], +) \ No newline at end of file diff --git a/gazelle/python/testdata/binary_without_entrypoint/README.md 
b/gazelle/python/testdata/binary_without_entrypoint/README.md new file mode 100644 index 0000000000..e91250d0ac --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/README.md @@ -0,0 +1,4 @@ +# Binary without entrypoint + +This test case asserts that when there is no __main__.py, a py_binary is generated per main module, unless a main +module name collides with an existing target name. diff --git a/gazelle/python/testdata/binary_without_entrypoint/WORKSPACE b/gazelle/python/testdata/binary_without_entrypoint/WORKSPACE new file mode 100644 index 0000000000..faff6af87a --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/WORKSPACE @@ -0,0 +1 @@ +# This is a Bazel workspace for the Gazelle test data. diff --git a/gazelle/python/testdata/binary_without_entrypoint/__init__.py b/gazelle/python/testdata/binary_without_entrypoint/__init__.py new file mode 100644 index 0000000000..730755995d --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# For test purposes only. 
diff --git a/gazelle/python/testdata/binary_without_entrypoint/collided_main.py b/gazelle/python/testdata/binary_without_entrypoint/collided_main.py new file mode 100644 index 0000000000..3bf59c7795 --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/collided_main.py @@ -0,0 +1,4 @@ +import numpy + +if __name__ == "__main__": + run() \ No newline at end of file diff --git a/gazelle/python/testdata/binary_without_entrypoint/main.py b/gazelle/python/testdata/binary_without_entrypoint/main.py new file mode 100644 index 0000000000..f13cbe58bf --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/main.py @@ -0,0 +1,4 @@ +import pandas + +if __name__ == "__main__": + run() \ No newline at end of file diff --git a/gazelle/python/testdata/binary_without_entrypoint/main2.py b/gazelle/python/testdata/binary_without_entrypoint/main2.py new file mode 100644 index 0000000000..a82a5e604d --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/main2.py @@ -0,0 +1,4 @@ +import collided_main + +if __name__ == "__main__": + run() diff --git a/gazelle/python/testdata/binary_without_entrypoint/main_test.py b/gazelle/python/testdata/binary_without_entrypoint/main_test.py new file mode 100644 index 0000000000..505a766319 --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/main_test.py @@ -0,0 +1,7 @@ +import unittest + +class TestMain(unittest.unittest): + pass + +if __name__ == "__main__": + unittest.main() diff --git a/gazelle/python/testdata/binary_without_entrypoint/test.yaml b/gazelle/python/testdata/binary_without_entrypoint/test.yaml new file mode 100644 index 0000000000..44e4ae8364 --- /dev/null +++ b/gazelle/python/testdata/binary_without_entrypoint/test.yaml @@ -0,0 +1,18 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +expect: + stderr: | + gazelle: failed to generate target "//:collided_main" of kind "py_binary": a target of kind "filegroup" with the same name already exists diff --git a/internal_deps.bzl b/internal_deps.bzl index d2181d6f0f..72c3986f7b 100644 --- a/internal_deps.bzl +++ b/internal_deps.bzl @@ -105,10 +105,10 @@ def rules_python_internal_deps(): name = "futures_2_2_0_whl", downloaded_file_path = "futures-2.2.0-py2.py3-none-any.whl", sha256 = "9fd22b354a4c4755ad8c7d161d93f5026aca4cfe999bd2e53168f14765c02cd6", - # From https://pypi.python.org/pypi/futures/2.2.0 + # From https://pypi.org/pypi/futures/2.2.0 urls = [ - "https://mirror.bazel.build/pypi.python.org/packages/d7/1d/68874943aa37cf1c483fc61def813188473596043158faa6511c04a038b4/futures-2.2.0-py2.py3-none-any.whl", - "https://pypi.python.org/packages/d7/1d/68874943aa37cf1c483fc61def813188473596043158faa6511c04a038b4/futures-2.2.0-py2.py3-none-any.whl", + "https://mirror.bazel.build/pypi.org/packages/d7/1d/68874943aa37cf1c483fc61def813188473596043158faa6511c04a038b4/futures-2.2.0-py2.py3-none-any.whl", + "https://pypi.org/packages/d7/1d/68874943aa37cf1c483fc61def813188473596043158faa6511c04a038b4/futures-2.2.0-py2.py3-none-any.whl", ], ) @@ -116,10 +116,10 @@ def rules_python_internal_deps(): name = "futures_3_1_1_whl", downloaded_file_path = "futures-3.1.1-py2-none-any.whl", sha256 = "c4884a65654a7c45435063e14ae85280eb1f111d94e542396717ba9828c4337f", - # From https://pypi.python.org/pypi/futures + # From https://pypi.org/pypi/futures urls = [ - 
"https://mirror.bazel.build/pypi.python.org/packages/a6/1c/72a18c8c7502ee1b38a604a5c5243aa8c2a64f4bba4e6631b1b8972235dd/futures-3.1.1-py2-none-any.whl", - "https://pypi.python.org/packages/a6/1c/72a18c8c7502ee1b38a604a5c5243aa8c2a64f4bba4e6631b1b8972235dd/futures-3.1.1-py2-none-any.whl", + "https://mirror.bazel.build/pypi.org/packages/a6/1c/72a18c8c7502ee1b38a604a5c5243aa8c2a64f4bba4e6631b1b8972235dd/futures-3.1.1-py2-none-any.whl", + "https://pypi.org/packages/a6/1c/72a18c8c7502ee1b38a604a5c5243aa8c2a64f4bba4e6631b1b8972235dd/futures-3.1.1-py2-none-any.whl", ], ) @@ -127,10 +127,10 @@ def rules_python_internal_deps(): name = "google_cloud_language_whl", downloaded_file_path = "google_cloud_language-0.29.0-py2.py3-none-any.whl", sha256 = "a2dd34f0a0ebf5705dcbe34bd41199b1d0a55c4597d38ed045bd183361a561e9", - # From https://pypi.python.org/pypi/google-cloud-language + # From https://pypi.org/pypi/google-cloud-language urls = [ - "https://mirror.bazel.build/pypi.python.org/packages/6e/86/cae57e4802e72d9e626ee5828ed5a646cf4016b473a4a022f1038dba3460/google_cloud_language-0.29.0-py2.py3-none-any.whl", - "https://pypi.python.org/packages/6e/86/cae57e4802e72d9e626ee5828ed5a646cf4016b473a4a022f1038dba3460/google_cloud_language-0.29.0-py2.py3-none-any.whl", + "https://mirror.bazel.build/pypi.org/packages/6e/86/cae57e4802e72d9e626ee5828ed5a646cf4016b473a4a022f1038dba3460/google_cloud_language-0.29.0-py2.py3-none-any.whl", + "https://pypi.org/packages/6e/86/cae57e4802e72d9e626ee5828ed5a646cf4016b473a4a022f1038dba3460/google_cloud_language-0.29.0-py2.py3-none-any.whl", ], ) @@ -138,10 +138,10 @@ def rules_python_internal_deps(): name = "grpc_whl", downloaded_file_path = "grpcio-1.6.0-cp27-cp27m-manylinux1_i686.whl", sha256 = "c232d6d168cb582e5eba8e1c0da8d64b54b041dd5ea194895a2fe76050916561", - # From https://pypi.python.org/pypi/grpcio/1.6.0 + # From https://pypi.org/pypi/grpcio/1.6.0 urls = [ - 
"https://mirror.bazel.build/pypi.python.org/packages/c6/28/67651b4eabe616b27472c5518f9b2aa3f63beab8f62100b26f05ac428639/grpcio-1.6.0-cp27-cp27m-manylinux1_i686.whl", - "https://pypi.python.org/packages/c6/28/67651b4eabe616b27472c5518f9b2aa3f63beab8f62100b26f05ac428639/grpcio-1.6.0-cp27-cp27m-manylinux1_i686.whl", + "https://mirror.bazel.build/pypi.org/packages/c6/28/67651b4eabe616b27472c5518f9b2aa3f63beab8f62100b26f05ac428639/grpcio-1.6.0-cp27-cp27m-manylinux1_i686.whl", + "https://pypi.org/packages/c6/28/67651b4eabe616b27472c5518f9b2aa3f63beab8f62100b26f05ac428639/grpcio-1.6.0-cp27-cp27m-manylinux1_i686.whl", ], ) @@ -149,10 +149,10 @@ def rules_python_internal_deps(): name = "mock_whl", downloaded_file_path = "mock-2.0.0-py2.py3-none-any.whl", sha256 = "5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1", - # From https://pypi.python.org/pypi/mock + # From https://pypi.org/pypi/mock urls = [ - "https://mirror.bazel.build/pypi.python.org/packages/e6/35/f187bdf23be87092bd0f1200d43d23076cee4d0dec109f195173fd3ebc79/mock-2.0.0-py2.py3-none-any.whl", - "https://pypi.python.org/packages/e6/35/f187bdf23be87092bd0f1200d43d23076cee4d0dec109f195173fd3ebc79/mock-2.0.0-py2.py3-none-any.whl", + "https://mirror.bazel.build/pypi.org/packages/e6/35/f187bdf23be87092bd0f1200d43d23076cee4d0dec109f195173fd3ebc79/mock-2.0.0-py2.py3-none-any.whl", + "https://pypi.org/packages/e6/35/f187bdf23be87092bd0f1200d43d23076cee4d0dec109f195173fd3ebc79/mock-2.0.0-py2.py3-none-any.whl", ], ) diff --git a/python/BUILD.bazel b/python/BUILD.bazel index 6431532bd5..1ab59d582c 100644 --- a/python/BUILD.bazel +++ b/python/BUILD.bazel @@ -196,6 +196,8 @@ bzl_library( srcs = ["repositories.bzl"], deps = [ ":versions_bzl", + "//python/pip_install:repositories_bzl", + "//python/private:auth_bzl", "//python/private:bazel_tools_bzl", "//python/private:bzlmod_enabled_bzl", "//python/private:coverage_deps_bzl", diff --git a/python/config_settings/transition.bzl 
b/python/config_settings/transition.bzl index cb25965f76..1b1a5e82ef 100644 --- a/python/config_settings/transition.bzl +++ b/python/config_settings/transition.bzl @@ -18,8 +18,11 @@ them to the desired target platform. load("@bazel_skylib//lib:dicts.bzl", "dicts") load("//python:py_binary.bzl", _py_binary = "py_binary") +load("//python:py_info.bzl", "PyInfo") +load("//python:py_runtime_info.bzl", "PyRuntimeInfo") load("//python:py_test.bzl", _py_test = "py_test") load("//python/config_settings/private:py_args.bzl", "py_args") +load("//python/private:reexports.bzl", "BuiltinPyInfo", "BuiltinPyRuntimeInfo") def _transition_python_version_impl(_, attr): return {"//python/config_settings:python_version": str(attr.python_version)} @@ -60,14 +63,28 @@ def _transition_py_impl(ctx): for k, v in ctx.attr.env.items(): env[k] = ctx.expand_location(v) + if PyInfo in target: + py_info = target[PyInfo] + elif BuiltinPyInfo in target: + py_info = target[BuiltinPyInfo] + else: + fail("target {} does not have rules_python PyInfo or builtin PyInfo".format(target)) + + if PyRuntimeInfo in target: + py_runtime_info = target[PyRuntimeInfo] + elif BuiltinPyRuntimeInfo in target: + py_runtime_info = target[BuiltinPyRuntimeInfo] + else: + fail("target {} does not have rules_python PyRuntimeInfo or builtin PyRuntimeInfo".format(target)) + providers = [ DefaultInfo( executable = executable, files = depset([zipfile_symlink] if zipfile_symlink else [], transitive = [target[DefaultInfo].files]), runfiles = ctx.runfiles([zipfile_symlink] if zipfile_symlink else []).merge(target[DefaultInfo].default_runfiles), ), - target[PyInfo], - target[PyRuntimeInfo], + py_info, + py_runtime_info, # Ensure that the binary we're wrapping is included in code coverage. 
coverage_common.instrumented_files_info( ctx, diff --git a/python/extensions/BUILD.bazel b/python/extensions/BUILD.bazel index 4be3e37260..88e3984ea1 100644 --- a/python/extensions/BUILD.bazel +++ b/python/extensions/BUILD.bazel @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +load("@bazel_skylib//:bzl_library.bzl", "bzl_library") + package(default_visibility = ["//visibility:public"]) licenses(["notice"]) @@ -21,3 +23,17 @@ filegroup( srcs = glob(["**"]), visibility = ["//python:__pkg__"], ) + +bzl_library( + name = "pip_bzl", + srcs = ["pip.bzl"], + visibility = ["//:__subpackages__"], + deps = ["//python/private/bzlmod:pip_bzl"], +) + +bzl_library( + name = "python_bzl", + srcs = ["python.bzl"], + visibility = ["//:__subpackages__"], + deps = ["//python/private/bzlmod:python_bzl"], +) diff --git a/python/pip_install/pip_repository.bzl b/python/pip_install/pip_repository.bzl index 07e3353c77..dca36ce74c 100644 --- a/python/pip_install/pip_repository.bzl +++ b/python/pip_install/pip_repository.bzl @@ -76,7 +76,9 @@ def _resolve_python_interpreter(rctx): Args: rctx: Handle to the rule repository context. - Returns: Python interpreter path. + + Returns: + `path` object, for the resolved path to the Python interpreter. """ python_interpreter = _get_python_interpreter_attr(rctx) @@ -91,10 +93,13 @@ def _resolve_python_interpreter(rctx): if os == WINDOWS_NAME: python_interpreter = python_interpreter.realpath elif "/" not in python_interpreter: + # It's a plain command, e.g. "python3", to look up in the environment. 
found_python_interpreter = rctx.which(python_interpreter) if not found_python_interpreter: fail("python interpreter `{}` not found in PATH".format(python_interpreter)) python_interpreter = found_python_interpreter + else: + python_interpreter = rctx.path(python_interpreter) return python_interpreter def _get_xcode_location_cflags(rctx): @@ -345,6 +350,8 @@ def _pip_repository_impl(rctx): if rctx.attr.python_interpreter_target: config["python_interpreter_target"] = str(rctx.attr.python_interpreter_target) + if rctx.attr.experimental_target_platforms: + config["experimental_target_platforms"] = rctx.attr.experimental_target_platforms if rctx.attr.incompatible_generate_aliases: macro_tmpl = "@%s//{}:{}" % rctx.attr.name @@ -472,6 +479,30 @@ Warning: If a dependency participates in multiple cycles, all of those cycles must be collapsed down to one. For instance `a <-> b` and `a <-> c` cannot be listed as two separate cycles. +""", + ), + "experimental_target_platforms": attr.string_list( + default = [], + doc = """\ +A list of platforms that we will generate the conditional dependency graph for +cross platform wheels by parsing the wheel metadata. This will generate the +correct dependencies for packages like `sphinx` or `pylint`, which include +`colorama` when installed and used on Windows platforms. + +An empty list means falling back to the legacy behaviour where the host +platform is the target platform. + +WARNING: It may not work as expected in cases where the python interpreter +implementation that is being used at runtime is different between different platforms. +This has been tested for CPython only. + +Special values: `all` (for generating deps for all platforms), `host` (for +generating deps for the host platform only). `linux_*` and other `_*` values. +In the future we plan to set `all` as the default to this attribute. 
+ +For specific target platforms use values of the form `<os>_<arch>` where `<os>` +is one of `linux`, `osx`, `windows` and arch is one of `x86_64`, `x86_32`, +`aarch64`, `s390x` and `ppc64le`. """, ), "extra_pip_args": attr.string_list( @@ -647,7 +678,7 @@ alias( ) ``` -## Vendoring the requirements.bzl file +### Vendoring the requirements.bzl file In some cases you may not want to generate the requirements.bzl file as a repository rule while Bazel is fetching dependencies. For example, if you produce a reusable Bazel module @@ -713,7 +744,10 @@ def _whl_library_impl(rctx): ) result = rctx.execute( - args + ["--whl-file", whl_path], + args + [ + "--whl-file", + whl_path, + ] + ["--platform={}".format(p) for p in rctx.attr.experimental_target_platforms], environment = environment, quiet = rctx.attr.quiet, timeout = rctx.attr.timeout, @@ -749,6 +783,7 @@ def _whl_library_impl(rctx): repo_prefix = rctx.attr.repo_prefix, whl_name = whl_path.basename, dependencies = metadata["deps"], + dependencies_by_platform = metadata["deps_by_platform"], group_name = rctx.attr.group_name, group_deps = rctx.attr.group_deps, data_exclude = rctx.attr.pip_data_exclude, @@ -815,7 +850,7 @@ whl_library_attrs = { doc = "Python requirement string describing the package to make available", ), "whl_patches": attr.label_keyed_string_dict( - doc = """"a label-keyed-string dict that has + doc = """a label-keyed-string dict that has json.encode(struct([whl_file], patch_strip]) as values. 
This is to maintain flexibility and correct bzlmod extension interface until we have a better way to define whl_library and move whl diff --git a/python/pip_install/private/generate_whl_library_build_bazel.bzl b/python/pip_install/private/generate_whl_library_build_bazel.bzl index 6d0f167f02..568b00e4df 100644 --- a/python/pip_install/private/generate_whl_library_build_bazel.bzl +++ b/python/pip_install/private/generate_whl_library_build_bazel.bzl @@ -25,6 +25,7 @@ load( "WHEEL_FILE_PUBLIC_LABEL", ) load("//python/private:normalize_name.bzl", "normalize_name") +load("//python/private:text_util.bzl", "render") _COPY_FILE_TEMPLATE = """\ copy_file( @@ -101,11 +102,36 @@ alias( ) """ +def _render_list_and_select(deps, deps_by_platform, tmpl): + deps = render.list([tmpl.format(d) for d in deps]) + + if not deps_by_platform: + return deps + + deps_by_platform = { + p if p.startswith("@") else ":is_" + p: [ + tmpl.format(d) + for d in deps + ] + for p, deps in deps_by_platform.items() + } + + # Add the default, which means that we will be just using the dependencies in + # `deps` for platforms that are not handled in a special way by the packages + deps_by_platform["//conditions:default"] = [] + deps_by_platform = render.select(deps_by_platform, value_repr = render.list) + + if deps == "[]": + return deps_by_platform + else: + return "{} + {}".format(deps, deps_by_platform) + def generate_whl_library_build_bazel( *, repo_prefix, whl_name, dependencies, + dependencies_by_platform, data_exclude, tags, entry_points, @@ -118,6 +144,7 @@ def generate_whl_library_build_bazel( repo_prefix: the repo prefix that should be used for dependency lists. whl_name: the whl_name that this is generated for. dependencies: a list of PyPI packages that are dependencies to the py_library. + dependencies_by_platform: a dict[str, list] of PyPI packages that may vary by platform. data_exclude: more patterns to exclude from the data attribute of generated py_library rules. 
tags: list of tags to apply to generated py_library rules. entry_points: A dict of entry points to add py_binary rules for. @@ -138,6 +165,10 @@ def generate_whl_library_build_bazel( srcs_exclude = [] data_exclude = [] + data_exclude dependencies = sorted([normalize_name(d) for d in dependencies]) + dependencies_by_platform = { + platform: sorted([normalize_name(d) for d in deps]) + for platform, deps in dependencies_by_platform.items() + } tags = sorted(tags) for entry_point, entry_point_script_name in entry_points.items(): @@ -185,22 +216,48 @@ def generate_whl_library_build_bazel( for d in group_deps } - # Filter out deps which are within the group to avoid cycles - non_group_deps = [ + dependencies = [ d for d in dependencies if d not in group_deps ] + dependencies_by_platform = { + p: deps + for p, deps in dependencies_by_platform.items() + for deps in [[d for d in deps if d not in group_deps]] + if deps + } - lib_dependencies = [ - "@%s%s//:%s" % (repo_prefix, normalize_name(d), PY_LIBRARY_PUBLIC_LABEL) - for d in non_group_deps - ] + for p in dependencies_by_platform: + if p.startswith("@"): + continue - whl_file_deps = [ - "@%s%s//:%s" % (repo_prefix, normalize_name(d), WHEEL_FILE_PUBLIC_LABEL) - for d in non_group_deps - ] + os, _, cpu = p.partition("_") + + additional_content.append( + """\ +config_setting( + name = "is_{os}_{cpu}", + constraint_values = [ + "@platforms//cpu:{cpu}", + "@platforms//os:{os}", + ], + visibility = ["//visibility:private"], +) +""".format(os = os, cpu = cpu), + ) + + lib_dependencies = _render_list_and_select( + deps = dependencies, + deps_by_platform = dependencies_by_platform, + tmpl = "@{}{{}}//:{}".format(repo_prefix, PY_LIBRARY_PUBLIC_LABEL), + ) + + whl_file_deps = _render_list_and_select( + deps = dependencies, + deps_by_platform = dependencies_by_platform, + tmpl = "@{}{{}}//:{}".format(repo_prefix, WHEEL_FILE_PUBLIC_LABEL), + ) # If this library is a member of a group, its public label aliases need to # point to the 
group implementation rule not the implementation rules. We @@ -223,13 +280,13 @@ def generate_whl_library_build_bazel( py_library_public_label = PY_LIBRARY_PUBLIC_LABEL, py_library_impl_label = PY_LIBRARY_IMPL_LABEL, py_library_actual_label = library_impl_label, - dependencies = repr(lib_dependencies), + dependencies = render.indent(lib_dependencies, " " * 4).lstrip(), + whl_file_deps = render.indent(whl_file_deps, " " * 4).lstrip(), data_exclude = repr(_data_exclude), whl_name = whl_name, whl_file_public_label = WHEEL_FILE_PUBLIC_LABEL, whl_file_impl_label = WHEEL_FILE_IMPL_LABEL, whl_file_actual_label = whl_impl_label, - whl_file_deps = repr(whl_file_deps), tags = repr(tags), data_label = DATA_LABEL, dist_info_label = DIST_INFO_LABEL, diff --git a/python/pip_install/tools/wheel_installer/BUILD.bazel b/python/pip_install/tools/wheel_installer/BUILD.bazel index 0eadcc25f6..a396488d3d 100644 --- a/python/pip_install/tools/wheel_installer/BUILD.bazel +++ b/python/pip_install/tools/wheel_installer/BUILD.bazel @@ -13,6 +13,7 @@ py_library( deps = [ requirement("installer"), requirement("pip"), + requirement("packaging"), requirement("setuptools"), ], ) @@ -47,6 +48,18 @@ py_test( ], ) +py_test( + name = "wheel_test", + size = "small", + srcs = [ + "wheel_test.py", + ], + data = ["//examples/wheel:minimal_with_py_package"], + deps = [ + ":lib", + ], +) + py_test( name = "wheel_installer_test", size = "small", diff --git a/python/pip_install/tools/wheel_installer/arguments.py b/python/pip_install/tools/wheel_installer/arguments.py index 25fd30f879..71133c29ca 100644 --- a/python/pip_install/tools/wheel_installer/arguments.py +++ b/python/pip_install/tools/wheel_installer/arguments.py @@ -15,7 +15,9 @@ import argparse import json import pathlib -from typing import Any +from typing import Any, Dict, Set + +from python.pip_install.tools.wheel_installer import wheel def parser(**kwargs: Any) -> argparse.ArgumentParser: @@ -39,6 +41,12 @@ def parser(**kwargs: Any) -> 
argparse.ArgumentParser: action="store", help="Extra arguments to pass down to pip.", ) + parser.add_argument( + "--platform", + action="extend", + type=wheel.Platform.from_string, + help="Platforms to target dependencies. Can be used multiple times.", + ) parser.add_argument( "--pip_data_exclude", action="store", @@ -68,8 +76,9 @@ def parser(**kwargs: Any) -> argparse.ArgumentParser: return parser -def deserialize_structured_args(args): +def deserialize_structured_args(args: Dict[str, str]) -> Dict: """Deserialize structured arguments passed from the starlark rules. + Args: args: dict of parsed command line arguments """ @@ -80,3 +89,18 @@ def deserialize_structured_args(args): else: args[arg_name] = [] return args + + +def get_platforms(args: argparse.Namespace) -> Set: + """Aggregate platforms into a single set. + + Args: + args: dict of parsed command line arguments + """ + platforms = set() + if args.platform is None: + return platforms + + platforms.update(args.platform) + + return platforms diff --git a/python/pip_install/tools/wheel_installer/arguments_test.py b/python/pip_install/tools/wheel_installer/arguments_test.py index 7193f4a2dc..840c2fa6cc 100644 --- a/python/pip_install/tools/wheel_installer/arguments_test.py +++ b/python/pip_install/tools/wheel_installer/arguments_test.py @@ -16,7 +16,7 @@ import json import unittest -from python.pip_install.tools.wheel_installer import arguments +from python.pip_install.tools.wheel_installer import arguments, wheel class ArgumentsTestCase(unittest.TestCase): @@ -52,6 +52,18 @@ def test_deserialize_structured_args(self) -> None: self.assertEqual(args["environment"], {"PIP_DO_SOMETHING": "True"}) self.assertEqual(args["extra_pip_args"], []) + def test_platform_aggregation(self) -> None: + parser = arguments.parser() + args = parser.parse_args( + args=[ + "--platform=host", + "--platform=linux_*", + "--platform=all", + "--requirement=foo", + ] + ) + self.assertEqual(set(wheel.Platform.all()), 
arguments.get_platforms(args)) + if __name__ == "__main__": unittest.main() diff --git a/python/pip_install/tools/wheel_installer/wheel.py b/python/pip_install/tools/wheel_installer/wheel.py index 84af04ca59..9c18dfde80 100644 --- a/python/pip_install/tools/wheel_installer/wheel.py +++ b/python/pip_install/tools/wheel_installer/wheel.py @@ -13,18 +13,405 @@ # limitations under the License. """Utility class to inspect an extracted wheel directory""" + import email -from typing import Dict, Optional, Set, Tuple +import platform +import re +import sys +from collections import defaultdict +from dataclasses import dataclass +from enum import Enum +from pathlib import Path +from typing import Any, Dict, List, Optional, Set, Tuple, Union import installer -import pkg_resources +from packaging.requirements import Requirement from pip._vendor.packaging.utils import canonicalize_name +class OS(Enum): + linux = 1 + osx = 2 + windows = 3 + darwin = osx + win32 = windows + + @staticmethod + def from_tag(tag: str) -> "OS": + if tag.startswith("linux"): + return OS.linux + elif tag.startswith("manylinux"): + return OS.linux + elif tag.startswith("musllinux"): + return OS.linux + elif tag.startswith("macos"): + return OS.osx + elif tag.startswith("win"): + return OS.windows + else: + raise ValueError(f"unknown tag: {tag}") + + +class Arch(Enum): + x86_64 = 1 + x86_32 = 2 + aarch64 = 3 + ppc = 4 + s390x = 5 + amd64 = x86_64 + arm64 = aarch64 + i386 = x86_32 + i686 = x86_32 + x86 = x86_32 + ppc64le = ppc + + @staticmethod + def from_tag(tag: str) -> "Arch": + for s, value in Arch.__members__.items(): + if s in tag: + return value + + if tag == "win32": + return Arch.x86_32 + else: + raise ValueError(f"unknown tag: {tag}") + + +@dataclass(frozen=True) +class Platform: + os: OS + arch: Optional[Arch] = None + + @classmethod + def all(cls, want_os: Optional[OS] = None) -> List["Platform"]: + return sorted( + [ + cls(os=os, arch=arch) + for os in OS + for arch in Arch + if not want_os or 
want_os == os + ] + ) + + @classmethod + def host(cls) -> List["Platform"]: + """Use the Python interpreter to detect the platform. + + We extract `os` from sys.platform and `arch` from platform.machine + + Returns: + A list of parsed values which makes the signature the same as + `Platform.all` and `Platform.from_string`. + """ + return [ + cls( + os=OS[sys.platform.lower()], + # FIXME @aignas 2023-12-13: Hermetic toolchain on Windows 3.11.6 + # is returning an empty string here, so lets default to x86_64 + arch=Arch[platform.machine().lower() or "x86_64"], + ) + ] + + def __lt__(self, other: Any) -> bool: + """Add a comparison method, so that `sorted` returns the most specialized platforms first.""" + if not isinstance(other, Platform) or other is None: + raise ValueError(f"cannot compare {other} with Platform") + + if self.arch is None and other.arch is not None: + return True + + if self.arch is not None and other.arch is None: + return True + + # Here we ensure that we sort by OS before sorting by arch + + if self.arch is None and other.arch is None: + return self.os.value < other.os.value + + if self.os.value < other.os.value: + return True + + if self.os.value == other.os.value: + return self.arch.value < other.arch.value + + return False + + def __str__(self) -> str: + if self.arch is None: + return f"@platforms//os:{self.os.name.lower()}" + + return self.os.name.lower() + "_" + self.arch.name.lower() + + @classmethod + def from_tag(cls, tag: str) -> "Platform": + return cls( + os=OS.from_tag(tag), + arch=Arch.from_tag(tag), + ) + + @classmethod + def from_string(cls, platform: Union[str, List[str]]) -> List["Platform"]: + """Parse a string and return a list of platforms""" + platform = [platform] if isinstance(platform, str) else list(platform) + ret = set() + for p in platform: + if p == "host": + ret.update(cls.host()) + elif p == "all": + ret.update(cls.all()) + elif p.endswith("*"): + os, _, _ = p.partition("_") + ret.update(cls.all(OS[os])) + else: + 
os, _, arch = p.partition("_") + ret.add(cls(os=OS[os], arch=Arch[arch])) + + return sorted(ret) + + # NOTE @aignas 2023-12-05: below is the minimum number of accessors that are defined in + # https://peps.python.org/pep-0496/ to make rules_python generate dependencies. + # + # WARNING: It may not work in cases where the python implementation is different between + # different platforms. + + # derived from OS + @property + def os_name(self) -> str: + if self.os == OS.linux or self.os == OS.osx: + return "posix" + elif self.os == OS.windows: + return "nt" + else: + return "" + + @property + def sys_platform(self) -> str: + if self.os == OS.linux: + return "linux" + elif self.os == OS.osx: + return "darwin" + elif self.os == OS.windows: + return "win32" + else: + return "" + + @property + def platform_system(self) -> str: + if self.os == OS.linux: + return "Linux" + elif self.os == OS.osx: + return "Darwin" + elif self.os == OS.windows: + return "Windows" + + # derived from OS and Arch + @property + def platform_machine(self) -> str: + """Guess the target 'platform_machine' marker. + + NOTE @aignas 2023-12-05: this may not work on really new systems, like + Windows if they define the platform markers in a different way. 
+ """ + if self.arch == Arch.x86_64: + return "x86_64" + elif self.arch == Arch.x86_32 and self.os != OS.osx: + return "i386" + elif self.arch == Arch.x86_32: + return "" + elif self.arch == Arch.aarch64 and self.os == OS.linux: + return "aarch64" + elif self.arch == Arch.aarch64: + # Assuming that OSX and Windows use this one since the precedent is set here: + # https://github.com/cgohlke/win_arm64-wheels + return "arm64" + elif self.os != OS.linux: + return "" + elif self.arch == Arch.ppc64le: + return "ppc64le" + elif self.arch == Arch.s390x: + return "s390x" + else: + return "" + + def env_markers(self, extra: str) -> Dict[str, str]: + return { + "extra": extra, + "os_name": self.os_name, + "sys_platform": self.sys_platform, + "platform_machine": self.platform_machine, + "platform_system": self.platform_system, + "platform_release": "", # unset + "platform_version": "", # unset + # we assume that the following are the same as the interpreter used to setup the deps: + # "implementation_version": "X.Y.Z", + # "implementation_name": "cpython" + # "python_version": "X.Y", + # "python_full_version": "X.Y.Z", + # "platform_python_implementation: "CPython", + } + + +@dataclass(frozen=True) +class FrozenDeps: + deps: List[str] + deps_select: Dict[str, List[str]] + + +class Deps: + def __init__( + self, + name: str, + extras: Optional[Set[str]] = None, + platforms: Optional[Set[Platform]] = None, + ): + self.name: str = Deps._normalize(name) + self._deps: Set[str] = set() + self._select: Dict[Platform, Set[str]] = defaultdict(set) + self._want_extras: Set[str] = extras or {""} # empty strings means no extras + self._platforms: Set[Platform] = platforms or set() + + def _add(self, dep: str, platform: Optional[Platform]): + dep = Deps._normalize(dep) + + # Packages may create dependency cycles when specifying optional-dependencies / 'extras'. + # Example: github.com/google/etils/blob/a0b71032095db14acf6b33516bca6d885fe09e35/pyproject.toml#L32. 
+ if dep == self.name: + return + + if platform: + self._select[platform].add(dep) + else: + self._deps.add(dep) + + @staticmethod + def _normalize(name: str) -> str: + return re.sub(r"[-_.]+", "_", name).lower() + + def add(self, *wheel_reqs: str) -> None: + reqs = [Requirement(wheel_req) for wheel_req in wheel_reqs] + + # Resolve any extra extras due to self-edges + self._want_extras = self._resolve_extras(reqs) + + # process self-edges first to resolve the extras used + for req in reqs: + self._add_req(req) + + def _resolve_extras(self, reqs: List[Requirement]) -> Set[str]: + """Resolve extras which are due to depending on self[some_other_extra]. + + Some packages may have cyclic dependencies resulting from extras being used, one example is + `elint`, where we have one set of extras as aliases for other extras + and we have an extra called 'all' that includes all other extras. + + When the `requirements.txt` is generated by `pip-tools`, then it is likely that + this step is not needed, but for other `requirements.txt` files this may be useful. + + NOTE @aignas 2023-12-08: the extra resolution is not platform dependent, but + in order for it to become platform dependent we would have to have separate targets for each extra in + self._want_extras. + """ + extras = self._want_extras + + self_reqs = [] + for req in reqs: + if Deps._normalize(req.name) != self.name: + continue + + if req.marker is None: + # I am pretty sure we cannot reach this code as it does not + # make sense to specify packages in this way, but since it is + # easy to handle, lets do it. 
+ # + # TODO @aignas 2023-12-08: add a test + extras = extras | req.extras + else: + # process these in a separate loop + self_reqs.append(req) + + # A double loop is not strictly optimal, but always correct without recursion + for req in self_reqs: + if any(req.marker.evaluate({"extra": extra}) for extra in extras): + extras = extras | req.extras + else: + continue + + # Iterate through all packages to ensure that we include all of the extras from previously + # visited packages. + for req_ in self_reqs: + if any(req_.marker.evaluate({"extra": extra}) for extra in extras): + extras = extras | req_.extras + + return extras + + def _add_req(self, req: Requirement) -> None: + extras = self._want_extras + + if req.marker is None: + self._add(req.name, None) + return + + marker_str = str(req.marker) + + # NOTE @aignas 2023-12-08: in order to have reasonable select statements + # we do have to have some parsing of the markers, so it begs the question + # if packaging should be reimplemented in Starlark to have the best solution + # for now we will implement it in Python and see what the best parsing result + # can be before making this decision. 
+ if not self._platforms or not any( + tag in marker_str + for tag in [ + "os_name", + "sys_platform", + "platform_machine", + "platform_system", + ] + ): + if any(req.marker.evaluate({"extra": extra}) for extra in extras): + self._add(req.name, None) + return + + for plat in self._platforms: + if not any( + req.marker.evaluate(plat.env_markers(extra)) for extra in extras + ): + continue + + if "platform_machine" in marker_str: + self._add(req.name, plat) + else: + self._add(req.name, Platform(plat.os)) + + def build(self) -> FrozenDeps: + if not self._select: + return FrozenDeps( + deps=sorted(self._deps), + deps_select={}, + ) + + # Get all of the OS-specific dependencies applicable to all architectures + select = { + p: deps for p, deps in self._select.items() if deps and p.arch is None + } + # Now add them to all arch specific dependencies + select.update( + { + p: deps | select.get(Platform(p.os), set()) + for p, deps in self._select.items() + if deps and p.arch is not None + } + ) + + return FrozenDeps( + deps=sorted(self._deps), + deps_select={str(p): sorted(deps) for p, deps in sorted(select.items())}, + ) + + class Wheel: """Representation of the compressed .whl file""" - def __init__(self, path: str): + def __init__(self, path: Path): self._path = path @property @@ -70,19 +457,31 @@ def entry_points(self) -> Dict[str, Tuple[str, str]]: return entry_points_mapping - def dependencies(self, extras_requested: Optional[Set[str]] = None) -> Set[str]: - dependency_set = set() + def dependencies( + self, + extras_requested: Set[str] = None, + platforms: Optional[Set[Platform]] = None, + ) -> FrozenDeps: + if platforms: + # NOTE @aignas 2023-12-04: if the wheel is a platform specific wheel, we only include deps for that platform + _, _, platform_tag = self._path.name.rpartition("-") + platform_tag = platform_tag[:-4] # strip .whl + if platform_tag != "any": + platform = Platform.from_tag(platform_tag) + assert ( + platform in platforms + ), f"BUG: wheel platform 
'{platform}' must be one of '{platforms}'" + platforms = {platform} + dependency_set = Deps( + self.name, + extras=extras_requested, + platforms=platforms, + ) for wheel_req in self.metadata.get_all("Requires-Dist", []): - req = pkg_resources.Requirement(wheel_req) # type: ignore - - if req.marker is None or any( - req.marker.evaluate({"extra": extra}) - for extra in extras_requested or [""] - ): - dependency_set.add(req.name) # type: ignore + dependency_set.add(wheel_req) - return dependency_set + return dependency_set.build() def unzip(self, directory: str) -> None: installation_schemes = { diff --git a/python/pip_install/tools/wheel_installer/wheel_installer.py b/python/pip_install/tools/wheel_installer/wheel_installer.py index f5ed8c3db8..801ef959f0 100644 --- a/python/pip_install/tools/wheel_installer/wheel_installer.py +++ b/python/pip_install/tools/wheel_installer/wheel_installer.py @@ -14,19 +14,16 @@ """Build and/or fetch a single wheel based on the requirement passed in""" -import argparse import errno import glob import json import os import re -import shutil import subprocess import sys -import textwrap from pathlib import Path from tempfile import NamedTemporaryFile -from typing import Dict, Iterable, List, Optional, Set, Tuple +from typing import Dict, List, Optional, Set, Tuple from pip._vendor.packaging.utils import canonicalize_name @@ -108,6 +105,7 @@ def _extract_wheel( wheel_file: str, extras: Dict[str, Set[str]], enable_implicit_namespace_pkgs: bool, + platforms: List[wheel.Platform], installation_dir: Path = Path("."), ) -> None: """Extracts wheel into given directory and creates py_library and filegroup targets. @@ -126,16 +124,15 @@ def _extract_wheel( _setup_namespace_pkg_compatibility(installation_dir) extras_requested = extras[whl.name] if whl.name in extras else set() - # Packages may create dependency cycles when specifying optional-dependencies / 'extras'. 
- # Example: github.com/google/etils/blob/a0b71032095db14acf6b33516bca6d885fe09e35/pyproject.toml#L32. - self_edge_dep = set([whl.name]) - whl_deps = sorted(whl.dependencies(extras_requested) - self_edge_dep) + + dependencies = whl.dependencies(extras_requested, platforms) with open(os.path.join(installation_dir, "metadata.json"), "w") as f: metadata = { "name": whl.name, "version": whl.version, - "deps": whl_deps, + "deps": dependencies.deps, + "deps_by_platform": dependencies.deps_select, "entry_points": [ { "name": name, @@ -164,6 +161,7 @@ def main() -> None: wheel_file=whl, extras=extras, enable_implicit_namespace_pkgs=args.enable_implicit_namespace_pkgs, + platforms=arguments.get_platforms(args), ) return diff --git a/python/pip_install/tools/wheel_installer/wheel_installer_test.py b/python/pip_install/tools/wheel_installer/wheel_installer_test.py index b24e50053f..6eacd1fda5 100644 --- a/python/pip_install/tools/wheel_installer/wheel_installer_test.py +++ b/python/pip_install/tools/wheel_installer/wheel_installer_test.py @@ -19,7 +19,7 @@ import unittest from pathlib import Path -from python.pip_install.tools.wheel_installer import wheel_installer +from python.pip_install.tools.wheel_installer import wheel, wheel_installer class TestRequirementExtrasParsing(unittest.TestCase): @@ -55,31 +55,6 @@ def test_parses_requirement_for_extra(self) -> None: ) -# TODO @aignas 2023-07-21: migrate to starlark -# class BazelTestCase(unittest.TestCase): -# def test_generate_entry_point_contents(self): -# got = wheel_installer._generate_entry_point_contents("sphinx.cmd.build", "main") -# want = """#!/usr/bin/env python3 -# import sys -# from sphinx.cmd.build import main -# if __name__ == "__main__": -# sys.exit(main()) -# """ -# self.assertEqual(got, want) -# -# def test_generate_entry_point_contents_with_shebang(self): -# got = wheel_installer._generate_entry_point_contents( -# "sphinx.cmd.build", "main", shebang="#!/usr/bin/python" -# ) -# want = """#!/usr/bin/python -# 
import sys -# from sphinx.cmd.build import main -# if __name__ == "__main__": -# sys.exit(main()) -# """ -# self.assertEqual(got, want) - - class TestWhlFilegroup(unittest.TestCase): def setUp(self) -> None: self.wheel_name = "example_minimal_package-0.0.1-py3-none-any.whl" @@ -92,10 +67,11 @@ def tearDown(self): def test_wheel_exists(self) -> None: wheel_installer._extract_wheel( - self.wheel_path, + Path(self.wheel_path), installation_dir=Path(self.wheel_dir), extras={}, enable_implicit_namespace_pkgs=False, + platforms=[], ) want_files = [ @@ -119,10 +95,34 @@ def test_wheel_exists(self) -> None: version="0.0.1", name="example-minimal-package", deps=[], + deps_by_platform={}, entry_points=[], ) self.assertEqual(want, metadata_file_content) +class TestWheelPlatform(unittest.TestCase): + def test_wheel_os_alias(self): + self.assertEqual("OS.osx", str(wheel.OS.osx)) + self.assertEqual(str(wheel.OS.darwin), str(wheel.OS.osx)) + + def test_wheel_arch_alias(self): + self.assertEqual("Arch.x86_64", str(wheel.Arch.x86_64)) + self.assertEqual(str(wheel.Arch.amd64), str(wheel.Arch.x86_64)) + + def test_wheel_platform_alias(self): + give = wheel.Platform( + os=wheel.OS.darwin, + arch=wheel.Arch.amd64, + ) + alias = wheel.Platform( + os=wheel.OS.osx, + arch=wheel.Arch.x86_64, + ) + + self.assertEqual("osx_x86_64", str(give)) + self.assertEqual(str(alias), str(give)) + + if __name__ == "__main__": unittest.main() diff --git a/python/pip_install/tools/wheel_installer/wheel_test.py b/python/pip_install/tools/wheel_installer/wheel_test.py new file mode 100644 index 0000000000..57bfa9458a --- /dev/null +++ b/python/pip_install/tools/wheel_installer/wheel_test.py @@ -0,0 +1,235 @@ +import unittest + +from python.pip_install.tools.wheel_installer import wheel + + +class DepsTest(unittest.TestCase): + def test_simple(self): + deps = wheel.Deps("foo") + deps.add("bar") + + got = deps.build() + + self.assertIsInstance(got, wheel.FrozenDeps) + self.assertEqual(["bar"], got.deps) + 
self.assertEqual({}, got.deps_select) + + def test_can_add_os_specific_deps(self): + platforms = { + "linux_x86_64", + "osx_x86_64", + "windows_x86_64", + } + deps = wheel.Deps("foo", platforms=set(wheel.Platform.from_string(platforms))) + deps.add( + "bar", + "posix_dep; os_name=='posix'", + "win_dep; os_name=='nt'", + ) + + got = deps.build() + + self.assertEqual(["bar"], got.deps) + self.assertEqual( + { + "@platforms//os:linux": ["posix_dep"], + "@platforms//os:osx": ["posix_dep"], + "@platforms//os:windows": ["win_dep"], + }, + got.deps_select, + ) + + def test_can_add_platform_specific_deps(self): + platforms = { + "linux_x86_64", + "osx_x86_64", + "osx_aarch64", + "windows_x86_64", + } + deps = wheel.Deps("foo", platforms=set(wheel.Platform.from_string(platforms))) + deps.add( + "bar", + "posix_dep; os_name=='posix'", + "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'", + "win_dep; os_name=='nt'", + ) + + got = deps.build() + + self.assertEqual(["bar"], got.deps) + self.assertEqual( + { + "osx_aarch64": ["m1_dep", "posix_dep"], + "@platforms//os:linux": ["posix_dep"], + "@platforms//os:osx": ["posix_dep"], + "@platforms//os:windows": ["win_dep"], + }, + got.deps_select, + ) + + def test_non_platform_markers_are_added_to_common_deps(self): + platforms = { + "linux_x86_64", + "osx_x86_64", + "osx_aarch64", + "windows_x86_64", + } + deps = wheel.Deps("foo", platforms=set(wheel.Platform.from_string(platforms))) + deps.add( + "bar", + "baz; implementation_name=='cpython'", + "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'", + ) + + got = deps.build() + + self.assertEqual(["bar", "baz"], got.deps) + self.assertEqual( + { + "osx_aarch64": ["m1_dep"], + }, + got.deps_select, + ) + + def test_self_is_ignored(self): + deps = wheel.Deps("foo", extras={"ssl"}) + deps.add( + "bar", + "req_dep; extra == 'requests'", + "foo[requests]; extra == 'ssl'", + "ssl_lib; extra == 'ssl'", + ) + + got = deps.build() + + self.assertEqual(["bar", "req_dep", 
"ssl_lib"], got.deps) + self.assertEqual({}, got.deps_select) + + def test_handle_etils(self): + deps = wheel.Deps("etils", extras={"all"}) + requires = """ +etils[array-types] ; extra == "all" +etils[eapp] ; extra == "all" +etils[ecolab] ; extra == "all" +etils[edc] ; extra == "all" +etils[enp] ; extra == "all" +etils[epath] ; extra == "all" +etils[epath-gcs] ; extra == "all" +etils[epath-s3] ; extra == "all" +etils[epy] ; extra == "all" +etils[etqdm] ; extra == "all" +etils[etree] ; extra == "all" +etils[etree-dm] ; extra == "all" +etils[etree-jax] ; extra == "all" +etils[etree-tf] ; extra == "all" +etils[enp] ; extra == "array-types" +pytest ; extra == "dev" +pytest-subtests ; extra == "dev" +pytest-xdist ; extra == "dev" +pyink ; extra == "dev" +pylint>=2.6.0 ; extra == "dev" +chex ; extra == "dev" +torch ; extra == "dev" +optree ; extra == "dev" +dataclass_array ; extra == "dev" +sphinx-apitree[ext] ; extra == "docs" +etils[dev,all] ; extra == "docs" +absl-py ; extra == "eapp" +simple_parsing ; extra == "eapp" +etils[epy] ; extra == "eapp" +jupyter ; extra == "ecolab" +numpy ; extra == "ecolab" +mediapy ; extra == "ecolab" +packaging ; extra == "ecolab" +etils[enp] ; extra == "ecolab" +etils[epy] ; extra == "ecolab" +etils[epy] ; extra == "edc" +numpy ; extra == "enp" +etils[epy] ; extra == "enp" +fsspec ; extra == "epath" +importlib_resources ; extra == "epath" +typing_extensions ; extra == "epath" +zipp ; extra == "epath" +etils[epy] ; extra == "epath" +gcsfs ; extra == "epath-gcs" +etils[epath] ; extra == "epath-gcs" +s3fs ; extra == "epath-s3" +etils[epath] ; extra == "epath-s3" +typing_extensions ; extra == "epy" +absl-py ; extra == "etqdm" +tqdm ; extra == "etqdm" +etils[epy] ; extra == "etqdm" +etils[array_types] ; extra == "etree" +etils[epy] ; extra == "etree" +etils[enp] ; extra == "etree" +etils[etqdm] ; extra == "etree" +dm-tree ; extra == "etree-dm" +etils[etree] ; extra == "etree-dm" +jax[cpu] ; extra == "etree-jax" +etils[etree] ; extra == 
"etree-jax" +tensorflow ; extra == "etree-tf" +etils[etree] ; extra == "etree-tf" +etils[ecolab] ; extra == "lazy-imports" +""" + + deps.add(*requires.strip().split("\n")) + + got = deps.build() + want = [ + "absl_py", + "dm_tree", + "fsspec", + "gcsfs", + "importlib_resources", + "jax", + "jupyter", + "mediapy", + "numpy", + "packaging", + "s3fs", + "simple_parsing", + "tensorflow", + "tqdm", + "typing_extensions", + "zipp", + ] + + self.assertEqual(want, got.deps) + self.assertEqual({}, got.deps_select) + + +class PlatformTest(unittest.TestCase): + def test_platform_from_string(self): + tests = { + "win_amd64": "windows_x86_64", + "macosx_10_9_arm64": "osx_aarch64", + "manylinux1_i686.manylinux_2_17_i686": "linux_x86_32", + "musllinux_1_1_ppc64le": "linux_ppc", + } + + for give, want in tests.items(): + with self.subTest(give=give, want=want): + self.assertEqual( + wheel.Platform.from_string(want)[0], + wheel.Platform.from_tag(give), + ) + + def test_can_get_host(self): + host = wheel.Platform.host() + self.assertIsNotNone(host) + self.assertEqual(1, len(wheel.Platform.from_string("host"))) + self.assertEqual(host, wheel.Platform.from_string("host")) + + def test_can_get_all(self): + all_platforms = wheel.Platform.all() + self.assertEqual(15, len(all_platforms)) + self.assertEqual(all_platforms, wheel.Platform.from_string("all")) + + def test_can_get_all_for_os(self): + linuxes = wheel.Platform.all(wheel.OS.linux) + self.assertEqual(5, len(linuxes)) + self.assertEqual(linuxes, wheel.Platform.from_string("linux_*")) + + +if __name__ == "__main__": + unittest.main() diff --git a/python/private/BUILD.bazel b/python/private/BUILD.bazel index 4f47206449..162d4ed8b3 100644 --- a/python/private/BUILD.bazel +++ b/python/private/BUILD.bazel @@ -48,6 +48,12 @@ filegroup( visibility = ["//python:__pkg__"], ) +bzl_library( + name = "auth_bzl", + srcs = ["auth.bzl"], + deps = [":bazel_tools_bzl"], +) + bzl_library( name = "autodetecting_toolchain_bzl", srcs = 
["autodetecting_toolchain.bzl"], @@ -266,6 +272,13 @@ exports_files( visibility = ["//:__subpackages__"], ) +exports_files( + ["python_bootstrap_template.txt"], + # Not actually public. Only public because it's an implicit dependency of + # py_runtime. + visibility = ["//visibility:public"], +) + # Used to determine the use of `--stamp` in Starlark rules stamp_build_setting(name = "stamp") diff --git a/python/private/auth.bzl b/python/private/auth.bzl new file mode 100644 index 0000000000..39ada37cae --- /dev/null +++ b/python/private/auth.bzl @@ -0,0 +1,42 @@ +# Copyright 2022 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers copied from http_file source to be reused here. + +The implementation below is copied directly from Bazel's implementation of `http_archive`. +Accordingly, the return value of this function should be used identically as the `auth` parameter of `http_archive`. +Reference: https://github.com/bazelbuild/bazel/blob/6.3.2/tools/build_defs/repo/http.bzl#L109 +""" + +# TODO @aignas 2023-12-18: use the following instead when available. +# load("@bazel_tools//tools/build_defs/repo:utils.bzl", "get_auth") +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "read_netrc", "read_user_netrc", "use_netrc") + +def get_auth(rctx, urls): + """Utility for retrieving netrc-based authentication parameters for repository download rules used in python_repository. 
+ + Args: + rctx (repository_ctx): The repository rule's context object. + urls: A list of URLs from which assets will be downloaded. + + Returns: + dict: A map of authentication parameters by URL. + """ + if rctx.attr.netrc: + netrc = read_netrc(rctx, rctx.attr.netrc) + elif "NETRC" in rctx.os.environ: + netrc = read_netrc(rctx, rctx.os.environ["NETRC"]) + else: + netrc = read_user_netrc(rctx) + return use_netrc(netrc, urls, rctx.attr.auth_patterns) diff --git a/python/private/bzlmod/BUILD.bazel b/python/private/bzlmod/BUILD.bazel index fc8449ecaf..a31292287c 100644 --- a/python/private/bzlmod/BUILD.bazel +++ b/python/private/bzlmod/BUILD.bazel @@ -13,8 +13,9 @@ # limitations under the License. load("@bazel_skylib//:bzl_library.bzl", "bzl_library") +load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED") -package(default_visibility = ["//visibility:private"]) +package(default_visibility = ["//:__subpackages__"]) licenses(["notice"]) @@ -24,6 +25,28 @@ filegroup( visibility = ["//python/private:__pkg__"], ) +bzl_library( + name = "pip_bzl", + srcs = ["pip.bzl"], + deps = [ + ":pip_repository_bzl", + "//python/pip_install:pip_repository_bzl", + "//python/pip_install:requirements_parser_bzl", + "//python/private:full_version_bzl", + "//python/private:normalize_name_bzl", + "//python/private:parse_whl_name_bzl", + "//python/private:version_label_bzl", + ":bazel_features_bzl", + ] + [ + "@pythons_hub//:interpreters_bzl", + ] if BZLMOD_ENABLED else [], +) + +bzl_library( + name = "bazel_features_bzl", + srcs = ["@bazel_features//:bzl_files"] if BZLMOD_ENABLED else [], +) + bzl_library( name = "pip_repository_bzl", srcs = ["pip_repository.bzl"], @@ -33,3 +56,23 @@ bzl_library( "//python/private:text_util_bzl", ], ) + +bzl_library( + name = "python_bzl", + srcs = ["python.bzl"], + deps = [ + ":pythons_hub_bzl", + "//python:repositories_bzl", + "//python/private:toolchains_repo_bzl", + ], +) + +bzl_library( + name = "pythons_hub_bzl", + srcs = ["pythons_hub.bzl"], + 
deps = [ + "//python:versions_bzl", + "//python/private:full_version_bzl", + "//python/private:toolchains_repo_bzl", + ], +) diff --git a/python/private/bzlmod/pip.bzl b/python/private/bzlmod/pip.bzl index 305039fb2e..6d45a26d7b 100644 --- a/python/private/bzlmod/pip.bzl +++ b/python/private/bzlmod/pip.bzl @@ -86,7 +86,7 @@ def _create_whl_repos(module_ctx, pip_attr, whl_map, whl_overrides): # if we do not have the python_interpreter set in the attributes # we programmatically find it. hub_name = pip_attr.hub_name - if python_interpreter_target == None: + if python_interpreter_target == None and not pip_attr.python_interpreter: python_name = "python_" + version_label(pip_attr.python_version, sep = "_") if python_name not in INTERPRETER_LABELS.keys(): fail(( @@ -109,7 +109,19 @@ def _create_whl_repos(module_ctx, pip_attr, whl_map, whl_overrides): # needed for the whl_libary declarations below. requirements_lock_content = module_ctx.read(requrements_lock) parse_result = parse_requirements(requirements_lock_content) - requirements = parse_result.requirements + + # Replicate a surprising behavior that WORKSPACE builds allowed: + # Defining a repo with the same name multiple times, but only the last + # definition is respected. + # The requirement lines might have duplicate names because lines for extras + # are returned as just the base package name. e.g., `foo[bar]` results + # in an entry like `("foo", "foo[bar] == 1.0 ...")`. + requirements = { + normalize_name(entry[0]): entry + # The WORKSPACE pip_parse sorted entries, so mimic that ordering. 
+ for entry in sorted(parse_result.requirements) + }.values() + extra_pip_args = pip_attr.extra_pip_args + parse_result.options if hub_name not in whl_map: @@ -158,6 +170,7 @@ def _create_whl_repos(module_ctx, pip_attr, whl_map, whl_overrides): p: json.encode(args) for p, args in whl_overrides.get(whl_name, {}).items() }, + experimental_target_platforms = pip_attr.experimental_target_platforms, python_interpreter = pip_attr.python_interpreter, python_interpreter_target = python_interpreter_target, quiet = pip_attr.quiet, @@ -356,13 +369,14 @@ Targets from different hubs should not be used together. "python_version": attr.string( mandatory = True, doc = """ -The Python version to use for resolving the pip dependencies, in Major.Minor -format (e.g. "3.11"). Patch level granularity (e.g. "3.11.1") is not supported. +The Python version the dependencies are targetting, in Major.Minor format +(e.g., "3.11"). Patch level granularity (e.g. "3.11.1") is not supported. If not specified, then the default Python version (as set by the root module or rules_python) will be used. -The version specified here must have a corresponding `python.toolchain()` -configured. +If an interpreter isn't explicitly provided (using `python_interpreter` or +`python_interpreter_target`), then the version specified here must have +a corresponding `python.toolchain()` configured. 
""", ), "whl_modifications": attr.label_keyed_string_dict( diff --git a/python/private/bzlmod/pythons_hub.bzl b/python/private/bzlmod/pythons_hub.bzl index f36ce45521..5f536f3b67 100644 --- a/python/private/bzlmod/pythons_hub.bzl +++ b/python/private/bzlmod/pythons_hub.bzl @@ -29,7 +29,19 @@ def _have_same_length(*lists): fail("expected at least one list") return len({len(length): None for length in lists}) == 1 -def _python_toolchain_build_file_content( +_HUB_BUILD_FILE_TEMPLATE = """\ +load("@bazel_skylib//:bzl_library.bzl", "bzl_library") + +bzl_library( + name = "interpreters_bzl", + srcs = ["interpreters.bzl"], + visibility = ["@rules_python//:__subpackages__"], +) + +{toolchains} +""" + +def _hub_build_file_content( prefixes, python_versions, set_python_version_constraints, @@ -48,7 +60,7 @@ def _python_toolchain_build_file_content( # Iterate over the length of python_versions and call # build the toolchain content by calling python_toolchain_build_file_content - return "\n".join([python_toolchain_build_file_content( + toolchains = "\n".join([python_toolchain_build_file_content( prefix = prefixes[i], python_version = full_version(python_versions[i]), set_python_version_constraint = set_python_version_constraints[i], @@ -56,7 +68,9 @@ def _python_toolchain_build_file_content( rules_python = rules_python, ) for i in range(len(python_versions))]) -_build_file_for_hub_template = """ + return _HUB_BUILD_FILE_TEMPLATE.format(toolchains = toolchains) + +_interpreters_bzl_template = """ INTERPRETER_LABELS = {{ {interpreter_labels} }} @@ -72,7 +86,7 @@ def _hub_repo_impl(rctx): # write them to the BUILD file. 
rctx.file( "BUILD.bazel", - _python_toolchain_build_file_content( + _hub_build_file_content( rctx.attr.toolchain_prefixes, rctx.attr.toolchain_python_versions, rctx.attr.toolchain_set_python_version_constraints, @@ -97,7 +111,7 @@ def _hub_repo_impl(rctx): rctx.file( "interpreters.bzl", - _build_file_for_hub_template.format( + _interpreters_bzl_template.format( interpreter_labels = interpreter_labels, default_python_version = rctx.attr.default_python_version, ), diff --git a/python/private/common/py_runtime_rule.bzl b/python/private/common/py_runtime_rule.bzl index 8072affb5a..28b525bf49 100644 --- a/python/private/common/py_runtime_rule.bzl +++ b/python/private/common/py_runtime_rule.bzl @@ -122,6 +122,8 @@ interpreter. # Example ``` +load("@rules_python//python:py_runtime.bzl", "py_runtime") + py_runtime( name = "python-2.7.12", files = glob(["python-2.7.12/**"]), diff --git a/python/private/py_runtime_pair_rule.bzl b/python/private/py_runtime_pair_rule.bzl index 574e1fec5e..d17b008676 100644 --- a/python/private/py_runtime_pair_rule.bzl +++ b/python/private/py_runtime_pair_rule.bzl @@ -16,6 +16,7 @@ load("//python:py_runtime_info.bzl", "PyRuntimeInfo") load("//python/private:reexports.bzl", "BuiltinPyRuntimeInfo") +load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER") def _py_runtime_pair_impl(ctx): if ctx.attr.py2_runtime != None: @@ -45,7 +46,11 @@ def _py_runtime_pair_impl(ctx): )] def _get_py_runtime_info(target): - if PyRuntimeInfo in target: + # Prior to Bazel 7, the builtin PyRuntimeInfo object must be used because + # py_binary (implemented in Java) performs a type check on the provider + # value to verify it is an instance of the Java-implemented PyRuntimeInfo + # class. + if IS_BAZEL_7_OR_HIGHER and PyRuntimeInfo in target: return target[PyRuntimeInfo] else: return target[BuiltinPyRuntimeInfo] @@ -108,7 +113,8 @@ Example usage: ```python # In your BUILD file... 
-load("@rules_python//python:defs.bzl", "py_runtime_pair") +load("@rules_python//python:py_runtime.bzl", "py_runtime") +load("@rules_python//python:py_runtime_pair.bzl", "py_runtime_pair") py_runtime( name = "my_py2_runtime", diff --git a/python/private/text_util.bzl b/python/private/text_util.bzl index da67001ce8..78f62be1aa 100644 --- a/python/private/text_util.bzl +++ b/python/private/text_util.bzl @@ -20,26 +20,33 @@ def _indent(text, indent = " " * 4): return "\n".join([indent + line for line in text.splitlines()]) -def _render_alias(name, actual): +def _render_alias(name, actual, *, visibility = None): + args = [ + "name = \"{}\",".format(name), + "actual = {},".format(actual), + ] + + if visibility: + args.append("visibility = {},".format(render.list(visibility))) + return "\n".join([ "alias(", - _indent("name = \"{}\",".format(name)), - _indent("actual = {},".format(actual)), + ] + [_indent(arg) for arg in args] + [ ")", ]) -def _render_dict(d): +def _render_dict(d, *, value_repr = repr): return "\n".join([ "{", _indent("\n".join([ - "{}: {},".format(repr(k), repr(v)) + "{}: {},".format(repr(k), value_repr(v)) for k, v in d.items() ])), "}", ]) -def _render_select(selects, *, no_match_error = None): - dict_str = _render_dict(selects) + "," +def _render_select(selects, *, no_match_error = None, value_repr = repr): + dict_str = _render_dict(selects, value_repr = value_repr) + "," if no_match_error: args = "\n".join([ @@ -58,6 +65,12 @@ def _render_select(selects, *, no_match_error = None): return "select({})".format(args) def _render_list(items): + if not items: + return "[]" + + if len(items) == 1: + return "[{}]".format(repr(items[0])) + return "\n".join([ "[", _indent("\n".join([ diff --git a/python/py_runtime_pair.bzl b/python/py_runtime_pair.bzl index 30df002f13..1728dcdab7 100644 --- a/python/py_runtime_pair.bzl +++ b/python/py_runtime_pair.bzl @@ -47,7 +47,8 @@ def py_runtime_pair(name, py2_runtime = None, py3_runtime = None, **attrs): ```python # In 
your BUILD file... - load("@rules_python//python:defs.bzl", "py_runtime_pair") + load("@rules_python//python:py_runtime.bzl", "py_runtime") + load("@rules_python//python:py_runtime_pair.bzl", "py_runtime_pair") py_runtime( name = "my_py3_runtime", diff --git a/python/repositories.bzl b/python/repositories.bzl index 37cc34e271..e444c49a2b 100644 --- a/python/repositories.bzl +++ b/python/repositories.bzl @@ -18,8 +18,9 @@ For historic reasons, pip_repositories() is defined in //python:pip.bzl. """ load("@bazel_tools//tools/build_defs/repo:http.bzl", _http_archive = "http_archive") -load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe", "read_netrc", "read_user_netrc", "use_netrc") +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") load("//python/pip_install:repositories.bzl", "pip_install_dependencies") +load("//python/private:auth.bzl", "get_auth") load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED") load("//python/private:coverage_deps.bzl", "coverage_dep") load("//python/private:full_version.bzl", "full_version") @@ -92,28 +93,6 @@ def is_standalone_interpreter(rctx, python_interpreter_path): ), ]).return_code == 0 -def _get_auth(rctx, urls): - """Utility for retrieving netrc-based authentication parameters for repository download rules used in python_repository. - - The implementation below is copied directly from Bazel's implementation of `http_archive`. - Accordingly, the return value of this function should be used identically as the `auth` parameter of `http_archive`. - Reference: https://github.com/bazelbuild/bazel/blob/6.3.2/tools/build_defs/repo/http.bzl#L109 - - Args: - rctx (repository_ctx): The repository rule's context object. - urls: A list of URLs from which assets will be downloaded. - - Returns: - dict: A map of authentication parameters by URL. 
- """ - if rctx.attr.netrc: - netrc = read_netrc(rctx, rctx.attr.netrc) - elif "NETRC" in rctx.os.environ: - netrc = read_netrc(rctx, rctx.os.environ["NETRC"]) - else: - netrc = read_user_netrc(rctx) - return use_netrc(netrc, urls, rctx.attr.auth_patterns) - def _python_repository_impl(rctx): if rctx.attr.distutils and rctx.attr.distutils_content: fail("Only one of (distutils, distutils_content) should be set.") @@ -125,7 +104,7 @@ def _python_repository_impl(rctx): python_short_version = python_version.rpartition(".")[0] release_filename = rctx.attr.release_filename urls = rctx.attr.urls or [rctx.attr.url] - auth = _get_auth(rctx, urls) + auth = get_auth(rctx, urls) if release_filename.endswith(".zst"): rctx.download( @@ -289,7 +268,8 @@ def _python_repository_impl(rctx): build_content = """\ # Generated by python/repositories.bzl -load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") +load("@rules_python//python:py_runtime.bzl", "py_runtime") +load("@rules_python//python:py_runtime_pair.bzl", "py_runtime_pair") load("@rules_python//python/cc:py_cc_toolchain.bzl", "py_cc_toolchain") package(default_visibility = ["//visibility:public"]) diff --git a/sphinxdocs/private/BUILD.bazel b/sphinxdocs/private/BUILD.bazel index a8701d956d..01758b3cad 100644 --- a/sphinxdocs/private/BUILD.bazel +++ b/sphinxdocs/private/BUILD.bazel @@ -13,7 +13,9 @@ # limitations under the License. 
load("@bazel_skylib//:bzl_library.bzl", "bzl_library") +load("//python:proto.bzl", "py_proto_library") load("//python:py_binary.bzl", "py_binary") +load("//python:py_library.bzl", "py_library") package( default_visibility = ["//sphinxdocs:__subpackages__"], @@ -70,3 +72,28 @@ py_binary( # Only public because it's an implicit attribute visibility = ["//:__subpackages__"], ) + +py_binary( + name = "proto_to_markdown", + srcs = ["proto_to_markdown.py"], + # Only public because it's an implicit attribute + visibility = ["//:__subpackages__"], + deps = [":proto_to_markdown_lib"], +) + +py_library( + name = "proto_to_markdown_lib", + srcs = ["proto_to_markdown.py"], + # Only public because it's an implicit attribute + visibility = ["//:__subpackages__"], + deps = [ + ":stardoc_output_proto_py_pb2", + ], +) + +py_proto_library( + name = "stardoc_output_proto_py_pb2", + deps = [ + "@io_bazel_stardoc//stardoc/proto:stardoc_output_proto", + ], +) diff --git a/sphinxdocs/private/func_template.vm b/sphinxdocs/private/func_template.vm deleted file mode 100644 index 81dd2036ac..0000000000 --- a/sphinxdocs/private/func_template.vm +++ /dev/null @@ -1,57 +0,0 @@ -#set( $nl = " -" ) -#set( $fn = $funcInfo.functionName) -#set( $fnl = $fn.replaceAll("[.]", "_").toLowerCase()) -{.starlark-object} -#[[##]]# $fn - -#set( $hasParams = false) -{.starlark-signature} -${funcInfo.functionName}(## Comment to consume newline -#foreach ($param in $funcInfo.getParameterList()) -#if($param.name != "self") -#set( $hasParams = true) -[${param.name}](#${fnl}_${param.name})## Comment to consume newline -#if(!$param.getDefaultValue().isEmpty()) -=$param.getDefaultValue()#end#if($foreach.hasNext), -#end -#end -#end -) - -${funcInfo.docString} - -#if ($hasParams) -{#${fnl}_parameters} -**PARAMETERS** [¶](#${fnl}_parameters){.headerlink} - -#foreach ($param in $funcInfo.getParameterList()) -#if($param.name != "self") -#set($link = $fnl + "_" + $param.name) -#if($foreach.first) -{.params-box} -#end -## 
The .span wrapper is necessary so the trailing colon doesn't wrap -:[${param.name}[¶](#$link){.headerlink}]{.span}: - {#$link} -#if(!$param.getDefaultValue().isEmpty()) (_default `${param.getDefaultValue()}`_) #end -#if(!$param.docString.isEmpty()) - $param.docString.replaceAll("$nl", "$nl ") -#else - _undocumented_ -#end -#end -#end -#end -#if (!$funcInfo.getReturn().docString.isEmpty()) - -{#${fnl}_returns} -RETURNS [¶](#${fnl}_returns){.headerlink} -: ${funcInfo.getReturn().docString.replaceAll("$nl", "$nl ")} -#end -#if (!$funcInfo.getDeprecated().docString.isEmpty()) - -**DEPRECATED** - -${funcInfo.getDeprecated().docString} -#end diff --git a/sphinxdocs/private/header_template.vm b/sphinxdocs/private/header_template.vm deleted file mode 100644 index 81496ffbba..0000000000 --- a/sphinxdocs/private/header_template.vm +++ /dev/null @@ -1,3 +0,0 @@ -# %%BZL_LOAD_PATH%% - -$moduleDocstring diff --git a/sphinxdocs/private/proto_to_markdown.py b/sphinxdocs/private/proto_to_markdown.py new file mode 100644 index 0000000000..18d4e1e045 --- /dev/null +++ b/sphinxdocs/private/proto_to_markdown.py @@ -0,0 +1,488 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import io +import itertools +import pathlib +import sys +import textwrap +from typing import Callable, TextIO, TypeVar + +from stardoc.proto import stardoc_output_pb2 + +_AttributeType = stardoc_output_pb2.AttributeType + +_T = TypeVar("_T") + + +def _anchor_id(text: str) -> str: + # MyST/Sphinx's markdown processing doesn't like dots in anchor ids. + return "#" + text.replace(".", "_").lower() + + +# Create block attribute line. +# See https://myst-parser.readthedocs.io/en/latest/syntax/optional.html#block-attributes +def _block_attrs(*attrs: str) -> str: + return "{" + " ".join(attrs) + "}\n" + + +def _link(display: str, link: str = "", *, ref: str = "", classes: str = "") -> str: + if ref: + ref = f"[{ref}]" + if link: + link = f"({link})" + if classes: + classes = "{" + classes + "}" + return f"[{display}]{ref}{link}{classes}" + + +def _span(display: str, classes: str = ".span") -> str: + return f"[{display}]{{" + classes + "}" + + +def _link_here_icon(anchor: str) -> str: + # The headerlink class activates some special logic to show/hide + # text upon mouse-over; it's how headings show a clickable link. 
+ return _link("¶", anchor, classes=".headerlink") + + +def _inline_anchor(anchor: str) -> str: + return _span("", anchor) + + +def _indent_block_text(text: str) -> str: + return text.strip().replace("\n", "\n ") + + +def _join_csv_and(values: list[str]) -> str: + if len(values) == 1: + return values[0] + + values = list(values) + values[-1] = "and " + values[-1] + return ", ".join(values) + + +def _position_iter(values: list[_T]) -> tuple[bool, bool, _T]: + for i, value in enumerate(values): + yield i == 0, i == len(values) - 1, value + + +class _MySTRenderer: + def __init__( + self, + module: stardoc_output_pb2.ModuleInfo, + out_stream: TextIO, + public_load_path: str, + ): + self._module = module + self._out_stream = out_stream + self._public_load_path = public_load_path + + def render(self): + self._render_module(self._module) + + def _render_module(self, module: stardoc_output_pb2.ModuleInfo): + if self._public_load_path: + bzl_path = self._public_load_path + else: + bzl_path = "//" + self._module.file.split("//")[1] + self._write( + f"# {bzl_path}\n", + "\n", + module.module_docstring.strip(), + "\n\n", + ) + + # Sort the objects by name + objects = itertools.chain( + ((r.rule_name, r, self._render_rule) for r in module.rule_info), + ((p.provider_name, p, self._render_provider) for p in module.provider_info), + ((f.function_name, f, self._render_func) for f in module.func_info), + ((a.aspect_name, a, self._render_aspect) for a in module.aspect_info), + ( + (m.extension_name, m, self._render_module_extension) + for m in module.module_extension_info + ), + ( + (r.rule_name, r, self._render_repository_rule) + for r in module.repository_rule_info + ), + ) + + objects = sorted(objects, key=lambda v: v[0].lower()) + + for _, obj, func in objects: + func(obj) + self._write("\n") + + def _render_aspect(self, aspect: stardoc_output_pb2.AspectInfo): + aspect_anchor = _anchor_id(aspect.aspect_name) + self._write( + _block_attrs(".starlark-object"), + f"## 
{aspect.aspect_name}\n\n", + "_Propagates on attributes:_ ", # todo add link here + ", ".join(sorted(f"`{attr}`" for attr in aspect.aspect_attribute)), + "\n\n", + aspect.doc_string.strip(), + "\n\n", + ) + + if aspect.attribute: + self._render_attributes(aspect_anchor, aspect.attribute) + self._write("\n") + + def _render_module_extension(self, mod_ext: stardoc_output_pb2.ModuleExtensionInfo): + self._write( + _block_attrs(".starlark-object"), + f"## {mod_ext.extension_name}\n\n", + ) + + self._write(mod_ext.doc_string.strip(), "\n\n") + + mod_ext_anchor = _anchor_id(mod_ext.extension_name) + for tag in mod_ext.tag_class: + tag_name = f"{mod_ext.extension_name}.{tag.tag_name}" + tag_anchor = f"{mod_ext_anchor}_{tag.tag_name}" + self._write( + _block_attrs(".starlark-module-extension-tag-class"), + f"### {tag_name}\n\n", + ) + self._render_signature( + tag_name, + tag_anchor, + tag.attribute, + get_name=lambda a: a.name, + get_default=lambda a: a.default_value, + ) + + self._write(tag.doc_string.strip(), "\n\n") + self._render_attributes(tag_anchor, tag.attribute) + self._write("\n") + + def _render_repository_rule(self, repo_rule: stardoc_output_pb2.RepositoryRuleInfo): + self._write( + _block_attrs(".starlark-object"), + f"## {repo_rule.rule_name}\n\n", + ) + repo_anchor = _anchor_id(repo_rule.rule_name) + self._render_signature( + repo_rule.rule_name, + repo_anchor, + repo_rule.attribute, + get_name=lambda a: a.name, + get_default=lambda a: a.default_value, + ) + self._write(repo_rule.doc_string.strip(), "\n\n") + if repo_rule.attribute: + self._render_attributes(repo_anchor, repo_rule.attribute) + if repo_rule.environ: + self._write( + "**ENVIRONMENT VARIABLES** ", + _link_here_icon(repo_anchor + "_env"), + "\n", + ) + for name in sorted(repo_rule.environ): + self._write(f"* `{name}`\n") + self._write("\n") + + def _render_rule(self, rule: stardoc_output_pb2.RuleInfo): + rule_name = rule.rule_name + rule_anchor = _anchor_id(rule_name) + self._write( + 
_block_attrs(".starlark-object"), + f"## {rule_name}\n\n", + ) + + self._render_signature( + rule_name, + rule_anchor, + rule.attribute, + get_name=lambda r: r.name, + get_default=lambda r: r.default_value, + ) + + self._write(rule.doc_string.strip(), "\n\n") + + if len(rule.advertised_providers.provider_name) == 0: + self._write("_Provides_: no providers advertised.") + else: + self._write( + "_Provides_: ", + ", ".join(rule.advertised_providers.provider_name), + ) + self._write("\n\n") + + if rule.attribute: + self._render_attributes(rule_anchor, rule.attribute) + + def _rule_attr_type_string(self, attr: stardoc_output_pb2.AttributeInfo) -> str: + if attr.type == _AttributeType.NAME: + return _link("Name", ref="target-name") + elif attr.type == _AttributeType.INT: + return _link("int", ref="int") + elif attr.type == _AttributeType.LABEL: + return _link("label", ref="attr-label") + elif attr.type == _AttributeType.STRING: + return _link("string", ref="str") + elif attr.type == _AttributeType.STRING_LIST: + return "list of " + _link("string", ref="str") + elif attr.type == _AttributeType.INT_LIST: + return "list of " + _link("int", ref="int") + elif attr.type == _AttributeType.LABEL_LIST: + return "list of " + _link("label", ref="attr-label") + "s" + elif attr.type == _AttributeType.BOOLEAN: + return _link("bool", ref="bool") + elif attr.type == _AttributeType.LABEL_STRING_DICT: + return "dict of {key} to {value}".format( + key=_link("label", ref="attr-label"), value=_link("string", ref="str") + ) + elif attr.type == _AttributeType.STRING_DICT: + return "dict of {key} to {value}".format( + key=_link("string", ref="str"), value=_link("string", ref="str") + ) + elif attr.type == _AttributeType.STRING_LIST_DICT: + return "dict of {key} to list of {value}".format( + key=_link("string", ref="str"), value=_link("string", ref="str") + ) + elif attr.type == _AttributeType.OUTPUT: + return _link("label", ref="attr-label") + elif attr.type == _AttributeType.OUTPUT_LIST: + 
return "list of " + _link("label", ref="attr-label") + else: + # If we get here, it means the value was unknown for some reason. + # Rather than error, give some somewhat understandable value. + return _AttributeType.Name(attr.type) + + def _render_func(self, func: stardoc_output_pb2.StarlarkFunctionInfo): + func_name = func.function_name + func_anchor = _anchor_id(func_name) + self._write( + _block_attrs(".starlark-object"), + f"## {func_name}\n\n", + ) + + parameters = [param for param in func.parameter if param.name != "self"] + + self._render_signature( + func_name, + func_anchor, + parameters, + get_name=lambda p: p.name, + get_default=lambda p: p.default_value, + ) + + self._write(func.doc_string.strip(), "\n\n") + + if parameters: + self._write( + _block_attrs(f"{func_anchor}_parameters"), + "**PARAMETERS** ", + _link_here_icon(f"{func_anchor}_parameters"), + "\n\n", + ) + entries = [] + for param in parameters: + entries.append( + [ + f"{func_anchor}_{param.name}", + param.name, + f"(_default `{param.default_value}`_) " + if param.default_value + else "", + param.doc_string if param.doc_string else "_undocumented_", + ] + ) + self._render_field_list(entries) + + if getattr(func, "return").doc_string: + return_doc = _indent_block_text(getattr(func, "return").doc_string) + self._write( + _block_attrs(f"{func_anchor}_returns"), + "RETURNS", + _link_here_icon(func_anchor + "_returns"), + "\n", + ": ", + return_doc, + "\n", + ) + if func.deprecated.doc_string: + self._write( + "\n\n**DEPRECATED**\n\n", func.deprecated.doc_string.strip(), "\n" + ) + + def _render_provider(self, provider: stardoc_output_pb2.ProviderInfo): + self._write( + _block_attrs(".starlark-object"), + f"## {provider.provider_name}\n\n", + ) + + provider_anchor = _anchor_id(provider.provider_name) + self._render_signature( + provider.provider_name, + provider_anchor, + provider.field_info, + get_name=lambda f: f.name, + ) + + self._write(provider.doc_string.strip(), "\n\n") + + if 
provider.field_info: + self._write( + _block_attrs(provider_anchor), + "**FIELDS** ", + _link_here_icon(provider_anchor + "_fields"), + "\n", + "\n", + ) + entries = [] + for field in provider.field_info: + entries.append( + [ + f"{provider_anchor}_{field.name}", + field.name, + field.doc_string, + ] + ) + self._render_field_list(entries) + + def _render_attributes( + self, base_anchor: str, attributes: list[stardoc_output_pb2.AttributeInfo] + ): + self._write( + _block_attrs(f"{base_anchor}_attributes"), + "**ATTRIBUTES** ", + _link_here_icon(f"{base_anchor}_attributes"), + "\n", + ) + entries = [] + for attr in attributes: + anchor = f"{base_anchor}_{attr.name}" + required = "required" if attr.mandatory else "optional" + attr_type = self._rule_attr_type_string(attr) + default = f", default `{attr.default_value}`" if attr.default_value else "" + providers_parts = [] + if attr.provider_name_group: + providers_parts.append("\n\n_Required providers_: ") + if len(attr.provider_name_group) == 1: + provider_group = attr.provider_name_group[0] + if len(provider_group.provider_name) == 1: + providers_parts.append(provider_group.provider_name[0]) + else: + providers_parts.extend( + ["all of ", _join_csv_and(provider_group.provider_name)] + ) + elif len(attr.provider_name_group) > 1: + providers_parts.append("any of \n") + for group in attr.provider_name_group: + providers_parts.extend(["* ", _join_csv_and(group.provider_name)]) + if providers_parts: + providers_parts.append("\n") + + entries.append( + [ + anchor, + attr.name, + f"_({required} {attr_type}{default})_\n", + attr.doc_string, + *providers_parts, + ] + ) + self._render_field_list(entries) + + def _render_signature( + self, + name: str, + base_anchor: str, + parameters: list[_T], + *, + get_name: Callable[_T, str], + get_default: Callable[_T, str] = lambda v: None, + ): + self._write(_block_attrs(".starlark-signature"), name, "(") + for _, is_last, param in _position_iter(parameters): + param_name = 
get_name(param) + self._write(_link(param_name, f"{base_anchor}_{param_name}")) + default_value = get_default(param) + if default_value: + self._write(f"={default_value}") + if not is_last: + self._write(",\n") + self._write(")\n\n") + + def _render_field_list(self, entries: list[list[str]]): + """Render a list of field lists. + + Args: + entries: list of field list entries. Each element is 3 + pieces: an anchor, field description, and one or more + text strings for the body of the field list entry. + """ + for anchor, description, *body_pieces in entries: + body_pieces = [_block_attrs(anchor), *body_pieces] + self._write( + ":", + _span(description + _link_here_icon(anchor)), + ":\n ", + # The text has to be indented to be associated with the block correctly. + "".join(body_pieces).strip().replace("\n", "\n "), + "\n", + ) + # Ensure there is an empty line after the field list, otherwise + # the next line of content will fold into the field list + self._write("\n") + + def _write(self, *lines: str): + self._out_stream.writelines(lines) + + +def _convert( + *, + proto: pathlib.Path, + output: pathlib.Path, + footer: pathlib.Path, + public_load_path: str, +): + if footer: + footer_content = footer.read_text() + + module = stardoc_output_pb2.ModuleInfo.FromString(proto.read_bytes()) + with output.open("wt", encoding="utf8") as out_stream: + _MySTRenderer(module, out_stream, public_load_path).render() + out_stream.write(footer_content) + + +def _create_parser(): + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument("--footer", dest="footer", type=pathlib.Path) + parser.add_argument("--proto", dest="proto", type=pathlib.Path) + parser.add_argument("--output", dest="output", type=pathlib.Path) + parser.add_argument("--public-load-path", dest="public_load_path") + return parser + + +def main(args): + options = _create_parser().parse_args(args) + _convert( + proto=options.proto, + output=options.output, + footer=options.footer, + 
public_load_path=options.public_load_path, + ) + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/sphinxdocs/private/provider_template.vm b/sphinxdocs/private/provider_template.vm deleted file mode 100644 index 49ae8947ad..0000000000 --- a/sphinxdocs/private/provider_template.vm +++ /dev/null @@ -1,30 +0,0 @@ -#set( $nl = " -" ) -#set( $pn = $providerInfo.providerName) -#set( $pnl = $pn.replaceAll("[.]", "_").toLowerCase()) -{.starlark-object} -#[[##]]# ${providerName} - -#set( $hasFields = false) -{.starlark-signature} -${providerInfo.providerName}(## Comment to consume newline -#foreach ($field in $providerInfo.getFieldInfoList()) -#set( $hasFields = true) -[${field.name}](#${pnl}_${field.name})## Comment to consume newline -#if($foreach.hasNext), -#end -#end -) - -$providerInfo.docString - -#if ($hasFields) -{#${pnl}_fields} -**FIELDS** [¶](#${pnl}_fields){.headerlink} - -#foreach ($field in $providerInfo.getFieldInfoList()) -#set($link = $pnl + "_" + $field.name) -:[${field.name}[¶](#$link){.headerlink}]{.span}: []{#$link} - $field.docString.replaceAll("$nl", "$nl ") -#end -#end diff --git a/sphinxdocs/private/rule_template.vm b/sphinxdocs/private/rule_template.vm deleted file mode 100644 index d91bad20cb..0000000000 --- a/sphinxdocs/private/rule_template.vm +++ /dev/null @@ -1,48 +0,0 @@ -#set( $nl = " -" ) -#set( $rn = $ruleInfo.ruleName) -#set( $rnl = $rn.replaceAll("[.]", "_").toLowerCase()) -{.starlark-object} -#[[##]]# $ruleName - -#set( $hasAttrs = false) -{.starlark-signature} -${ruleInfo.ruleName}(## Comment to consume newline -#foreach ($attr in $ruleInfo.getAttributeList()) -#set( $hasAttrs = true) -[${attr.name}](#${rnl}_${attr.name})## Comment to consume newline -#if(!$attr.getDefaultValue().isEmpty()) -=$attr.getDefaultValue()#end#if($foreach.hasNext), -#end -#end -) - -$ruleInfo.docString - -#if ($hasAttrs) -{#${rnl}_attributes} -**ATTRIBUTES** [¶](#${rnl}_attributes){.headerlink} - -#foreach ($attr in 
$ruleInfo.getAttributeList()) -#set($link = $rnl + "_" + $attr.name) -#if($attr.mandatory) -#set($opt = "required") -#else -#set($opt = "optional") -#end -#if($attr.type == "NAME") -#set($type = "[Name][target-name]") -#elseif($attr.type == "LABEL_LIST") -#set($type = "list of [label][attr-label]s") -#end -#if(!$attr.getDefaultValue().isEmpty()) -#set($default = ", default `" + $attr.getDefaultValue() + "`") -#else -#set($default = "") -#end -:[${attr.name}[¶](#$link){.headerlink}]{.span}: []{#$link} - _($opt $type$default)_ - $attr.docString.replaceAll("$nl", "$nl ") - -#end -#end diff --git a/sphinxdocs/private/sphinx.bzl b/sphinxdocs/private/sphinx.bzl index 8b3244b607..daff02d112 100644 --- a/sphinxdocs/private/sphinx.bzl +++ b/sphinxdocs/private/sphinx.bzl @@ -45,7 +45,17 @@ def sphinx_build_binary(name, py_binary_rule = py_binary, **kwargs): **kwargs ) -def sphinx_docs(name, *, srcs = [], sphinx, config, formats, strip_prefix = "", extra_opts = [], **kwargs): +def sphinx_docs( + name, + *, + srcs = [], + renamed_srcs = {}, + sphinx, + config, + formats, + strip_prefix = "", + extra_opts = [], + **kwargs): """Generate docs using Sphinx. This generates three public targets: @@ -62,6 +72,9 @@ def sphinx_docs(name, *, srcs = [], sphinx, config, formats, strip_prefix = "", Args: name: (str) name of the docs rule. srcs: (label list) The source files for Sphinx to process. + renamed_srcs: (label_keyed_string_dict) Doc source files for Sphinx that + are renamed. This is typically used for files elsewhere, such as top + level files in the repo. sphinx: (label) the Sphinx tool to use for building documentation. Because Sphinx supports various plugins, you must construct your own binary with the necessary dependencies. 
The @@ -83,6 +96,7 @@ def sphinx_docs(name, *, srcs = [], sphinx, config, formats, strip_prefix = "", _sphinx_docs( name = name, srcs = srcs, + renamed_srcs = renamed_srcs, sphinx = sphinx, config = config, formats = formats, @@ -143,6 +157,12 @@ _sphinx_docs = rule( "other options, but before the source/output args.", ), "formats": attr.string_list(doc = "Output formats for Sphinx to create."), + "renamed_srcs": attr.label_keyed_string_dict( + allow_files = True, + doc = "Doc source files for Sphinx that are renamed. This is " + + "typically used for files elsewhere, such as top level " + + "files in the repo.", + ), "sphinx": attr.label( executable = True, cfg = "exec", @@ -189,6 +209,23 @@ def _create_sphinx_source_tree(ctx): for orig_file in ctx.files.srcs: _symlink_source(orig_file) + for src_target, dest in ctx.attr.renamed_srcs.items(): + src_files = src_target.files.to_list() + if len(src_files) != 1: + fail("A single file must be specified to be renamed. Target {} " + + "generate {} files: {}".format( + src_target, + len(src_files), + src_files, + )) + sphinx_src = ctx.actions.declare_file(paths.join(source_prefix, dest)) + ctx.actions.symlink( + output = sphinx_src, + target_file = src_files[0], + progress_message = "Symlinking (renamed) Sphinx source %{input} to %{output}", + ) + sphinx_source_files.append(sphinx_src) + return sphinx_source_dir_path, source_conf_file, sphinx_source_files def _run_sphinx(ctx, format, source_path, inputs, output_prefix): diff --git a/sphinxdocs/private/sphinx_stardoc.bzl b/sphinxdocs/private/sphinx_stardoc.bzl index 1371d907f7..810dca3524 100644 --- a/sphinxdocs/private/sphinx_stardoc.bzl +++ b/sphinxdocs/private/sphinx_stardoc.bzl @@ -19,11 +19,6 @@ load("@bazel_skylib//rules:build_test.bzl", "build_test") load("@io_bazel_stardoc//stardoc:stardoc.bzl", "stardoc") load("//python/private:util.bzl", "add_tag", "copy_propagating_kwargs") # buildifier: disable=bzl-visibility -_FUNC_TEMPLATE = 
Label("//sphinxdocs/private:func_template.vm") -_HEADER_TEMPLATE = Label("//sphinxdocs/private:header_template.vm") -_RULE_TEMPLATE = Label("//sphinxdocs/private:rule_template.vm") -_PROVIDER_TEMPLATE = Label("//sphinxdocs/private:provider_template.vm") - def sphinx_stardocs(name, docs, footer = None, **kwargs): """Generate Sphinx-friendly Markdown docs using Stardoc for bzl libraries. @@ -83,58 +78,62 @@ def sphinx_stardocs(name, docs, footer = None, **kwargs): ) def _sphinx_stardoc(*, name, out, footer = None, public_load_path = None, **kwargs): - if footer: - stardoc_name = "_{}_stardoc".format(name.lstrip("_")) - stardoc_out = "_{}_stardoc.out".format(name.lstrip("_")) - else: - stardoc_name = name - stardoc_out = out + stardoc_name = "_{}_stardoc".format(name.lstrip("_")) + stardoc_pb = stardoc_name + ".binaryproto" if not public_load_path: public_load_path = str(kwargs["input"]) - header_name = "_{}_header".format(name.lstrip("_")) - _expand_stardoc_template( - name = header_name, - template = _HEADER_TEMPLATE, - substitutions = { - "%%BZL_LOAD_PATH%%": public_load_path, - }, - ) - stardoc( name = stardoc_name, - func_template = _FUNC_TEMPLATE, - header_template = header_name, - rule_template = _RULE_TEMPLATE, - provider_template = _PROVIDER_TEMPLATE, - out = stardoc_out, + out = stardoc_pb, + format = "proto", **kwargs ) - if footer: - native.genrule( - name = name, - srcs = [stardoc_out, footer], - outs = [out], - cmd = "cat $(SRCS) > $(OUTS)", - message = "SphinxStardoc: Adding footer to {}".format(name), - **copy_propagating_kwargs(kwargs) - ) - -def _expand_stardoc_template_impl(ctx): - out = ctx.actions.declare_file(ctx.label.name + ".vm") - ctx.actions.expand_template( - template = ctx.file.template, + _stardoc_proto_to_markdown( + name = name, + src = stardoc_pb, output = out, - substitutions = ctx.attr.substitutions, + footer = footer, + public_load_path = public_load_path, + ) + +def _stardoc_proto_to_markdown_impl(ctx): + args = ctx.actions.args() 
+ args.use_param_file("@%s") + args.set_param_file_format("multiline") + + inputs = [ctx.file.src] + args.add("--proto", ctx.file.src) + args.add("--output", ctx.outputs.output) + + if ctx.file.footer: + args.add("--footer", ctx.file.footer) + inputs.append(ctx.file.footer) + if ctx.attr.public_load_path: + args.add("--public-load-path={}".format(ctx.attr.public_load_path)) + + ctx.actions.run( + executable = ctx.executable._proto_to_markdown, + arguments = [args], + inputs = inputs, + outputs = [ctx.outputs.output], + mnemonic = "SphinxStardocProtoToMd", + progress_message = "SphinxStardoc: converting proto to markdown: %{input} -> %{output}", ) - return [DefaultInfo(files = depset([out]))] -_expand_stardoc_template = rule( - implementation = _expand_stardoc_template_impl, +_stardoc_proto_to_markdown = rule( + implementation = _stardoc_proto_to_markdown_impl, attrs = { - "substitutions": attr.string_dict(), - "template": attr.label(allow_single_file = True), + "footer": attr.label(allow_single_file = True), + "output": attr.output(mandatory = True), + "public_load_path": attr.string(), + "src": attr.label(allow_single_file = True, mandatory = True), + "_proto_to_markdown": attr.label( + default = "//sphinxdocs/private:proto_to_markdown", + executable = True, + cfg = "exec", + ), }, ) diff --git a/sphinxdocs/tests/BUILD.bazel b/sphinxdocs/tests/BUILD.bazel new file mode 100644 index 0000000000..41010956cf --- /dev/null +++ b/sphinxdocs/tests/BUILD.bazel @@ -0,0 +1,13 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/sphinxdocs/tests/proto_to_markdown/BUILD.bazel b/sphinxdocs/tests/proto_to_markdown/BUILD.bazel new file mode 100644 index 0000000000..2964785eed --- /dev/null +++ b/sphinxdocs/tests/proto_to_markdown/BUILD.bazel @@ -0,0 +1,24 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//python:py_test.bzl", "py_test") + +py_test( + name = "proto_to_markdown_test", + srcs = ["proto_to_markdown_test.py"], + deps = [ + "//sphinxdocs/private:proto_to_markdown_lib", + "@dev_pip//absl_py", + ], +) diff --git a/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py b/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py new file mode 100644 index 0000000000..2f5b22e60b --- /dev/null +++ b/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py @@ -0,0 +1,203 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import re + +from absl.testing import absltest +from google.protobuf import text_format +from stardoc.proto import stardoc_output_pb2 + +from sphinxdocs.private import proto_to_markdown + +_EVERYTHING_MODULE = """\ +module_docstring: "MODULE_DOC_STRING" +file: "@repo//pkg:foo.bzl" + +rule_info: { + rule_name: "rule_1" + doc_string: "RULE_1_DOC_STRING" + attribute: { + name: "rule_1_attr_1", + doc_string: "RULE_1_ATTR_1_DOC_STRING" + type: STRING + default_value: "RULE_1_ATTR_1_DEFAULT_VALUE" + } +} +provider_info: { + provider_name: "ProviderAlpha" + doc_string: "PROVIDER_ALPHA_DOC_STRING" + field_info: { + name: "ProviderAlpha_field_a" + doc_string: "PROVIDER_ALPHA_FIELD_A_DOC_STRING" + } +} +func_info: { + function_name: "function_1" + doc_string: "FUNCTION_1_DOC_STRING" + parameter: { + name: "function_1_param_a" + doc_string: "FUNCTION_1_PARAM_A_DOC_STRING" + default_value: "FUNCTION_1_PARAM_A_DEFAULT_VALUE" + } + return: { + doc_string: "FUNCTION_1_RETURN_DOC_STRING" + } + deprecated: { + doc_string: "FUNCTION_1_DEPRECATED_DOC_STRING" + } +} +aspect_info: { + aspect_name: "aspect_1" + doc_string: "ASPECT_1_DOC_STRING" + aspect_attribute: "aspect_1_aspect_attribute_a" + attribute: { + name: "aspect_1_attribute_a", + doc_string: "ASPECT_1_ATTRIBUTE_A_DOC_STRING" + type: INT + default_value: "694638" + } +} +module_extension_info: { + extension_name: "bzlmod_ext" + doc_string: "BZLMOD_EXT_DOC_STRING" + tag_class: { + tag_name: "bzlmod_ext_tag_a" + doc_string: "BZLMOD_EXT_TAG_A_DOC_STRING" + attribute: { + name: 
"bzlmod_ext_tag_a_attribute_1", + doc_string: "BZLMOD_EXT_TAG_A_ATTRIBUTE_1_DOC_STRING" + type: STRING_LIST + default_value: "[BZLMOD_EXT_TAG_A_ATTRIBUTE_1_DEFAULT_VALUE]" + } + } +} +repository_rule_info: { + rule_name: "repository_rule", + doc_string: "REPOSITORY_RULE_DOC_STRING" + attribute: { + name: "repository_rule_attribute_a", + doc_string: "REPOSITORY_RULE_ATTRIBUTE_A_DOC_STRING" + type: BOOLEAN + default_value: "True" + } + environ: "ENV_VAR_A" +} +""" + + +class ProtoToMarkdownTest(absltest.TestCase): + def setUp(self): + super().setUp() + self.stream = io.StringIO() + + def _render(self, module_text): + renderer = proto_to_markdown._MySTRenderer( + module=text_format.Parse(module_text, stardoc_output_pb2.ModuleInfo()), + out_stream=self.stream, + public_load_path="", + ) + renderer.render() + return self.stream.getvalue() + + def test_basic_rendering_everything(self): + actual = self._render(_EVERYTHING_MODULE) + + self.assertRegex(actual, "# //pkg:foo.bzl") + self.assertRegex(actual, "MODULE_DOC_STRING") + + self.assertRegex(actual, "## rule_1.*") + self.assertRegex(actual, "RULE_1_DOC_STRING") + self.assertRegex(actual, "rule_1_attr_1") + self.assertRegex(actual, "RULE_1_ATTR_1_DOC_STRING") + self.assertRegex(actual, "RULE_1_ATTR_1_DEFAULT_VALUE") + + self.assertRegex(actual, "## ProviderAlpha") + self.assertRegex(actual, "PROVIDER_ALPHA_DOC_STRING") + self.assertRegex(actual, "ProviderAlpha_field_a") + self.assertRegex(actual, "PROVIDER_ALPHA_FIELD_A_DOC_STRING") + + self.assertRegex(actual, "## function_1") + self.assertRegex(actual, "FUNCTION_1_DOC_STRING") + self.assertRegex(actual, "function_1_param_a") + self.assertRegex(actual, "FUNCTION_1_PARAM_A_DOC_STRING") + self.assertRegex(actual, "FUNCTION_1_PARAM_A_DEFAULT_VALUE") + self.assertRegex(actual, "FUNCTION_1_RETURN_DOC_STRING") + self.assertRegex(actual, "FUNCTION_1_DEPRECATED_DOC_STRING") + + self.assertRegex(actual, "## aspect_1") + self.assertRegex(actual, "ASPECT_1_DOC_STRING") + 
self.assertRegex(actual, "aspect_1_aspect_attribute_a") + self.assertRegex(actual, "aspect_1_attribute_a") + self.assertRegex(actual, "ASPECT_1_ATTRIBUTE_A_DOC_STRING") + self.assertRegex(actual, "694638") + + self.assertRegex(actual, "## bzlmod_ext") + self.assertRegex(actual, "BZLMOD_EXT_DOC_STRING") + self.assertRegex(actual, "### bzlmod_ext.bzlmod_ext_tag_a") + self.assertRegex(actual, "BZLMOD_EXT_TAG_A_DOC_STRING") + self.assertRegex(actual, "bzlmod_ext_tag_a_attribute_1") + self.assertRegex(actual, "BZLMOD_EXT_TAG_A_ATTRIBUTE_1_DOC_STRING") + self.assertRegex(actual, "BZLMOD_EXT_TAG_A_ATTRIBUTE_1_DEFAULT_VALUE") + + self.assertRegex(actual, "## repository_rule") + self.assertRegex(actual, "REPOSITORY_RULE_DOC_STRING") + self.assertRegex(actual, "repository_rule_attribute_a") + self.assertRegex(actual, "REPOSITORY_RULE_ATTRIBUTE_A_DOC_STRING") + self.assertRegex(actual, "repository_rule_attribute_a.*=.*True") + self.assertRegex(actual, "ENV_VAR_A") + + def test_render_signature(self): + actual = self._render( + """\ +file: "@repo//pkg:foo.bzl" +func_info: { + function_name: "func" + parameter: { + name: "param_with_default" + default_value: "DEFAULT" + } + parameter: { + name: "param_without_default" + } + parameter: { + name: "last_param" + } +} + """ + ) + self.assertIn("[param_with_default](#func_param_with_default)=DEFAULT,", actual) + self.assertIn("[param_without_default](#func_param_without_default),", actual) + + def test_render_field_list(self): + actual = self._render( + """\ +file: "@repo//pkg:foo.bzl" +func_info: { + function_name: "func" + parameter: { + name: "param" + default_value: "DEFAULT" + } +} +""" + ) + self.assertRegex( + actual, re.compile("^:.*param.*¶.*headerlink.*:\n", re.MULTILINE) + ) + self.assertRegex(actual, re.compile("^ .*#func_param", re.MULTILINE)) + + +if __name__ == "__main__": + absltest.main() diff --git a/tests/config_settings/transition/BUILD.bazel b/tests/config_settings/transition/BUILD.bazel index 
21fa50e16d..19d4958669 100644 --- a/tests/config_settings/transition/BUILD.bazel +++ b/tests/config_settings/transition/BUILD.bazel @@ -1,3 +1,6 @@ +load(":multi_version_tests.bzl", "multi_version_test_suite") load(":py_args_tests.bzl", "py_args_test_suite") py_args_test_suite(name = "py_args_tests") + +multi_version_test_suite(name = "multi_version_tests") diff --git a/tests/config_settings/transition/multi_version_tests.bzl b/tests/config_settings/transition/multi_version_tests.bzl new file mode 100644 index 0000000000..32f7209c9f --- /dev/null +++ b/tests/config_settings/transition/multi_version_tests.bzl @@ -0,0 +1,68 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for py_test.""" + +load("@rules_testing//lib:analysis_test.bzl", "analysis_test") +load("@rules_testing//lib:test_suite.bzl", "test_suite") +load("@rules_testing//lib:util.bzl", rt_util = "util") +load("//python:versions.bzl", "TOOL_VERSIONS") +load("//python/config_settings:transition.bzl", py_binary_transitioned = "py_binary", py_test_transitioned = "py_test") + +_tests = [] + +def _test_py_test_with_transition(name): + rt_util.helper_target( + py_test_transitioned, + name = name + "_subject", + srcs = [name + "_subject.py"], + python_version = TOOL_VERSIONS.keys()[0], + ) + + analysis_test( + name = name, + target = name + "_subject", + impl = _test_py_test_with_transition_impl, + ) + +def _test_py_test_with_transition_impl(env, target): + # Nothing to assert; we just want to make sure it builds + _ = env, target # @unused + +_tests.append(_test_py_test_with_transition) + +def _test_py_binary_with_transition(name): + rt_util.helper_target( + py_binary_transitioned, + name = name + "_subject", + srcs = [name + "_subject.py"], + python_version = TOOL_VERSIONS.keys()[0], + ) + + analysis_test( + name = name, + target = name + "_subject", + impl = _test_py_binary_with_transition_impl, + ) + +def _test_py_binary_with_transition_impl(env, target): + # Nothing to assert; we just want to make sure it builds + _ = env, target # @unused + +_tests.append(_test_py_binary_with_transition) + +def multi_version_test_suite(name): + test_suite( + name = name, + tests = _tests, + ) diff --git a/tests/pip_install/whl_library/generate_build_bazel_tests.bzl b/tests/pip_install/whl_library/generate_build_bazel_tests.bzl index c65beb54ae..b89477fd4c 100644 --- a/tests/pip_install/whl_library/generate_build_bazel_tests.bzl +++ b/tests/pip_install/whl_library/generate_build_bazel_tests.bzl @@ -39,7 +39,15 @@ filegroup( filegroup( name = "_whl", srcs = ["foo.whl"], - data = ["@pypi_bar_baz//:whl", "@pypi_foo//:whl"], + data = [ + "@pypi_bar_baz//:whl", + "@pypi_foo//:whl", + ] 
+ select( + { + "@platforms//os:windows": ["@pypi_colorama//:whl"], + "//conditions:default": [], + }, + ), visibility = ["//visibility:private"], ) @@ -59,7 +67,15 @@ py_library( # This makes this directory a top-level in the python import # search path for anything that depends on this. imports = ["site-packages"], - deps = ["@pypi_bar_baz//:pkg", "@pypi_foo//:pkg"], + deps = [ + "@pypi_bar_baz//:pkg", + "@pypi_foo//:pkg", + ] + select( + { + "@platforms//os:windows": ["@pypi_colorama//:pkg"], + "//conditions:default": [], + }, + ), tags = ["tag1", "tag2"], visibility = ["//visibility:private"], ) @@ -78,6 +94,7 @@ alias( repo_prefix = "pypi_", whl_name = "foo.whl", dependencies = ["foo", "bar-baz"], + dependencies_by_platform = {"@platforms//os:windows": ["colorama"]}, data_exclude = [], tags = ["tag1", "tag2"], entry_points = {}, @@ -107,7 +124,10 @@ filegroup( filegroup( name = "_whl", srcs = ["foo.whl"], - data = ["@pypi_bar_baz//:whl", "@pypi_foo//:whl"], + data = [ + "@pypi_bar_baz//:whl", + "@pypi_foo//:whl", + ], visibility = ["//visibility:private"], ) @@ -127,7 +147,10 @@ py_library( # This makes this directory a top-level in the python import # search path for anything that depends on this. 
imports = ["site-packages"], - deps = ["@pypi_bar_baz//:pkg", "@pypi_foo//:pkg"], + deps = [ + "@pypi_bar_baz//:pkg", + "@pypi_foo//:pkg", + ], tags = ["tag1", "tag2"], visibility = ["//visibility:private"], ) @@ -162,6 +185,7 @@ copy_file( repo_prefix = "pypi_", whl_name = "foo.whl", dependencies = ["foo", "bar-baz"], + dependencies_by_platform = {}, data_exclude = [], tags = ["tag1", "tag2"], entry_points = {}, @@ -198,7 +222,10 @@ filegroup( filegroup( name = "_whl", srcs = ["foo.whl"], - data = ["@pypi_bar_baz//:whl", "@pypi_foo//:whl"], + data = [ + "@pypi_bar_baz//:whl", + "@pypi_foo//:whl", + ], visibility = ["//visibility:private"], ) @@ -218,7 +245,10 @@ py_library( # This makes this directory a top-level in the python import # search path for anything that depends on this. imports = ["site-packages"], - deps = ["@pypi_bar_baz//:pkg", "@pypi_foo//:pkg"], + deps = [ + "@pypi_bar_baz//:pkg", + "@pypi_foo//:pkg", + ], tags = ["tag1", "tag2"], visibility = ["//visibility:private"], ) @@ -246,6 +276,7 @@ py_binary( repo_prefix = "pypi_", whl_name = "foo.whl", dependencies = ["foo", "bar-baz"], + dependencies_by_platform = {}, data_exclude = [], tags = ["tag1", "tag2"], entry_points = {"fizz": "buzz.py"}, @@ -275,7 +306,16 @@ filegroup( filegroup( name = "_whl", srcs = ["foo.whl"], - data = ["@pypi_bar_baz//:whl"], + data = ["@pypi_bar_baz//:whl"] + select( + { + ":is_linux_x86_64": [ + "@pypi_box//:whl", + "@pypi_box_amd64//:whl", + ], + "@platforms//os:linux": ["@pypi_box//:whl"], + "//conditions:default": [], + }, + ), visibility = ["@pypi__groups//:__pkg__"], ) @@ -295,7 +335,16 @@ py_library( # This makes this directory a top-level in the python import # search path for anything that depends on this. 
imports = ["site-packages"], - deps = ["@pypi_bar_baz//:pkg"], + deps = ["@pypi_bar_baz//:pkg"] + select( + { + ":is_linux_x86_64": [ + "@pypi_box//:pkg", + "@pypi_box_amd64//:pkg", + ], + "@platforms//os:linux": ["@pypi_box//:pkg"], + "//conditions:default": [], + }, + ), tags = [], visibility = ["@pypi__groups//:__pkg__"], ) @@ -309,17 +358,31 @@ alias( name = "whl", actual = "@pypi__groups//:qux_whl", ) + +config_setting( + name = "is_linux_x86_64", + constraint_values = [ + "@platforms//cpu:x86_64", + "@platforms//os:linux", + ], + visibility = ["//visibility:private"], +) """ actual = generate_whl_library_build_bazel( repo_prefix = "pypi_", whl_name = "foo.whl", dependencies = ["foo", "bar-baz", "qux"], + dependencies_by_platform = { + "linux_x86_64": ["box", "box-amd64"], + "windows_x86_64": ["fox"], + "@platforms//os:linux": ["box"], # buildifier: disable=unsorted-dict-items + }, tags = [], entry_points = {}, data_exclude = [], annotation = None, group_name = "qux", - group_deps = ["foo", "qux"], + group_deps = ["foo", "fox", "qux"], ) env.expect.that_str(actual).equals(want) diff --git a/tests/private/text_util/BUILD.bazel b/tests/private/text_util/BUILD.bazel new file mode 100644 index 0000000000..c9c2106a12 --- /dev/null +++ b/tests/private/text_util/BUILD.bazel @@ -0,0 +1,17 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load(":render_tests.bzl", "render_test_suite") + +render_test_suite(name = "render_tests") diff --git a/tests/private/text_util/render_tests.bzl b/tests/private/text_util/render_tests.bzl new file mode 100644 index 0000000000..7c3dddfc7f --- /dev/null +++ b/tests/private/text_util/render_tests.bzl @@ -0,0 +1,63 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"" + +load("@rules_testing//lib:test_suite.bzl", "test_suite") +load("//python/private:text_util.bzl", "render") # buildifier: disable=bzl-visibility + +_tests = [] + +def _test_render_alias(env): + tests = [ + struct( + args = dict( + name = "foo", + actual = repr("bar"), + ), + want = [ + "alias(", + ' name = "foo",', + ' actual = "bar",', + ")", + ], + ), + struct( + args = dict( + name = "foo", + actual = repr("bar"), + visibility = ["//:__pkg__"], + ), + want = [ + "alias(", + ' name = "foo",', + ' actual = "bar",', + ' visibility = ["//:__pkg__"],', + ")", + ], + ), + ] + for test in tests: + got = render.alias(**test.args) + env.expect.that_str(got).equals("\n".join(test.want).strip()) + +_tests.append(_test_render_alias) + +def render_test_suite(name): + """Create the test suite. 
+ + Args: + name: the name of the test suite + """ + test_suite(name = name, basic_tests = _tests) diff --git a/tests/py_runtime_pair/py_runtime_pair_tests.bzl b/tests/py_runtime_pair/py_runtime_pair_tests.bzl index 74da1818cf..236f1ba3a5 100644 --- a/tests/py_runtime_pair/py_runtime_pair_tests.bzl +++ b/tests/py_runtime_pair/py_runtime_pair_tests.bzl @@ -17,6 +17,7 @@ load("@rules_testing//lib:analysis_test.bzl", "analysis_test") load("@rules_testing//lib:test_suite.bzl", "test_suite") load("@rules_testing//lib:truth.bzl", "matching", "subjects") load("@rules_testing//lib:util.bzl", rt_util = "util") +load("//python:py_binary.bzl", "py_binary") load("//python:py_runtime.bzl", "py_runtime") load("//python:py_runtime_pair.bzl", "py_runtime_pair") load("//python/private:reexports.bzl", "BuiltinPyRuntimeInfo") # buildifier: disable=bzl-visibility @@ -99,6 +100,46 @@ def _test_builtin_py_info_accepted_impl(env, target): _tests.append(_test_builtin_py_info_accepted) +def _test_py_runtime_pair_and_binary(name): + rt_util.helper_target( + py_runtime, + name = name + "_runtime", + interpreter_path = "/fake_interpreter", + python_version = "PY3", + ) + rt_util.helper_target( + py_runtime_pair, + name = name + "_pair", + py3_runtime = name + "_runtime", + ) + native.toolchain( + name = name + "_toolchain", + toolchain = name + "_pair", + toolchain_type = "//python:toolchain_type", + ) + rt_util.helper_target( + py_binary, + name = name + "_subject", + srcs = [name + "_subject.py"], + ) + analysis_test( + name = name, + target = name + "_subject", + impl = _test_py_runtime_pair_and_binary_impl, + config_settings = { + "//command_line_option:extra_toolchains": [ + "//tests/py_runtime_pair:{}_toolchain".format(name), + "//tests/cc:all", + ], + }, + ) + +def _test_py_runtime_pair_and_binary_impl(env, target): + # Building indicates success, so nothing to assert + _ = env, target # @unused + +_tests.append(_test_py_runtime_pair_and_binary) + def py_runtime_pair_test_suite(name): 
test_suite( name = name, diff --git a/tests/pycross/0001-Add-new-file-for-testing-patch-support.patch b/tests/pycross/0001-Add-new-file-for-testing-patch-support.patch new file mode 100644 index 0000000000..fcbc3096ef --- /dev/null +++ b/tests/pycross/0001-Add-new-file-for-testing-patch-support.patch @@ -0,0 +1,17 @@ +From b2ebe6fe67ff48edaf2ae937d24b1f0b67c16f81 Mon Sep 17 00:00:00 2001 +From: Philipp Schrader +Date: Thu, 28 Sep 2023 09:02:44 -0700 +Subject: [PATCH] Add new file for testing patch support + +--- + site-packages/numpy/file_added_via_patch.txt | 1 + + 1 file changed, 1 insertion(+) + create mode 100644 site-packages/numpy/file_added_via_patch.txt + +diff --git a/site-packages/numpy/file_added_via_patch.txt b/site-packages/numpy/file_added_via_patch.txt +new file mode 100644 +index 0000000..9d947a4 +--- /dev/null ++++ b/site-packages/numpy/file_added_via_patch.txt +@@ -0,0 +1 @@ ++Hello from a patch! diff --git a/tests/pycross/BUILD.bazel b/tests/pycross/BUILD.bazel index 4f01272b7c..52d1d18480 100644 --- a/tests/pycross/BUILD.bazel +++ b/tests/pycross/BUILD.bazel @@ -32,3 +32,33 @@ py_test( "//python/runfiles", ], ) + +py_wheel_library( + name = "patched_extracted_wheel_for_testing", + patch_args = [ + "-p1", + ], + patch_tool = "patch", + patches = [ + "0001-Add-new-file-for-testing-patch-support.patch", + ], + target_compatible_with = select({ + # We don't have `patch` available on the Windows CI machines. 
+ "@platforms//os:windows": ["@platforms//:incompatible"], + "//conditions:default": [], + }), + wheel = "@wheel_for_testing//file", +) + +py_test( + name = "patched_py_wheel_library_test", + srcs = [ + "patched_py_wheel_library_test.py", + ], + data = [ + ":patched_extracted_wheel_for_testing", + ], + deps = [ + "//python/runfiles", + ], +) diff --git a/tests/pycross/patched_py_wheel_library_test.py b/tests/pycross/patched_py_wheel_library_test.py new file mode 100644 index 0000000000..4591187f57 --- /dev/null +++ b/tests/pycross/patched_py_wheel_library_test.py @@ -0,0 +1,38 @@ +# Copyright 2023 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +from pathlib import Path + +from python.runfiles import runfiles + +RUNFILES = runfiles.Create() + + +class TestPyWheelLibrary(unittest.TestCase): + def setUp(self): + self.extraction_dir = Path( + RUNFILES.Rlocation("rules_python/tests/pycross/patched_extracted_wheel_for_testing") + ) + self.assertTrue(self.extraction_dir.exists(), self.extraction_dir) + self.assertTrue(self.extraction_dir.is_dir(), self.extraction_dir) + + def test_patched_file_contents(self): + """Validate that the patch got applied correctly.""" + file = self.extraction_dir / "site-packages/numpy/file_added_via_patch.txt" + self.assertEqual(file.read_text(), "Hello from a patch!\n") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/pycross/py_wheel_library_test.py b/tests/pycross/py_wheel_library_test.py index fa8e20e563..25d896a1ae 100644 --- a/tests/pycross/py_wheel_library_test.py +++ b/tests/pycross/py_wheel_library_test.py @@ -23,9 +23,7 @@ class TestPyWheelLibrary(unittest.TestCase): def setUp(self): self.extraction_dir = Path( - RUNFILES.Rlocation( - "rules_python/tests/pycross/extracted_wheel_for_testing" - ) + RUNFILES.Rlocation("rules_python/tests/pycross/extracted_wheel_for_testing") ) self.assertTrue(self.extraction_dir.exists(), self.extraction_dir) self.assertTrue(self.extraction_dir.is_dir(), self.extraction_dir) diff --git a/third_party/rules_pycross/pycross/private/tools/wheel_installer.py b/third_party/rules_pycross/pycross/private/tools/wheel_installer.py index 8367f08d41..0c352cf129 100644 --- a/third_party/rules_pycross/pycross/private/tools/wheel_installer.py +++ b/third_party/rules_pycross/pycross/private/tools/wheel_installer.py @@ -20,6 +20,7 @@ import argparse import os import shutil +import subprocess import sys import tempfile from pathlib import Path @@ -97,6 +98,29 @@ def main(args: Any) -> None: setup_namespace_pkg_compatibility(lib_dir) + if args.patch: + if not args.patch_tool and not args.patch_tool_target: + raise 
ValueError("Specify one of 'patch_tool' or 'patch_tool_target'.") + + patch_args = [ + args.patch_tool or Path.cwd() / args.patch_tool_target + ] + args.patch_arg + for patch in args.patch: + with patch.open("r") as stdin: + try: + subprocess.run( + patch_args, + stdin=stdin, + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=args.directory, + ) + except subprocess.CalledProcessError as error: + print(f"Patch {patch} failed to apply:") + print(error.stdout.decode("utf-8")) + raise + def parse_flags(argv) -> Any: parser = argparse.ArgumentParser(description="Extract a Python wheel.") @@ -127,6 +151,40 @@ def parse_flags(argv) -> Any: help="The output path.", ) + parser.add_argument( + "--patch", + type=Path, + default=[], + action="append", + help="A patch file to apply.", + ) + + parser.add_argument( + "--patch-arg", + type=str, + default=[], + action="append", + help="An argument for the patch tool when applying the patches.", + ) + + parser.add_argument( + "--patch-tool", + type=str, + help=( + "The tool from PATH to invoke when applying patches. " + "If set, --patch-tool-target is ignored." + ), + ) + + parser.add_argument( + "--patch-tool-target", + type=Path, + help=( + "The path to the tool to invoke when applying patches. " + "Ignored when --patch-tool is set." 
+ ), + ) + return parser.parse_args(argv[1:]) diff --git a/third_party/rules_pycross/pycross/private/wheel_library.bzl b/third_party/rules_pycross/pycross/private/wheel_library.bzl index 381511a2f1..166e1d06eb 100644 --- a/third_party/rules_pycross/pycross/private/wheel_library.bzl +++ b/third_party/rules_pycross/pycross/private/wheel_library.bzl @@ -33,19 +33,31 @@ def _py_wheel_library_impl(ctx): args = ctx.actions.args().use_param_file("--flagfile=%s") args.add("--wheel", wheel_file) args.add("--directory", out.path) + args.add_all(ctx.files.patches, format_each = "--patch=%s") + args.add_all(ctx.attr.patch_args, format_each = "--patch-arg=%s") + args.add("--patch-tool", ctx.attr.patch_tool) - inputs = [wheel_file] + tools = [] + inputs = [wheel_file] + ctx.files.patches if name_file: inputs.append(name_file) args.add("--wheel-name-file", name_file) + if ctx.attr.patch_tool_target: + args.add("--patch-tool-target", ctx.attr.patch_tool_target.files_to_run.executable) + tools.append(ctx.executable.patch_tool_target) + if ctx.attr.enable_implicit_namespace_pkgs: args.add("--enable-implicit-namespace-pkgs") + # We apply patches in the same action as the extraction to minimize the + # number of times we cache the wheel contents. If we were to split this + # into 2 actions, then the wheel contents would be cached twice. ctx.actions.run( inputs = inputs, outputs = [out], executable = ctx.executable._tool, + tools = tools, arguments = [args], # Set environment variables to make generated .pyc files reproducible. env = { @@ -119,6 +131,31 @@ and py_test targets must specify either `legacy_create_init=False` or the global This option is required to support some packages which cannot handle the conversion to pkg-util style. """, ), + "patch_args": attr.string_list( + default = ["-p0"], + doc = + "The arguments given to the patch tool. Defaults to -p0, " + + "however -p1 will usually be needed for patches generated by " + + "git. 
If multiple -p arguments are specified, the last one will take effect.", + ), + "patch_tool": attr.string( + doc = "The patch(1) utility from the host to use. " + + "If set, overrides `patch_tool_target`. Please note that setting " + + "this means that builds are not completely hermetic.", + ), + "patch_tool_target": attr.label( + executable = True, + cfg = "exec", + doc = "The label of the patch(1) utility to use. " + + "Only used if `patch_tool` is not set.", + ), + "patches": attr.label_list( + allow_files = True, + default = [], + doc = + "A list of files that are to be applied as patches after " + + "extracting the archive. This will use the patch command line tool.", + ), "python_version": attr.string( doc = "The python version required for this wheel ('PY2' or 'PY3')", values = ["PY2", "PY3", ""], diff --git a/tools/private/update_deps/update_coverage_deps.py b/tools/private/update_deps/update_coverage_deps.py index 72baa44796..6152d70b3a 100755 --- a/tools/private/update_deps/update_coverage_deps.py +++ b/tools/private/update_deps/update_coverage_deps.py @@ -151,7 +151,7 @@ def _parse_args() -> argparse.Namespace: def main(): args = _parse_args() - api_url = f"https://pypi.python.org/pypi/{args.name}/{args.version}/json" + api_url = f"https://pypi.org/pypi/{args.name}/{args.version}/json" req = request.Request(api_url) with request.urlopen(req) as response: data = json.loads(response.read().decode("utf-8")) diff --git a/version.bzl b/version.bzl index bf6f822d98..bcc2b20eb3 100644 --- a/version.bzl +++ b/version.bzl @@ -17,11 +17,11 @@ # against. # This version should be updated together with the version of Bazel # in .bazelversion. -BAZEL_VERSION = "6.2.0" +BAZEL_VERSION = "7.0.0" # NOTE: Keep in sync with .bazelci/presubmit.yml # This is the minimum supported bazel version, that we have some tests for. -MINIMUM_BAZEL_VERSION = "5.4.0" +MINIMUM_BAZEL_VERSION = "6.2.0" # Versions of Bazel which users should be able to use. 
# Ensures we don't break backwards-compatibility,