refactor: move the remaining PyPI related functions to private/pypi (#2006)

A continuation of #2003, this time we finish moving the `pip_compile`
stuff.

Summary:
- move multi_pip_parse to private/pypi and re-export
- remove unused files leftover from #2003
- move repositories.bzl to private/pypi/deps.bzl
- move pip_compile to private/pypi
- move the pip_install tools to private
diff --git a/python/private/bzlmod/internal_deps.bzl b/python/private/bzlmod/internal_deps.bzl
index 62ca71f..e0eca9e 100644
--- a/python/private/bzlmod/internal_deps.bzl
+++ b/python/private/bzlmod/internal_deps.bzl
@@ -9,12 +9,12 @@
 "Python toolchain module extension for internal rule use"
 
 load("@bazel_skylib//lib:modules.bzl", "modules")
-load("//python/pip_install:repositories.bzl", "pip_install_dependencies")
 load("//python/private:internal_config_repo.bzl", "internal_config_repo")
+load("//python/private/pypi:deps.bzl", "pypi_deps")
 
 def _internal_deps():
     internal_config_repo(name = "rules_python_internal")
-    pip_install_dependencies()
+    pypi_deps()
 
 internal_deps = modules.as_extension(
     _internal_deps,
diff --git a/python/private/normalize_name.bzl b/python/private/normalize_name.bzl
index aaeca80..7898222 100644
--- a/python/private/normalize_name.bzl
+++ b/python/private/normalize_name.bzl
@@ -38,7 +38,6 @@
 https://packaging.python.org/en/latest/specifications/name-normalization/
 """
 
-# Keep in sync with ../pip_install/tools/lib/bazel.py
 def normalize_name(name):
     """normalize a PyPI package name and return a valid bazel label.
 
diff --git a/python/private/pypi/BUILD.bazel b/python/private/pypi/BUILD.bazel
index 1530837..e7ae735 100644
--- a/python/private/pypi/BUILD.bazel
+++ b/python/private/pypi/BUILD.bazel
@@ -21,7 +21,13 @@
 
 filegroup(
     name = "distribution",
-    srcs = glob(["**"]),
+    srcs = glob(
+        ["**"],
+        exclude = ["requirements.txt"],
+    ) + [
+        "//python/private/pypi/dependency_resolver:distribution",
+        "//python/private/pypi/whl_installer:distribution",
+    ],
     visibility = ["//python/private:__pkg__"],
 )
 
@@ -29,7 +35,16 @@
 filegroup(
     name = "bzl",
     srcs = glob(["**/*.bzl"]),
-    visibility = ["//python/private:__pkg__"],
+    visibility = [
+        "//python/private:__pkg__",
+        "//tools/private:__pkg__",
+    ],
+)
+
+filegroup(
+    name = "requirements_txt",
+    srcs = ["requirements.txt"],
+    visibility = ["//tools/private/update_deps:__pkg__"],
 )
 
 # Keep sorted by library name and keep the files named by the main symbol they export
@@ -67,6 +82,14 @@
 )
 
 bzl_library(
+    name = "deps_bzl",
+    srcs = ["deps.bzl"],
+    deps = [
+        "//python/private:bazel_tools_bzl",
+    ],
+)
+
+bzl_library(
     name = "flags_bzl",
     srcs = ["flags.bzl"],
     deps = ["//python/private:enum_bzl"],
@@ -119,6 +142,12 @@
 )
 
 bzl_library(
+    name = "multi_pip_parse_bzl",
+    srcs = ["multi_pip_parse.bzl"],
+    deps = ["pip_repository_bzl"],
+)
+
+bzl_library(
     name = "package_annotation_bzl",
     srcs = ["package_annotation.bzl"],
 )
@@ -159,6 +188,15 @@
 )
 
 bzl_library(
+    name = "pip_compile_bzl",
+    srcs = ["pip_compile.bzl"],
+    deps = [
+        ":deps_bzl",
+        "//python:defs_bzl",
+    ],
+)
+
+bzl_library(
     name = "pip_repository_bzl",
     srcs = ["pip_repository.bzl"],
     deps = [
@@ -204,17 +242,26 @@
 )
 
 bzl_library(
+    name = "whl_library_alias_bzl",
+    srcs = ["whl_library_alias.bzl"],
+    deps = [
+        ":render_pkg_aliases_bzl",
+        "//python/private:full_version_bzl",
+    ],
+)
+
+bzl_library(
     name = "whl_library_bzl",
     srcs = ["whl_library.bzl"],
     deps = [
         ":attrs_bzl",
+        ":deps_bzl",
         ":generate_whl_library_build_bazel_bzl",
         ":parse_whl_name_bzl",
         ":patch_whl_bzl",
         ":whl_target_platforms_bzl",
         "//python:repositories_bzl",
         "//python:versions_bzl",
-        "//python/pip_install:repositories_bzl",
         "//python/private:auth_bzl",
         "//python/private:envsubst_bzl",
         "//python/private:repo_utils_bzl",
diff --git a/python/private/pypi/dependency_resolver/BUILD.bazel b/python/private/pypi/dependency_resolver/BUILD.bazel
new file mode 100644
index 0000000..9531b55
--- /dev/null
+++ b/python/private/pypi/dependency_resolver/BUILD.bazel
@@ -0,0 +1,7 @@
+exports_files(["dependency_resolver.py"])
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]),
+    visibility = ["//python/private/pypi:__subpackages__"],
+)
diff --git a/python/private/pypi/dependency_resolver/__init__.py b/python/private/pypi/dependency_resolver/__init__.py
new file mode 100644
index 0000000..4101095
--- /dev/null
+++ b/python/private/pypi/dependency_resolver/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/python/private/pypi/dependency_resolver/dependency_resolver.py b/python/private/pypi/dependency_resolver/dependency_resolver.py
new file mode 100644
index 0000000..afe5076
--- /dev/null
+++ b/python/private/pypi/dependency_resolver/dependency_resolver.py
@@ -0,0 +1,232 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"Set defaults for the pip-compile command to run it under Bazel"
+
+import atexit
+import os
+import shutil
+import sys
+from pathlib import Path
+from typing import Optional, Tuple
+
+import click
+import piptools.writer as piptools_writer
+from piptools.scripts.compile import cli
+
+from python.runfiles import runfiles
+
+# Replace the os.replace function with shutil.copy to work around os.replace not being able to
+# replace or move files across filesystems.
+os.replace = shutil.copy
+
+# Next, we override the annotation_style_split and annotation_style_line functions to replace the
+# backslashes in the paths with forward slashes. This is so that we can have the same requirements
+# file on Windows and Unix-like.
+original_annotation_style_split = piptools_writer.annotation_style_split
+original_annotation_style_line = piptools_writer.annotation_style_line
+
+
+def annotation_style_split(required_by) -> str:
+    required_by = set([v.replace("\\", "/") for v in required_by])
+    return original_annotation_style_split(required_by)
+
+
+def annotation_style_line(required_by) -> str:
+    required_by = set([v.replace("\\", "/") for v in required_by])
+    return original_annotation_style_line(required_by)
+
+
+piptools_writer.annotation_style_split = annotation_style_split
+piptools_writer.annotation_style_line = annotation_style_line
+
+
+def _select_golden_requirements_file(
+    requirements_txt, requirements_linux, requirements_darwin, requirements_windows
+):
+    """Switch the golden requirements file, used to validate if updates are needed,
+    to a specified platform specific one.  Fall back on the platform independent one.
+    """
+
+    plat = sys.platform
+    if plat == "linux" and requirements_linux is not None:
+        return requirements_linux
+    elif plat == "darwin" and requirements_darwin is not None:
+        return requirements_darwin
+    elif plat == "win32" and requirements_windows is not None:
+        return requirements_windows
+    else:
+        return requirements_txt
+
+
+def _locate(bazel_runfiles, file):
+    """Look up the file via Rlocation"""
+
+    if not file:
+        return file
+
+    return bazel_runfiles.Rlocation(file)
+
+
+@click.command(context_settings={"ignore_unknown_options": True})
+@click.argument("requirements_in")
+@click.argument("requirements_txt")
+@click.argument("update_target_label")
+@click.option("--requirements-linux")
+@click.option("--requirements-darwin")
+@click.option("--requirements-windows")
+@click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
+def main(
+    requirements_in: str,
+    requirements_txt: str,
+    update_target_label: str,
+    requirements_linux: Optional[str],
+    requirements_darwin: Optional[str],
+    requirements_windows: Optional[str],
+    extra_args: Tuple[str, ...],
+) -> None:
+    bazel_runfiles = runfiles.Create()
+
+    requirements_file = _select_golden_requirements_file(
+        requirements_txt=requirements_txt,
+        requirements_linux=requirements_linux,
+        requirements_darwin=requirements_darwin,
+        requirements_windows=requirements_windows,
+    )
+
+    resolved_requirements_in = _locate(bazel_runfiles, requirements_in)
+    resolved_requirements_file = _locate(bazel_runfiles, requirements_file)
+
+    # Files in the runfiles directory have the following naming schema:
+    # Main repo: __main__/<path_to_file>
+    # External repo: <workspace name>/<path_to_file>
+    # We want to strip both __main__ and <workspace name> from the absolute prefix
+    # to keep the requirements lock file agnostic.
+    repository_prefix = requirements_file[: requirements_file.index("/") + 1]
+    absolute_path_prefix = resolved_requirements_file[
+        : -(len(requirements_file) - len(repository_prefix))
+    ]
+
+    # As requirements_in might contain references to generated files we want to
+    # use the runfiles file first. Thus, we need to compute the relative path
+    # from the execution root.
+    # Note: Windows cannot reference generated files without runfiles support enabled.
+    requirements_in_relative = requirements_in[len(repository_prefix) :]
+    requirements_file_relative = requirements_file[len(repository_prefix) :]
+
+    # Before loading click, set the locale for its parser.
+    # If it leaks through to the system setting, it may fail:
+    # RuntimeError: Click will abort further execution because Python 3 was configured to use ASCII
+    # as encoding for the environment. Consult https://click.palletsprojects.com/python3/ for
+    # mitigation steps.
+    os.environ["LC_ALL"] = "C.UTF-8"
+    os.environ["LANG"] = "C.UTF-8"
+
+    argv = []
+
+    UPDATE = True
+    # Detect if we are running under `bazel test`.
+    if "TEST_TMPDIR" in os.environ:
+        UPDATE = False
+        # pip-compile wants the cache files to be writeable, but if we point
+        # to the real user cache, Bazel sandboxing makes the file read-only
+        # and we fail.
+        # In theory this makes the test more hermetic as well.
+        argv.append(f"--cache-dir={os.environ['TEST_TMPDIR']}")
+        # Make a copy for pip-compile to read and mutate.
+        requirements_out = os.path.join(
+            os.environ["TEST_TMPDIR"], os.path.basename(requirements_file) + ".out"
+        )
+        # Those two files won't necessarily be on the same filesystem, so we can't use os.replace
+        # or shutil.copyfile, as they will fail with OSError: [Errno 18] Invalid cross-device link.
+        shutil.copy(resolved_requirements_file, requirements_out)
+
+    update_command = os.getenv("CUSTOM_COMPILE_COMMAND") or "bazel run %s" % (
+        update_target_label,
+    )
+
+    os.environ["CUSTOM_COMPILE_COMMAND"] = update_command
+    os.environ["PIP_CONFIG_FILE"] = os.getenv("PIP_CONFIG_FILE") or os.devnull
+
+    argv.append(
+        f"--output-file={requirements_file_relative if UPDATE else requirements_out}"
+    )
+    argv.append(
+        requirements_in_relative
+        if Path(requirements_in_relative).exists()
+        else resolved_requirements_in
+    )
+    argv.extend(extra_args)
+
+    if UPDATE:
+        print("Updating " + requirements_file_relative)
+        if "BUILD_WORKSPACE_DIRECTORY" in os.environ:
+            workspace = os.environ["BUILD_WORKSPACE_DIRECTORY"]
+            requirements_file_tree = os.path.join(workspace, requirements_file_relative)
+            # In most cases, requirements_file will be a symlink to the real file in the source tree.
+            # If symlinks are not enabled (e.g. on Windows), then requirements_file will be a copy,
+            # and we should copy the updated requirements back to the source tree.
+            if not os.path.samefile(resolved_requirements_file, requirements_file_tree):
+                atexit.register(
+                    lambda: shutil.copy(
+                        resolved_requirements_file, requirements_file_tree
+                    )
+                )
+        cli(argv)
+        requirements_file_relative_path = Path(requirements_file_relative)
+        content = requirements_file_relative_path.read_text()
+        content = content.replace(absolute_path_prefix, "")
+        requirements_file_relative_path.write_text(content)
+    else:
+        # cli will exit(0) on success
+        try:
+            print("Checking " + requirements_file)
+            cli(argv)
+            print("cli() should exit", file=sys.stderr)
+            sys.exit(1)
+        except SystemExit as e:
+            if e.code == 2:
+                print(
+                    "pip-compile exited with code 2. This means that pip-compile found "
+                    "incompatible requirements or could not find a version that matches "
+                    f"the install requirement in {requirements_in_relative}.",
+                    file=sys.stderr,
+                )
+                sys.exit(1)
+            elif e.code == 0:
+                golden = open(_locate(bazel_runfiles, requirements_file)).readlines()
+                out = open(requirements_out).readlines()
+                out = [line.replace(absolute_path_prefix, "") for line in out]
+                if golden != out:
+                    import difflib
+
+                    print("".join(difflib.unified_diff(golden, out)), file=sys.stderr)
+                    print(
+                        "Lock file out of date. Run '"
+                        + update_command
+                        + "' to update.",
+                        file=sys.stderr,
+                    )
+                    sys.exit(1)
+                sys.exit(0)
+            else:
+                print(
+                    f"pip-compile unexpectedly exited with code {e.code}.",
+                    file=sys.stderr,
+                )
+                sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/private/pypi/deps.bzl b/python/private/pypi/deps.bzl
new file mode 100644
index 0000000..81bef7a
--- /dev/null
+++ b/python/private/pypi/deps.bzl
@@ -0,0 +1,143 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Dependencies that the PyPI integration tooling needs; fetched via pypi_deps()."""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+
+_RULE_DEPS = [
+    # START: maintained by 'bazel run //tools/private/update_deps:update_pip_deps'
+    (
+        "pypi__build",
+        "https://files.pythonhosted.org/packages/e2/03/f3c8ba0a6b6e30d7d18c40faab90807c9bb5e9a1e3b2fe2008af624a9c97/build-1.2.1-py3-none-any.whl",
+        "75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4",
+    ),
+    (
+        "pypi__click",
+        "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl",
+        "ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28",
+    ),
+    (
+        "pypi__colorama",
+        "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl",
+        "4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6",
+    ),
+    (
+        "pypi__importlib_metadata",
+        "https://files.pythonhosted.org/packages/2d/0a/679461c511447ffaf176567d5c496d1de27cbe34a87df6677d7171b2fbd4/importlib_metadata-7.1.0-py3-none-any.whl",
+        "30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570",
+    ),
+    (
+        "pypi__installer",
+        "https://files.pythonhosted.org/packages/e5/ca/1172b6638d52f2d6caa2dd262ec4c811ba59eee96d54a7701930726bce18/installer-0.7.0-py3-none-any.whl",
+        "05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53",
+    ),
+    (
+        "pypi__more_itertools",
+        "https://files.pythonhosted.org/packages/50/e2/8e10e465ee3987bb7c9ab69efb91d867d93959095f4807db102d07995d94/more_itertools-10.2.0-py3-none-any.whl",
+        "686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684",
+    ),
+    (
+        "pypi__packaging",
+        "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl",
+        "2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5",
+    ),
+    (
+        "pypi__pep517",
+        "https://files.pythonhosted.org/packages/25/6e/ca4a5434eb0e502210f591b97537d322546e4833dcb4d470a48c375c5540/pep517-0.13.1-py3-none-any.whl",
+        "31b206f67165b3536dd577c5c3f1518e8fbaf38cbc57efff8369a392feff1721",
+    ),
+    (
+        "pypi__pip",
+        "https://files.pythonhosted.org/packages/8a/6a/19e9fe04fca059ccf770861c7d5721ab4c2aebc539889e97c7977528a53b/pip-24.0-py3-none-any.whl",
+        "ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc",
+    ),
+    (
+        "pypi__pip_tools",
+        "https://files.pythonhosted.org/packages/0d/dc/38f4ce065e92c66f058ea7a368a9c5de4e702272b479c0992059f7693941/pip_tools-7.4.1-py3-none-any.whl",
+        "4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9",
+    ),
+    (
+        "pypi__pyproject_hooks",
+        "https://files.pythonhosted.org/packages/ae/f3/431b9d5fe7d14af7a32340792ef43b8a714e7726f1d7b69cc4e8e7a3f1d7/pyproject_hooks-1.1.0-py3-none-any.whl",
+        "7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2",
+    ),
+    (
+        "pypi__setuptools",
+        "https://files.pythonhosted.org/packages/de/88/70c5767a0e43eb4451c2200f07d042a4bcd7639276003a9c54a68cfcc1f8/setuptools-70.0.0-py3-none-any.whl",
+        "54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4",
+    ),
+    (
+        "pypi__tomli",
+        "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl",
+        "939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
+    ),
+    (
+        "pypi__wheel",
+        "https://files.pythonhosted.org/packages/7d/cd/d7460c9a869b16c3dd4e1e403cce337df165368c71d6af229a74699622ce/wheel-0.43.0-py3-none-any.whl",
+        "55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81",
+    ),
+    (
+        "pypi__zipp",
+        "https://files.pythonhosted.org/packages/da/55/a03fd7240714916507e1fcf7ae355bd9d9ed2e6db492595f1a67f61681be/zipp-3.18.2-py3-none-any.whl",
+        "dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e",
+    ),
+    # END: maintained by 'bazel run //tools/private/update_deps:update_pip_deps'
+]
+
+_GENERIC_WHEEL = """\
+package(default_visibility = ["//visibility:public"])
+
+load("@rules_python//python:defs.bzl", "py_library")
+
+py_library(
+    name = "lib",
+    srcs = glob(["**/*.py"]),
+    data = glob(["**/*"], exclude=[
+        # These entries include those put into user-installed dependencies by
+        # data_exclude to avoid non-determinism.
+        "**/*.py",
+        "**/*.pyc",
+        "**/*.pyc.*",  # During pyc creation, temp files named *.pyc.NNN are created
+        "**/* *",
+        "**/*.dist-info/RECORD",
+        "BUILD",
+        "WORKSPACE",
+    ]),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["."],
+)
+"""
+
+# Collate all the repository names so they can be easily consumed
+all_requirements = [name for (name, _, _) in _RULE_DEPS]
+
+def requirement(pkg):
+    return Label("@pypi__" + pkg + "//:lib")
+
+def pypi_deps():
+    """
+    Fetch dependencies these rules depend on. Workspaces that use the pip_parse rule can call this.
+    """
+    for (name, url, sha256) in _RULE_DEPS:
+        maybe(
+            http_archive,
+            name,
+            url = url,
+            sha256 = sha256,
+            type = "zip",
+            build_file_content = _GENERIC_WHEEL,
+        )
diff --git a/python/private/pypi/multi_pip_parse.bzl b/python/private/pypi/multi_pip_parse.bzl
new file mode 100644
index 0000000..fe9e2db
--- /dev/null
+++ b/python/private/pypi/multi_pip_parse.bzl
@@ -0,0 +1,160 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A pip_parse implementation for version aware toolchains in WORKSPACE."""
+
+load(":pip_repository.bzl", pip_parse = "pip_repository")
+
+def _multi_pip_parse_impl(rctx):
+    rules_python = rctx.attr._rules_python_workspace.workspace_name
+    load_statements = []
+    install_deps_calls = []
+    process_requirements_calls = []
+    for python_version, pypi_repository in rctx.attr.pip_parses.items():
+        sanitized_python_version = python_version.replace(".", "_")
+        load_statement = """\
+load(
+    "@{pypi_repository}//:requirements.bzl",
+    _{sanitized_python_version}_install_deps = "install_deps",
+    _{sanitized_python_version}_all_requirements = "all_requirements",
+)""".format(
+            pypi_repository = pypi_repository,
+            sanitized_python_version = sanitized_python_version,
+        )
+        load_statements.append(load_statement)
+        process_requirements_call = """\
+_process_requirements(
+    pkg_labels = _{sanitized_python_version}_all_requirements,
+    python_version = "{python_version}",
+    repo_prefix = "{pypi_repository}_",
+)""".format(
+            pypi_repository = pypi_repository,
+            python_version = python_version,
+            sanitized_python_version = sanitized_python_version,
+        )
+        process_requirements_calls.append(process_requirements_call)
+        install_deps_call = """    _{sanitized_python_version}_install_deps(**whl_library_kwargs)""".format(
+            sanitized_python_version = sanitized_python_version,
+        )
+        install_deps_calls.append(install_deps_call)
+
+    # NOTE @aignas 2023-10-31: I am not sure it is possible to render aliases
+    # for all of the packages using the `render_pkg_aliases` function because
+    # we need to know what the list of packages for each version is and then
+    # we would be creating directories for each.
+    macro_tmpl = "@%s_{}//:{}" % rctx.attr.name
+
+    requirements_bzl = """\
+# Generated by python/pip.bzl
+
+load("@{rules_python}//python:pip.bzl", "whl_library_alias", "pip_utils")
+{load_statements}
+
+_wheel_names = []
+_version_map = dict()
+def _process_requirements(pkg_labels, python_version, repo_prefix):
+    for pkg_label in pkg_labels:
+        wheel_name = Label(pkg_label).package
+        if not wheel_name:
+            # We are dealing with the cases where we don't have aliases.
+            workspace_name = Label(pkg_label).workspace_name
+            wheel_name = workspace_name[len(repo_prefix):]
+
+        _wheel_names.append(wheel_name)
+        if not wheel_name in _version_map:
+            _version_map[wheel_name] = dict()
+        _version_map[wheel_name][python_version] = repo_prefix
+
+{process_requirements_calls}
+
+def requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "pkg")
+
+def whl_requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "whl")
+
+def data_requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "data")
+
+def dist_info_requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "dist_info")
+
+def install_deps(**whl_library_kwargs):
+{install_deps_calls}
+    for wheel_name in _wheel_names:
+        whl_library_alias(
+            name = "{name}_" + wheel_name,
+            wheel_name = wheel_name,
+            default_version = "{default_version}",
+            version_map = _version_map[wheel_name],
+        )
+""".format(
+        name = rctx.attr.name,
+        install_deps_calls = "\n".join(install_deps_calls),
+        load_statements = "\n".join(load_statements),
+        macro_tmpl = macro_tmpl,
+        process_requirements_calls = "\n".join(process_requirements_calls),
+        rules_python = rules_python,
+        default_version = rctx.attr.default_version,
+    )
+    rctx.file("requirements.bzl", requirements_bzl)
+    rctx.file("BUILD.bazel", "exports_files(['requirements.bzl'])")
+
+_multi_pip_parse = repository_rule(
+    _multi_pip_parse_impl,
+    attrs = {
+        "default_version": attr.string(),
+        "pip_parses": attr.string_dict(),
+        "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
+    },
+)
+
+def multi_pip_parse(name, default_version, python_versions, python_interpreter_target, requirements_lock, **kwargs):
+    """NOT INTENDED FOR DIRECT USE!
+
+    This is intended to be used by the multi_pip_parse implementation in the template of the
+    multi_toolchain_aliases repository rule.
+
+    Args:
+        name: the name of the multi_pip_parse repository.
+        default_version: the default Python version.
+        python_versions: all Python toolchain versions currently registered.
+        python_interpreter_target: a dictionary whose keys are Python versions and values are resolved host interpreters.
+        requirements_lock: a dictionary whose keys are Python versions and values are locked requirements files.
+        **kwargs: extra arguments passed to all wrapped pip_parse.
+
+    Returns:
+        The internal implementation of multi_pip_parse repository rule.
+    """
+    pip_parses = {}
+    for python_version in python_versions:
+        if not python_version in python_interpreter_target:
+            fail("Missing python_interpreter_target for Python version %s in '%s'" % (python_version, name))
+        if not python_version in requirements_lock:
+            fail("Missing requirements_lock for Python version %s in '%s'" % (python_version, name))
+
+        pip_parse_name = name + "_" + python_version.replace(".", "_")
+        pip_parse(
+            name = pip_parse_name,
+            python_interpreter_target = python_interpreter_target[python_version],
+            requirements_lock = requirements_lock[python_version],
+            **kwargs
+        )
+        pip_parses[python_version] = pip_parse_name
+
+    return _multi_pip_parse(
+        name = name,
+        default_version = default_version,
+        pip_parses = pip_parses,
+    )
diff --git a/python/private/pypi/pip_compile.bzl b/python/private/pypi/pip_compile.bzl
new file mode 100644
index 0000000..7389e72
--- /dev/null
+++ b/python/private/pypi/pip_compile.bzl
@@ -0,0 +1,167 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Rules to verify and update pip-compile locked requirements.txt.
+
+NOTE @aignas 2024-06-23: We are using the implementation specific name here to
+make it possible to have multiple tools inside the `pypi` directory
+"""
+
+load("//python:defs.bzl", _py_binary = "py_binary", _py_test = "py_test")
+load(":deps.bzl", "requirement")
+
+def pip_compile(
+        name,
+        src = None,
+        extra_args = [],
+        extra_deps = [],
+        generate_hashes = True,
+        py_binary = _py_binary,
+        py_test = _py_test,
+        requirements_in = None,
+        requirements_txt = None,
+        requirements_darwin = None,
+        requirements_linux = None,
+        requirements_windows = None,
+        visibility = ["//visibility:private"],
+        tags = None,
+        **kwargs):
+    """Generates targets for managing pip dependencies with pip-compile.
+
+    By default this rules generates a filegroup named "[name]" which can be included in the data
+    of some other compile_pip_requirements rule that references these requirements
+    (e.g. with `-r ../other/requirements.txt`).
+
+    It also generates two targets for running pip-compile:
+
+    - validate with `bazel test [name]_test`
+    - update with   `bazel run [name].update`
+
+    If you are using a version control system, the requirements.txt generated by this rule should
+    be checked into it to ensure that all developers/users have the same dependency versions.
+
+    Args:
+        name: base name for generated targets, typically "requirements".
+        src: file containing inputs to dependency resolution. If not specified,
+            defaults to `pyproject.toml`. Supported formats are:
+            * a requirements text file, usually named `requirements.in`
+            * A `.toml` file, where the `project.dependencies` list is used as per
+              [PEP621](https://peps.python.org/pep-0621/).
+        extra_args: passed to pip-compile.
+        extra_deps: extra dependencies passed to pip-compile.
+        generate_hashes: whether to put hashes in the requirements_txt file.
+        py_binary: the py_binary rule to be used.
+        py_test: the py_test rule to be used.
+        requirements_in: file expressing desired dependencies. Deprecated, use src instead.
+        requirements_txt: result of "compiling" the requirements.in file.
+        requirements_linux: File of Linux-specific resolve output used to validate whether the input file has changed.
+        requirements_darwin: File of Darwin-specific resolve output used to validate whether the input file has changed.
+        requirements_windows: File of Windows-specific resolve output used to validate whether the input file has changed.
+        tags: tagging attribute common to all build rules, passed to both the _test and .update rules.
+        visibility: passed to both the _test and .update rules.
+        **kwargs: other bazel attributes passed to the "_test" rule.
+    """
+    # 'src' and 'requirements_in' are mutually exclusive spellings of the same input.
+    if requirements_in and src:
+        fail("Only one of 'src' and 'requirements_in' attributes can be used")
+    else:
+        src = requirements_in or src or "pyproject.toml"
+
+    requirements_txt = name + ".txt" if requirements_txt == None else requirements_txt
+
+    # "Default" target produced by this macro
+    # Allow a compile_pip_requirements rule to include another one in the data
+    # for a requirements file that does `-r ../other/requirements.txt`
+    native.filegroup(
+        name = name,
+        srcs = kwargs.pop("data", []) + [requirements_txt],
+        visibility = visibility,
+    )
+
+    data = [name, requirements_txt, src] + [f for f in (requirements_linux, requirements_darwin, requirements_windows) if f != None]
+
+    # Use the Label constructor so this is expanded in the context of the file
+    # where it appears, which is to say, in @rules_python
+    pip_compile = Label("//python/private/pypi/dependency_resolver:dependency_resolver.py")
+
+    # $(rlocationpath ...) expands to the runfiles-relative path of a label.
+    loc = "$(rlocationpath {})"
+
+    args = [
+        loc.format(src),
+        loc.format(requirements_txt),
+        "//%s:%s.update" % (native.package_name(), name),
+        "--resolver=backtracking",
+        "--allow-unsafe",
+    ]
+    if generate_hashes:
+        args.append("--generate-hashes")
+    if requirements_linux:
+        args.append("--requirements-linux={}".format(loc.format(requirements_linux)))
+    if requirements_darwin:
+        args.append("--requirements-darwin={}".format(loc.format(requirements_darwin)))
+    if requirements_windows:
+        args.append("--requirements-windows={}".format(loc.format(requirements_windows)))
+    args.extend(extra_args)
+
+    deps = [
+        requirement("build"),
+        requirement("click"),
+        requirement("colorama"),
+        requirement("importlib_metadata"),
+        requirement("more_itertools"),
+        requirement("packaging"),
+        requirement("pep517"),
+        requirement("pip"),
+        requirement("pip_tools"),
+        requirement("pyproject_hooks"),
+        requirement("setuptools"),
+        requirement("tomli"),
+        requirement("zipp"),
+        Label("//python/runfiles:runfiles"),
+    ] + extra_deps
+
+    # pip-compile talks to the package index over the network, so opt out of
+    # sandboxing and remote execution for both generated targets.
+    tags = tags or []
+    tags.append("requires-network")
+    tags.append("no-remote-exec")
+    tags.append("no-sandbox")
+    attrs = {
+        "args": args,
+        "data": data,
+        "deps": deps,
+        "main": pip_compile,
+        "srcs": [pip_compile],
+        "tags": tags,
+        "visibility": visibility,
+    }
+
+    # cheap way to detect the bazel version
+    _bazel_version_4_or_greater = "propeller_optimize" in dir(native)
+
+    # Bazel 4.0 added the "env" attribute to py_test/py_binary
+    if _bazel_version_4_or_greater:
+        attrs["env"] = kwargs.pop("env", {})
+
+    py_binary(
+        name = name + ".update",
+        **attrs
+    )
+
+    timeout = kwargs.pop("timeout", "short")
+
+    py_test(
+        name = name + "_test",
+        timeout = timeout,
+        # kwargs could contain test-specific attributes like size or timeout
+        **dict(attrs, **kwargs)
+    )
diff --git a/python/private/pypi/requirements.txt b/python/private/pypi/requirements.txt
new file mode 100755
index 0000000..006ef21
--- /dev/null
+++ b/python/private/pypi/requirements.txt
@@ -0,0 +1,14 @@
+build
+click
+colorama
+importlib_metadata
+installer
+more_itertools
+packaging
+pep517
+pip
+pip_tools >= 7.4.0
+setuptools
+tomli
+wheel
+zipp
diff --git a/python/private/pypi/whl_installer/BUILD.bazel b/python/private/pypi/whl_installer/BUILD.bazel
new file mode 100644
index 0000000..58231ce
--- /dev/null
+++ b/python/private/pypi/whl_installer/BUILD.bazel
@@ -0,0 +1,36 @@
+load("//python:defs.bzl", "py_binary", "py_library")
+load("//python/private/pypi:deps.bzl", "requirement")
+
+# Shared implementation of the wheel installer tool; also consumed by tests
+# and the vendored rules_pycross integration.
+py_library(
+    name = "lib",
+    srcs = [
+        "arguments.py",
+        "namespace_pkgs.py",
+        "wheel.py",
+        "wheel_installer.py",
+    ],
+    visibility = [
+        "//tests:__subpackages__",
+        "//third_party/rules_pycross/pycross/private:__subpackages__",
+    ],
+    deps = [
+        requirement("installer"),
+        requirement("pip"),
+        requirement("packaging"),
+        requirement("setuptools"),
+    ],
+)
+
+# Entry point executed by the whl repository rules.
+py_binary(
+    name = "wheel_installer",
+    srcs = [
+        "wheel_installer.py",
+    ],
+    deps = [":lib"],
+)
+
+# Sources bundled into the rules_python release distribution.
+filegroup(
+    name = "distribution",
+    srcs = glob(["*"]),
+    visibility = ["//python/private/pypi:__subpackages__"],
+)
diff --git a/python/private/pypi/whl_installer/arguments.py b/python/private/pypi/whl_installer/arguments.py
new file mode 100644
index 0000000..173d3a3
--- /dev/null
+++ b/python/private/pypi/whl_installer/arguments.py
@@ -0,0 +1,106 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import json
+import pathlib
+from typing import Any, Dict, Set
+
+from python.private.pypi.whl_installer import wheel
+
+
+def parser(**kwargs: Any) -> argparse.ArgumentParser:
+    """Create a parser for the wheel_installer tool.
+
+    Args:
+        **kwargs: forwarded to the `argparse.ArgumentParser` constructor
+            (e.g. `description`).
+
+    Returns:
+        The configured argument parser.
+    """
+    parser = argparse.ArgumentParser(
+        **kwargs,
+    )
+    parser.add_argument(
+        "--requirement",
+        action="store",
+        required=True,
+        help="A single PEP508 requirement specifier string.",
+    )
+    parser.add_argument(
+        "--isolated",
+        action="store_true",
+        help="Whether or not to include the `--isolated` pip flag.",
+    )
+    parser.add_argument(
+        "--extra_pip_args",
+        action="store",
+        help="Extra arguments to pass down to pip.",
+    )
+    # Each occurrence is parsed by wheel.Platform.from_string, which may itself
+    # expand to several platforms; "extend" flattens them into one list.
+    parser.add_argument(
+        "--platform",
+        action="extend",
+        type=wheel.Platform.from_string,
+        help="Platforms to target dependencies. Can be used multiple times.",
+    )
+    parser.add_argument(
+        "--pip_data_exclude",
+        action="store",
+        help="Additional data exclusion parameters to add to the pip packages BUILD file.",
+    )
+    parser.add_argument(
+        "--enable_implicit_namespace_pkgs",
+        action="store_true",
+        help="Disables conversion of implicit namespace packages into pkg-util style packages.",
+    )
+    parser.add_argument(
+        "--environment",
+        action="store",
+        help="Extra environment variables to set on the pip environment.",
+    )
+    parser.add_argument(
+        "--download_only",
+        action="store_true",
+        help="Use 'pip download' instead of 'pip wheel'. Disables building wheels from source, but allows use of "
+        "--platform, --python-version, --implementation, and --abi in --extra_pip_args.",
+    )
+    parser.add_argument(
+        "--whl-file",
+        type=pathlib.Path,
+        help="Extract a whl file to be used within Bazel.",
+    )
+    return parser
+
+
+def deserialize_structured_args(args: Dict[str, str]) -> Dict:
+    """Deserialize structured arguments passed from the starlark rules.
+
+    The starlark side serializes these list-valued flags as JSON documents of
+    the shape `{"arg": [...]}`; absent flags are normalized to empty lists.
+
+    Args:
+        args: dict of parsed command line arguments
+
+    Returns:
+        The same dict, with each structured entry replaced by its list value.
+    """
+    structured_args = ("extra_pip_args", "pip_data_exclude", "environment")
+    for arg_name in structured_args:
+        if args.get(arg_name) is not None:
+            args[arg_name] = json.loads(args[arg_name])["arg"]
+        else:
+            args[arg_name] = []
+    return args
+
+
+def get_platforms(args: argparse.Namespace) -> Set:
+    """Aggregate platforms into a single set.
+
+    Args:
+        args: dict of parsed command line arguments
+
+    Returns:
+        The set of requested target platforms; empty when --platform was not
+        passed on the command line.
+    """
+    platforms = set()
+    if args.platform is None:
+        return platforms
+
+    platforms.update(args.platform)
+
+    return platforms
diff --git a/python/private/pypi/whl_installer/namespace_pkgs.py b/python/private/pypi/whl_installer/namespace_pkgs.py
new file mode 100644
index 0000000..7d23c0e
--- /dev/null
+++ b/python/private/pypi/whl_installer/namespace_pkgs.py
@@ -0,0 +1,121 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility functions to discover python package types"""
+import os
+import textwrap
+from pathlib import Path  # supported in >= 3.4
+from typing import List, Optional, Set
+
+
+def implicit_namespace_packages(
+    directory: str, ignored_dirnames: Optional[List[str]] = None
+) -> Set[Path]:
+    """Discovers namespace packages implemented using the 'native namespace packages' method.
+
+    AKA 'implicit namespace packages', which has been supported since Python 3.3.
+    See: https://packaging.python.org/guides/packaging-namespace-packages/#native-namespace-packages
+
+    Args:
+        directory: The root directory to recursively find packages in.
+        ignored_dirnames: A list of directories to exclude from the search
+
+    Returns:
+        The set of directories found under root to be packages using the native namespace method.
+    """
+    namespace_pkg_dirs: Set[Path] = set()
+    standard_pkg_dirs: Set[Path] = set()
+    directory_path = Path(directory)
+    ignored_dirname_paths: List[Path] = [Path(p) for p in ignored_dirnames or ()]
+    # Traverse bottom-up because a directory can be a namespace pkg because its child contains module files.
+    for dirpath, dirnames, filenames in map(
+        lambda t: (Path(t[0]), *t[1:]), os.walk(directory_path, topdown=False)
+    ):
+        # A directory with __init__.py is a regular package, never a namespace one.
+        if "__init__.py" in filenames:
+            standard_pkg_dirs.add(dirpath)
+            continue
+        elif ignored_dirname_paths:
+            is_ignored_dir = dirpath in ignored_dirname_paths
+            child_of_ignored_dir = any(
+                d in dirpath.parents for d in ignored_dirname_paths
+            )
+            if is_ignored_dir or child_of_ignored_dir:
+                continue
+
+        # A dir qualifies if it holds module files itself, or is the parent of
+        # a package discovered earlier in the bottom-up walk.
+        dir_includes_py_modules = _includes_python_modules(filenames)
+        parent_of_namespace_pkg = any(
+            Path(dirpath, d) in namespace_pkg_dirs for d in dirnames
+        )
+        parent_of_standard_pkg = any(
+            Path(dirpath, d) in standard_pkg_dirs for d in dirnames
+        )
+        parent_of_pkg = parent_of_namespace_pkg or parent_of_standard_pkg
+        if (
+            (dir_includes_py_modules or parent_of_pkg)
+            and
+            # The root of the directory should never be an implicit namespace
+            dirpath != directory_path
+        ):
+            namespace_pkg_dirs.add(dirpath)
+    return namespace_pkg_dirs
+
+
+def add_pkgutil_style_namespace_pkg_init(dir_path: Path) -> None:
+    """Adds 'pkgutil-style namespace packages' init file to the given directory
+
+    See: https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
+
+    Args:
+        dir_path: The directory to create an __init__.py for.
+
+    Raises:
+        ValueError: If the directory already contains an __init__.py file
+    """
+    ns_pkg_init_filepath = os.path.join(dir_path, "__init__.py")
+
+    # Never clobber an existing regular-package marker.
+    if os.path.isfile(ns_pkg_init_filepath):
+        raise ValueError("%s already contains an __init__.py file." % dir_path)
+
+    with open(ns_pkg_init_filepath, "w") as ns_pkg_init_f:
+        # See https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
+        ns_pkg_init_f.write(
+            textwrap.dedent(
+                """\
+                # __path__ manipulation added by bazelbuild/rules_python to support namespace pkgs.
+                __path__ = __import__('pkgutil').extend_path(__path__, __name__)
+                """
+            )
+        )
+
+
+def _includes_python_modules(files: List[str]) -> bool:
+    """
+    In order to only transform directories that Python actually considers namespace pkgs
+    we need to detect if a directory includes Python modules.
+
+    Which files are loadable as modules is extension based, and the particular set of extensions
+    varies by platform.
+
+    See:
+    1. https://github.com/python/cpython/blob/7d9d25dbedfffce61fc76bc7ccbfa9ae901bf56f/Lib/importlib/machinery.py#L19
+    2. PEP 420 -- Implicit Namespace Packages, Specification - https://www.python.org/dev/peps/pep-0420/#specification
+    3. dynload_shlib.c and dynload_win.c in python/cpython.
+
+    Args:
+        files: file names (not paths) found in a single directory.
+
+    Returns:
+        True if any file looks loadable as a Python module on some platform.
+    """
+    module_suffixes = {
+        ".py",  # Source modules
+        ".pyc",  # Compiled bytecode modules
+        ".so",  # Unix extension modules
+        ".pyd",  # https://docs.python.org/3/faq/windows.html#is-a-pyd-file-the-same-as-a-dll
+    }
+    return any(Path(f).suffix in module_suffixes for f in files)
diff --git a/python/private/pypi/whl_installer/wheel.py b/python/private/pypi/whl_installer/wheel.py
new file mode 100644
index 0000000..3d6780d
--- /dev/null
+++ b/python/private/pypi/whl_installer/wheel.py
@@ -0,0 +1,657 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility class to inspect an extracted wheel directory"""
+
+import email
+import platform
+import re
+import sys
+from collections import defaultdict
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import Any, Dict, Iterator, List, Optional, Set, Tuple, Union
+
+import installer
+from packaging.requirements import Requirement
+from pip._vendor.packaging.utils import canonicalize_name
+
+
+class OS(Enum):
+    """Operating systems recognised in wheel platform handling.
+
+    `darwin` and `win32` are member aliases so that `sys.platform` values can
+    be looked up directly via `OS[...]`.
+    """
+
+    linux = 1
+    osx = 2
+    windows = 3
+    darwin = osx
+    win32 = windows
+
+    @classmethod
+    def interpreter(cls) -> "OS":
+        "Return the interpreter operating system."
+        return cls[sys.platform.lower()]
+
+    def __str__(self) -> str:
+        return self.name.lower()
+
+
+class Arch(Enum):
+    """CPU architectures recognised in wheel platform handling.
+
+    The aliases (amd64, arm64, i386, ...) allow direct lookup of
+    `platform.machine()` spellings via `Arch[...]`.
+    """
+
+    x86_64 = 1
+    x86_32 = 2
+    aarch64 = 3
+    ppc = 4
+    s390x = 5
+    arm = 6
+    amd64 = x86_64
+    arm64 = aarch64
+    i386 = x86_32
+    i686 = x86_32
+    x86 = x86_32
+    ppc64le = ppc
+
+    @classmethod
+    def interpreter(cls) -> "Arch":
+        "Return the currently running interpreter architecture."
+        # FIXME @aignas 2023-12-13: Hermetic toolchain on Windows 3.11.6
+        # is returning an empty string here, so lets default to x86_64
+        return cls[platform.machine().lower() or "x86_64"]
+
+    def __str__(self) -> str:
+        return self.name.lower()
+
+
+def _as_int(value: Optional[Union[OS, Arch]]) -> int:
+    """Convert one of the enums above to an int for easier sorting algorithms.
+
+    Args:
+        value: The value of an enum or None.
+
+    Returns:
+        -1 if we get None, otherwise, the numeric value of the given enum.
+    """
+    # NOTE(review): -1 orders an unset ("any") field before every concrete
+    # enum value; see Platform.__lt__ for how this is used.
+    if value is None:
+        return -1
+
+    return int(value.value)
+
+
+def host_interpreter_minor_version() -> int:
+    """Return the minor version of the Python interpreter running this tool."""
+    return sys.version_info.minor
+
+
+@dataclass(frozen=True)
+class Platform:
+    """A target platform triple: OS, CPU architecture and Python minor version.
+
+    Any field may be None, meaning "unspecified"; an unspecified field matches
+    every concrete value of that dimension.
+    """
+
+    os: Optional[OS] = None
+    arch: Optional[Arch] = None
+    minor_version: Optional[int] = None
+
+    @classmethod
+    def all(
+        cls,
+        want_os: Optional[OS] = None,
+        minor_version: Optional[int] = None,
+    ) -> List["Platform"]:
+        """Return all OS/arch combinations, optionally restricted to one OS."""
+        return sorted(
+            [
+                cls(os=os, arch=arch, minor_version=minor_version)
+                for os in OS
+                for arch in Arch
+                if not want_os or want_os == os
+            ]
+        )
+
+    @classmethod
+    def host(cls) -> List["Platform"]:
+        """Use the Python interpreter to detect the platform.
+
+        We extract `os` from sys.platform and `arch` from platform.machine
+
+        Returns:
+            A list of parsed values which makes the signature the same as
+            `Platform.all` and `Platform.from_string`.
+        """
+        return [
+            Platform(
+                os=OS.interpreter(),
+                arch=Arch.interpreter(),
+                minor_version=host_interpreter_minor_version(),
+            )
+        ]
+
+    def all_specializations(self) -> Iterator["Platform"]:
+        """Return the platform itself and all its unambiguous specializations.
+
+        For more info about specializations see
+        https://bazel.build/docs/configurable-attributes
+        """
+        yield self
+        if self.arch is None:
+            for arch in Arch:
+                yield Platform(os=self.os, arch=arch, minor_version=self.minor_version)
+        if self.os is None:
+            for os in OS:
+                yield Platform(os=os, arch=self.arch, minor_version=self.minor_version)
+        # When both dimensions are unset, also yield every fully concrete pair.
+        if self.arch is None and self.os is None:
+            for os in OS:
+                for arch in Arch:
+                    yield Platform(os=os, arch=arch, minor_version=self.minor_version)
+
+    def __lt__(self, other: Any) -> bool:
+        """Add a comparison method, so that `sorted` returns the most specialized platforms first."""
+        if not isinstance(other, Platform) or other is None:
+            raise ValueError(f"cannot compare {other} with Platform")
+
+        # Unset fields map to -1 (see _as_int), so they order before any
+        # concrete enum value; ordering is by OS first, then arch.
+        self_arch, self_os = _as_int(self.arch), _as_int(self.os)
+        other_arch, other_os = _as_int(other.arch), _as_int(other.os)
+
+        if self_os == other_os:
+            return self_arch < other_arch
+        else:
+            return self_os < other_os
+
+    def __str__(self) -> str:
+        # Without a minor version the label is a config default, an @platforms
+        # OS constraint, or a plain "<os>_<arch>" pair.
+        if self.minor_version is None:
+            if self.os is None and self.arch is None:
+                return "//conditions:default"
+
+            if self.arch is None:
+                return f"@platforms//os:{self.os}"
+            else:
+                return f"{self.os}_{self.arch}"
+
+        # With a minor version, emit "cp3y"-prefixed names, using anyos/anyarch
+        # for the unspecified dimensions.
+        if self.arch is None and self.os is None:
+            return f"@//python/config_settings:is_python_3.{self.minor_version}"
+
+        if self.arch is None:
+            return f"cp3{self.minor_version}_{self.os}_anyarch"
+
+        if self.os is None:
+            return f"cp3{self.minor_version}_anyos_{self.arch}"
+
+        return f"cp3{self.minor_version}_{self.os}_{self.arch}"
+
+    @classmethod
+    def from_string(cls, platform: Union[str, List[str]]) -> List["Platform"]:
+        """Parse a string and return a list of platforms"""
+        platform = [platform] if isinstance(platform, str) else list(platform)
+        ret = set()
+        for p in platform:
+            if p == "host":
+                ret.update(cls.host())
+                continue
+
+            # Expected shape: "[cp3y_]<os>_<arch>", where os/arch may be "*".
+            abi, _, tail = p.partition("_")
+            if not abi.startswith("cp"):
+                # The first item is not an abi
+                tail = p
+                abi = ""
+            os, _, arch = tail.partition("_")
+            arch = arch or "*"
+
+            minor_version = int(abi[len("cp3") :]) if abi else None
+
+            if arch != "*":
+                ret.add(
+                    cls(
+                        os=OS[os] if os != "*" else None,
+                        arch=Arch[arch],
+                        minor_version=minor_version,
+                    )
+                )
+
+            else:
+                # Wildcard arch: expand to every architecture for the OS.
+                ret.update(
+                    cls.all(
+                        want_os=OS[os] if os != "*" else None,
+                        minor_version=minor_version,
+                    )
+                )
+
+        return sorted(ret)
+
+    # NOTE @aignas 2023-12-05: below is the minimum number of accessors that are defined in
+    # https://peps.python.org/pep-0496/ to make rules_python generate dependencies.
+    #
+    # WARNING: It may not work in cases where the python implementation is different between
+    # different platforms.
+
+    # derived from OS
+    @property
+    def os_name(self) -> str:
+        if self.os == OS.linux or self.os == OS.osx:
+            return "posix"
+        elif self.os == OS.windows:
+            return "nt"
+        else:
+            return ""
+
+    @property
+    def sys_platform(self) -> str:
+        if self.os == OS.linux:
+            return "linux"
+        elif self.os == OS.osx:
+            return "darwin"
+        elif self.os == OS.windows:
+            return "win32"
+        else:
+            return ""
+
+    @property
+    def platform_system(self) -> str:
+        if self.os == OS.linux:
+            return "Linux"
+        elif self.os == OS.osx:
+            return "Darwin"
+        elif self.os == OS.windows:
+            return "Windows"
+        else:
+            return ""
+
+    # derived from OS and Arch
+    @property
+    def platform_machine(self) -> str:
+        """Guess the target 'platform_machine' marker.
+
+        NOTE @aignas 2023-12-05: this may not work on really new systems, like
+        Windows if they define the platform markers in a different way.
+        """
+        if self.arch == Arch.x86_64:
+            return "x86_64"
+        elif self.arch == Arch.x86_32 and self.os != OS.osx:
+            return "i386"
+        elif self.arch == Arch.x86_32:
+            return ""
+        elif self.arch == Arch.aarch64 and self.os == OS.linux:
+            return "aarch64"
+        elif self.arch == Arch.aarch64:
+            # Assuming that OSX and Windows use this one since the precedent is set here:
+            # https://github.com/cgohlke/win_arm64-wheels
+            return "arm64"
+        elif self.os != OS.linux:
+            return ""
+        elif self.arch == Arch.ppc64le:
+            return "ppc64le"
+        elif self.arch == Arch.s390x:
+            return "s390x"
+        else:
+            return ""
+
+    def env_markers(self, extra: str) -> Dict[str, str]:
+        """Return PEP 508 environment-marker values approximating this platform."""
+        # If it is None, use the host version
+        minor_version = self.minor_version or host_interpreter_minor_version()
+
+        return {
+            "extra": extra,
+            "os_name": self.os_name,
+            "sys_platform": self.sys_platform,
+            "platform_machine": self.platform_machine,
+            "platform_system": self.platform_system,
+            "platform_release": "",  # unset
+            "platform_version": "",  # unset
+            "python_version": f"3.{minor_version}",
+            # FIXME @aignas 2024-01-14: is putting zero last a good idea? Maybe we should
+            # use `20` or something else to avoid having weird issues where the full version is used for
+            # matching and the author decides to only support 3.y.5 upwards.
+            "implementation_version": f"3.{minor_version}.0",
+            "python_full_version": f"3.{minor_version}.0",
+            # we assume that the following are the same as the interpreter used to setup the deps:
+            # "implementation_name": "cpython"
+            # "platform_python_implementation: "CPython",
+        }
+
+
+@dataclass(frozen=True)
+class FrozenDeps:
+    """Immutable resolution result: common deps plus per-platform select deps."""
+
+    deps: List[str]
+    deps_select: Dict[str, List[str]]
+
+
+class Deps:
+    """Deps is a dependency builder that has a build() method to return FrozenDeps."""
+
+    def __init__(
+        self,
+        name: str,
+        requires_dist: List[str],
+        *,
+        extras: Optional[Set[str]] = None,
+        platforms: Optional[Set[Platform]] = None,
+    ):
+        """Create a new instance and parse the requires_dist
+
+        Args:
+            name (str): The name of the whl distribution
+            requires_dist (list[Str]): The Requires-Dist from the METADATA of the whl
+                distribution.
+            extras (set[str], optional): The list of requested extras, defaults to None.
+            platforms (set[Platform], optional): The list of target platforms, defaults to
+                None. If the list of platforms has multiple `minor_version` values, it
+                will change the code to generate the select statements using
+                `@rules_python//python/config_settings:is_python_3.y` conditions.
+        """
+        self.name: str = Deps._normalize(name)
+        self._platforms: Set[Platform] = platforms or set()
+        self._target_versions = {p.minor_version for p in platforms or {}}
+        self._add_version_select = platforms and len(self._target_versions) > 2
+        if None in self._target_versions and len(self._target_versions) > 2:
+            raise ValueError(
+                f"all python versions need to be specified explicitly, got: {platforms}"
+            )
+
+        # Sort so that the dictionary order in the FrozenDeps is deterministic
+        # without the final sort because Python retains insertion order. That way
+        # the sorting by platform is limited within the Platform class itself and
+        # the unit-tests for the Deps can be simpler.
+        reqs = sorted(
+            (Requirement(wheel_req) for wheel_req in requires_dist),
+            key=lambda x: f"{x.name}:{sorted(x.extras)}",
+        )
+
+        want_extras = self._resolve_extras(reqs, extras)
+
+        # Then add all of the requirements in order
+        self._deps: Set[str] = set()
+        self._select: Dict[Platform, Set[str]] = defaultdict(set)
+        for req in reqs:
+            self._add_req(req, want_extras)
+
+    def _add(self, dep: str, platform: Optional[Platform]):
+        """Record `dep` as a common dependency (platform=None) or under `platform`,
+        keeping the common list and the per-platform select sets consistent."""
+        dep = Deps._normalize(dep)
+
+        # Self-edges are processed in _resolve_extras
+        if dep == self.name:
+            return
+
+        if not platform:
+            self._deps.add(dep)
+
+            # If the dep is in the platform-specific list, remove it from the select.
+            pop_keys = []
+            for p, deps in self._select.items():
+                if dep not in deps:
+                    continue
+
+                deps.remove(dep)
+                if not deps:
+                    pop_keys.append(p)
+
+            for p in pop_keys:
+                self._select.pop(p)
+            return
+
+        if dep in self._deps:
+            # If the dep is already in the main dependency list, no need to add it in the
+            # platform-specific dependency list.
+            return
+
+        # Add the platform-specific dep
+        self._select[platform].add(dep)
+
+        # Add the dep to specializations of the given platform if they
+        # exist in the select statement.
+        for p in platform.all_specializations():
+            if p not in self._select:
+                continue
+
+            self._select[p].add(dep)
+
+        if len(self._select[platform]) == 1:
+            # We are adding a new item to the select and we need to ensure that
+            # existing dependencies from less specialized platforms are propagated
+            # to the newly added dependency set.
+            for p, deps in self._select.items():
+                # Check if the existing platform overlaps with the given platform
+                if p == platform or platform not in p.all_specializations():
+                    continue
+
+                self._select[platform].update(self._select[p])
+
+    def _maybe_add_common_dep(self, dep):
+        """Promote `dep` to the common list when every version branch contains it."""
+        if len(self._target_versions) < 2:
+            return
+
+        platforms = [Platform()] + [
+            Platform(minor_version=v) for v in self._target_versions
+        ]
+
+        # If the dep is targeting all target python versions, lets add it to
+        # the common dependency list to simplify the select statements.
+        for p in platforms:
+            if p not in self._select:
+                return
+
+            if dep not in self._select[p]:
+                return
+
+        # All of the python version-specific branches have the dep, so lets add
+        # it to the common deps.
+        self._deps.add(dep)
+        for p in platforms:
+            self._select[p].remove(dep)
+            if not self._select[p]:
+                self._select.pop(p)
+
+    @staticmethod
+    def _normalize(name: str) -> str:
+        return re.sub(r"[-_.]+", "_", name).lower()
+
    def _resolve_extras(
        self, reqs: List[Requirement], extras: Optional[Set[str]]
    ) -> Set[str]:
        """Resolve extras which are due to depending on self[some_other_extra].

        Some packages may have cyclic dependencies resulting from extras being used, one example is
        `etils`, where we have one set of extras as aliases for other extras
        and we have an extra called 'all' that includes all other extras.

        Example: github.com/google/etils/blob/a0b71032095db14acf6b33516bca6d885fe09e35/pyproject.toml#L32.

        When the `requirements.txt` is generated by `pip-tools`, then it is likely that
        this step is not needed, but for other `requirements.txt` files this may be useful.

        NOTE @aignas 2023-12-08: the extra resolution is not platform dependent,
        but in order for it to become platform dependent we would have to have
        separate targets for each extra in extras.
        """

        # Resolve any extra extras due to self-edges, empty string means no
        # extras The empty string in the set is just a way to make the handling
        # of no extras and a single extra easier and having a set of {"", "foo"}
        # is equivalent to having {"foo"}.
        extras = extras or {""}

        # Collect the self-edges (requirements whose name is this package).
        self_reqs = []
        for req in reqs:
            if Deps._normalize(req.name) != self.name:
                continue

            if req.marker is None:
                # I am pretty sure we cannot reach this code as it does not
                # make sense to specify packages in this way, but since it is
                # easy to handle, lets do it.
                #
                # TODO @aignas 2023-12-08: add a test
                extras = extras | req.extras
            else:
                # process these in a separate loop
                self_reqs.append(req)

        # A double loop is not strictly optimal, but always correct without recursion
        for req in self_reqs:
            # A marked self-edge contributes its extras only if its marker
            # matches at least one of the extras collected so far.
            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
                extras = extras | req.extras
            else:
                continue

            # Iterate through all packages to ensure that we include all of the extras from previously
            # visited packages.
            for req_ in self_reqs:
                if any(req_.marker.evaluate({"extra": extra}) for extra in extras):
                    extras = extras | req_.extras

        return extras
+
    def _add_req(self, req: Requirement, extras: Set[str]) -> None:
        """Add a single Requires-Dist requirement, splitting it per platform.

        Args:
            req: the parsed requirement to record.
            extras: the extras enabled for this wheel; markers are evaluated
                once per extra.
        """
        if req.marker is None:
            # Unconditional dependency: always applies.
            self._add(req.name, None)
            return

        marker_str = str(req.marker)

        if not self._platforms:
            # No target platforms were configured: evaluate the marker against
            # the current environment only.
            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
                self._add(req.name, None)
            return

        # NOTE @aignas 2023-12-08: in order to have reasonable select statements
        # we do have to have some parsing of the markers, so it begs the question
        # if packaging should be reimplemented in Starlark to have the best solution
        # for now we will implement it in Python and see what the best parsing result
        # can be before making this decision.
        match_os = any(
            tag in marker_str
            for tag in [
                "os_name",
                "sys_platform",
                "platform_system",
            ]
        )
        match_arch = "platform_machine" in marker_str
        match_version = "version" in marker_str

        if not (match_os or match_arch or match_version):
            # The marker does not mention OS, arch or python version (e.g. it
            # only references "extra"), so the dep is platform-independent.
            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
                self._add(req.name, None)
            return

        for plat in self._platforms:
            if not any(
                req.marker.evaluate(plat.env_markers(extra)) for extra in extras
            ):
                continue

            # Record the dep under the narrowest platform key that the marker
            # actually constrains. When version selects are enabled, a dep on
            # the host interpreter's minor version is additionally recorded
            # under the version-less key — presumably so version-unaware
            # configurations still resolve; confirm against callers.
            if match_arch and self._add_version_select:
                self._add(req.name, plat)
                if plat.minor_version == host_interpreter_minor_version():
                    self._add(req.name, Platform(plat.os, plat.arch))
            elif match_arch:
                self._add(req.name, plat)
            elif match_os and self._add_version_select:
                self._add(req.name, Platform(plat.os, minor_version=plat.minor_version))
                if plat.minor_version == host_interpreter_minor_version():
                    self._add(req.name, Platform(plat.os))
            elif match_os:
                self._add(req.name, Platform(plat.os))
            elif match_version and self._add_version_select:
                self._add(req.name, Platform(minor_version=plat.minor_version))
                if plat.minor_version == host_interpreter_minor_version():
                    self._add(req.name, Platform())
            elif match_version:
                self._add(req.name, None)

        # Merge to common if possible after processing all platforms
        self._maybe_add_common_dep(req.name)
+
+    def build(self) -> FrozenDeps:
+        return FrozenDeps(
+            deps=sorted(self._deps),
+            deps_select={str(p): sorted(deps) for p, deps in self._select.items()},
+        )
+
+
class Wheel:
    """Representation of the compressed .whl file"""

    def __init__(self, path: Path):
        self._path = path

    @property
    def path(self) -> Path:
        """Filesystem path of the wheel archive."""
        return self._path

    @property
    def name(self) -> str:
        """Canonicalized distribution name taken from the wheel METADATA."""
        # TODO Also available as installer.sources.WheelSource.distribution
        name = str(self.metadata["Name"])
        return canonicalize_name(name)

    @property
    def metadata(self) -> email.message.Message:
        """Parsed METADATA file; re-read from the archive on every access."""
        with installer.sources.WheelFile.open(self.path) as wheel_source:
            metadata_contents = wheel_source.read_dist_info("METADATA")
            metadata = installer.utils.parse_metadata_file(metadata_contents)
        return metadata

    @property
    def version(self) -> str:
        """Distribution version taken from the wheel METADATA."""
        # TODO Also available as installer.sources.WheelSource.version
        return str(self.metadata["Version"])

    def entry_points(self) -> Dict[str, Tuple[str, str]]:
        """Returns the entrypoints defined in the current wheel

        See https://packaging.python.org/specifications/entry-points/ for more info

        Returns:
            Dict[str, Tuple[str, str]]: A mapping of the entry point's name to it's module and attribute
        """
        with installer.sources.WheelFile.open(self.path) as wheel_source:
            if "entry_points.txt" not in wheel_source.dist_info_filenames:
                return dict()

            entry_points_mapping = dict()
            entry_points_contents = wheel_source.read_dist_info("entry_points.txt")
            entry_points = installer.utils.parse_entrypoints(entry_points_contents)
            for script, module, attribute, script_section in entry_points:
                # Only console scripts are exposed; GUI scripts are ignored.
                if script_section == "console":
                    entry_points_mapping[script] = (module, attribute)

            return entry_points_mapping

    def dependencies(
        self,
        # Fix: the default is None, so the annotation must be Optional
        # (implicit Optional is disallowed by PEP 484 type checkers).
        extras_requested: Optional[Set[str]] = None,
        platforms: Optional[Set[Platform]] = None,
    ) -> FrozenDeps:
        """Resolve this wheel's Requires-Dist entries into a FrozenDeps.

        Args:
            extras_requested: extras to enable when evaluating markers.
            platforms: target platforms for platform-specific dependencies.
        """
        return Deps(
            self.name,
            extras=extras_requested,
            platforms=platforms,
            requires_dist=self.metadata.get_all("Requires-Dist", []),
        ).build()

    def unzip(self, directory: str) -> None:
        """Install the wheel's contents under `directory` via `installer`."""
        installation_schemes = {
            "purelib": "/site-packages",
            "platlib": "/site-packages",
            "headers": "/include",
            "scripts": "/bin",
            "data": "/data",
        }
        destination = installer.destinations.SchemeDictionaryDestination(
            installation_schemes,
            # TODO Should entry_point scripts also be handled by installer rather than custom code?
            interpreter="/dev/null",
            script_kind="posix",
            destdir=directory,
            bytecode_optimization_levels=[],
        )

        with installer.sources.WheelFile.open(self.path) as wheel_source:
            installer.install(
                source=wheel_source,
                destination=destination,
                additional_metadata={
                    "INSTALLER": b"https://github.com/bazelbuild/rules_python",
                },
            )
diff --git a/python/private/pypi/whl_installer/wheel_installer.py b/python/private/pypi/whl_installer/wheel_installer.py
new file mode 100644
index 0000000..ef8181c
--- /dev/null
+++ b/python/private/pypi/whl_installer/wheel_installer.py
@@ -0,0 +1,205 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Build and/or fetch a single wheel based on the requirement passed in"""
+
+import errno
+import glob
+import json
+import os
+import re
+import subprocess
+import sys
+from pathlib import Path
+from tempfile import NamedTemporaryFile
+from typing import Dict, List, Optional, Set, Tuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from python.private.pypi.whl_installer import arguments, namespace_pkgs, wheel
+
+
+def _configure_reproducible_wheels() -> None:
+    """Modifies the environment to make wheel building reproducible.
+    Wheels created from sdists are not reproducible by default. We can however workaround this by
+    patching in some configuration with environment variables.
+    """
+
+    # wheel, by default, enables debug symbols in GCC. This incidentally captures the build path in the .so file
+    # We can override this behavior by disabling debug symbols entirely.
+    # https://github.com/pypa/pip/issues/6505
+    if "CFLAGS" in os.environ:
+        os.environ["CFLAGS"] += " -g0"
+    else:
+        os.environ["CFLAGS"] = "-g0"
+
+    # set SOURCE_DATE_EPOCH to 1980 so that we can use python wheels
+    # https://github.com/NixOS/nixpkgs/blob/master/doc/languages-frameworks/python.section.md#python-setuppy-bdist_wheel-cannot-create-whl
+    if "SOURCE_DATE_EPOCH" not in os.environ:
+        os.environ["SOURCE_DATE_EPOCH"] = "315532800"
+
+    # Python wheel metadata files can be unstable.
+    # See https://bitbucket.org/pypa/wheel/pull-requests/74/make-the-output-of-metadata-files/diff
+    if "PYTHONHASHSEED" not in os.environ:
+        os.environ["PYTHONHASHSEED"] = "0"
+
+
def _parse_requirement_for_extra(
    requirement: str,
) -> Tuple[Optional[str], Optional[Set[str]]]:
    """Split a requirement string into its distribution name and extras.

    Returns:
        (canonical_name, extras) when the requirement carries an extras
        bracket, otherwise (None, None).
    """
    # https://www.python.org/dev/peps/pep-0508/#grammar
    identifier = r"[0-9A-Za-z][0-9A-Za-z_.\-]*"
    match = re.match(
        r"^\s*({id})\s*\[\s*({id}(?:\s*,\s*{id})*)\s*\]".format(id=identifier),
        requirement,
    )
    if match is None:
        return None, None

    extras = {extra.strip() for extra in match.group(2).split(",")}
    return canonicalize_name(match.group(1)), extras
+
+
def _setup_namespace_pkg_compatibility(wheel_dir: str) -> None:
    """Convert native namespace packages into pkgutil-style packages.

    Namespace packages can be created in one of three ways, detailed here:
    https://packaging.python.org/guides/packaging-namespace-packages/#creating-a-namespace-package

    'pkgutil-style namespace packages' (2) and 'pkg_resources-style namespace
    packages' (3) work in Bazel, but 'native namespace packages' (1) do not,
    so method 1 is converted into method 2 here.

    Args:
        wheel_dir: the directory of the wheel to convert
    """
    for pkg_dir in namespace_pkgs.implicit_namespace_packages(
        wheel_dir,
        ignored_dirnames=["%s/bin" % wheel_dir],
    ):
        namespace_pkgs.add_pkgutil_style_namespace_pkg_init(pkg_dir)
+
+
def _extract_wheel(
    wheel_file: str,
    extras: Dict[str, Set[str]],
    enable_implicit_namespace_pkgs: bool,
    platforms: List[wheel.Platform],
    installation_dir: Path = Path("."),
) -> None:
    """Extracts a wheel into the given directory and writes its metadata.json.

    Args:
        wheel_file: the filepath of the .whl
        extras: a mapping from distribution name to the set of extras
            requested for it (the docstring previously called this a list)
        enable_implicit_namespace_pkgs: if true, disables conversion of
            implicit namespace packages and will unzip as-is
        platforms: the target platforms to resolve dependencies for
        installation_dir: the destination directory for installation of the wheel
    """

    whl = wheel.Wheel(wheel_file)
    whl.unzip(installation_dir)

    if not enable_implicit_namespace_pkgs:
        _setup_namespace_pkg_compatibility(installation_dir)

    # Only the extras requested for this particular distribution apply.
    extras_requested = extras.get(whl.name, set())

    dependencies = whl.dependencies(extras_requested, platforms)

    with open(os.path.join(installation_dir, "metadata.json"), "w") as f:
        metadata = {
            "name": whl.name,
            "version": whl.version,
            "deps": dependencies.deps,
            "deps_by_platform": dependencies.deps_select,
            "entry_points": [
                {
                    "name": name,
                    "module": module,
                    "attribute": attribute,
                }
                for name, (module, attribute) in sorted(whl.entry_points().items())
            ],
        }
        json.dump(metadata, f)
+
+
def main() -> None:
    """CLI entry point: either extract an existing wheel, or build/download one with pip."""
    args = arguments.parser(description=__doc__).parse_args()
    deserialized_args = dict(vars(args))
    arguments.deserialize_structured_args(deserialized_args)

    _configure_reproducible_wheels()

    if args.whl_file:
        # Extraction mode: a wheel file was already provided; unpack it and
        # emit metadata.json, then stop.
        whl = Path(args.whl_file)

        name, extras_for_pkg = _parse_requirement_for_extra(args.requirement)
        extras = {name: extras_for_pkg} if extras_for_pkg and name else dict()
        _extract_wheel(
            wheel_file=whl,
            extras=extras,
            enable_implicit_namespace_pkgs=args.enable_implicit_namespace_pkgs,
            platforms=arguments.get_platforms(args),
        )
        return

    # Fetch/build mode: invoke pip ("download" for pre-built wheels only,
    # otherwise "wheel") without resolving dependencies.
    pip_args = (
        [sys.executable, "-m", "pip"]
        + (["--isolated"] if args.isolated else [])
        + (["download", "--only-binary=:all:"] if args.download_only else ["wheel"])
        + ["--no-deps"]
        + deserialized_args["extra_pip_args"]
    )

    requirement_file = NamedTemporaryFile(mode="wb", delete=False)
    try:
        requirement_file.write(args.requirement.encode("utf-8"))
        requirement_file.flush()
        # Close the file so pip is allowed to read it when running on Windows.
        # For more information, see: https://bugs.python.org/issue14243
        requirement_file.close()
        # Requirement specific args like --hash can only be passed in a requirements file,
        # so write our single requirement into a temp file in case it has any of those flags.
        pip_args.extend(["-r", requirement_file.name])

        env = os.environ.copy()
        env.update(deserialized_args["environment"])
        # Assumes any errors are logged by pip so do nothing. This command will fail if pip fails
        subprocess.run(pip_args, check=True, env=env)
    finally:
        # Best-effort cleanup of the temp requirements file; only a missing
        # file is tolerated.
        try:
            os.unlink(requirement_file.name)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
    # pip wrote exactly one wheel into the CWD; record its absolute path for
    # the caller. Raises StopIteration if no wheel was produced.
    whl = Path(next(iter(glob.glob("*.whl"))))

    with open("whl_file.json", "w") as f:
        json.dump({"whl_file": f"{whl.resolve()}"}, f)


if __name__ == "__main__":
    main()
diff --git a/python/private/pypi/whl_library.bzl b/python/private/pypi/whl_library.bzl
index cae0db3..77cbd4e 100644
--- a/python/private/pypi/whl_library.bzl
+++ b/python/private/pypi/whl_library.bzl
@@ -16,12 +16,12 @@
 
 load("//python:repositories.bzl", "is_standalone_interpreter")
 load("//python:versions.bzl", "WINDOWS_NAME")
-load("//python/pip_install:repositories.bzl", "all_requirements")
 load("//python/private:auth.bzl", "AUTH_ATTRS", "get_auth")
 load("//python/private:envsubst.bzl", "envsubst")
 load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
 load("//python/private:toolchains_repo.bzl", "get_host_os_arch")
 load(":attrs.bzl", "ATTRS", "use_isolated")
+load(":deps.bzl", "all_requirements")
 load(":generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")
 load(":parse_whl_name.bzl", "parse_whl_name")
 load(":patch_whl.bzl", "patch_whl")
@@ -241,7 +241,7 @@
     args = [
         python_interpreter,
         "-m",
-        "python.pip_install.tools.wheel_installer.wheel_installer",
+        "python.private.pypi.whl_installer.wheel_installer",
         "--requirement",
         rctx.attr.requirement,
     ]
diff --git a/python/private/pypi/whl_library_alias.bzl b/python/private/pypi/whl_library_alias.bzl
new file mode 100644
index 0000000..263d7ec
--- /dev/null
+++ b/python/private/pypi/whl_library_alias.bzl
@@ -0,0 +1,99 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""whl_library aliases for multi_pip_parse."""
+
+load("//python/private:full_version.bzl", "full_version")
+load(":render_pkg_aliases.bzl", "NO_MATCH_ERROR_MESSAGE_TEMPLATE")
+
def _whl_library_alias_impl(rctx):
    """Generate a BUILD.bazel containing version-dispatching aliases for one wheel."""
    rules_python = rctx.attr._rules_python_workspace.workspace_name
    default_version = rctx.attr.default_version
    default_repo_prefix = rctx.attr.version_map[default_version] if default_version else None
    version_map = rctx.attr.version_map.items()

    lines = ["# Generated by python/pip.bzl"]
    for target in ["pkg", "whl", "data", "dist_info"]:
        lines.append(_whl_library_render_alias_target(
            alias_name = target,
            default_repo_prefix = default_repo_prefix,
            rules_python = rules_python,
            version_map = version_map,
            wheel_name = rctx.attr.wheel_name,
        ))
    rctx.file("BUILD.bazel", "\n".join(lines))
+
def _whl_library_render_alias_target(
        alias_name,
        default_repo_prefix,
        rules_python,
        version_map,
        wheel_name):
    """Render a single version-dispatching alias() target as a string.

    Args:
        alias_name: the target name being aliased (e.g. "pkg", "whl").
        default_repo_prefix: repo prefix for the //conditions:default branch,
            or None to render a no_match_error instead.
        rules_python: the rules_python workspace name, used in the
            config_setting labels.
        version_map: list of (python_version, repo_prefix) pairs.
        wheel_name: the wheel name used to form the actual repo labels.

    Returns:
        The alias() stanza as a string.
    """
    alias = ["""\
alias(
    name = "{alias_name}",
    actual = select({{""".format(alias_name = alias_name)]
    # One select branch per configured python version.
    for [python_version, repo_prefix] in version_map:
        alias.append("""\
        "@{rules_python}//python/config_settings:is_python_{full_python_version}": "{actual}",""".format(
            full_python_version = full_version(python_version),
            actual = "@{repo_prefix}{wheel_name}//:{alias_name}".format(
                repo_prefix = repo_prefix,
                wheel_name = wheel_name,
                alias_name = alias_name,
            ),
            rules_python = rules_python,
        ))
    if default_repo_prefix:
        default_actual = "@{repo_prefix}{wheel_name}//:{alias_name}".format(
            repo_prefix = default_repo_prefix,
            wheel_name = wheel_name,
            alias_name = alias_name,
        )
        alias.append('        "//conditions:default": "{default_actual}",'.format(
            default_actual = default_actual,
        ))

    alias.append("    },")  # Close select expression condition dict
    # Without a default branch, emit a helpful no_match_error instead.
    if not default_repo_prefix:
        supported_versions = sorted([python_version for python_version, _ in version_map])
        alias.append('    no_match_error="""{}""",'.format(
            NO_MATCH_ERROR_MESSAGE_TEMPLATE.format(
                supported_versions = ", ".join(supported_versions),
                rules_python = rules_python,
            ),
        ))
    alias.append("    ),")  # Close the select expression
    alias.append('    visibility = ["//visibility:public"],')
    alias.append(")")  # Close the alias() expression
    return "\n".join(alias)
+
# Repository rule that generates a repo whose BUILD file contains the
# version-dispatching aliases ("pkg", "whl", "data", "dist_info") for a
# single wheel, selecting on the configured python version.
whl_library_alias = repository_rule(
    _whl_library_alias_impl,
    attrs = {
        "default_version": attr.string(
            mandatory = False,
            doc = "Optional Python version in major.minor format, e.g. '3.10'." +
                  "The Python version of the wheel to use when the versions " +
                  "from `version_map` don't match. This allows the default " +
                  "(version unaware) rules to match and select a wheel. If " +
                  "not specified, then the default rules won't be able to " +
                  "resolve a wheel and an error will occur.",
        ),
        # Maps a python version (major.minor) to the whl_library repo prefix.
        "version_map": attr.string_dict(mandatory = True),
        # Wheel name used to construct the aliased "@{prefix}{wheel_name}" labels.
        "wheel_name": attr.string(mandatory = True),
        # Used to recover the rules_python workspace name for label rendering.
        "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
    },
)