refactor: move PyPI related extension and repository_rule setup to its own dir (#2003)

This is to ensure that future work is easier and it's clear where to add
tests.
Now all of the unit tests can be run by just `bazel test
//tests/pypi/...`.

Refactor summary:
- chore: add aignas to CODEOWNERS
- chore: add a new directory for storing PyPI related code
- move pypi_index_sources.bzl to private/pypi
- chore: move parse_requirements_txt to private/pypi
- move parse_whl_name to private/pypi
- move whl_target_platforms to private/pypi
- move parse_requirements to private/pypi
- move pip_repo_name to private/pypi
- remove unused file
- move pip_config_settings to private/pypi
- move pypi_index to pypi/private and rename
- move labels.bzl to private/pypi
- move generate_build_bazel to private/pypi
- move render_pkg_aliases.bzl to private/pypi
- move patch_whl.bzl to private/pypi
- Move envsubst and render_tests to top level of tests
- move pip_install_srcs to private/pypi
- split and move pip_install/pip_repository.bzl to private/pypi
- move the bzlmod extension to private/pypi
diff --git a/python/private/pypi/BUILD.bazel b/python/private/pypi/BUILD.bazel
new file mode 100644
index 0000000..1530837
--- /dev/null
+++ b/python/private/pypi/BUILD.bazel
@@ -0,0 +1,238 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
+
+package(default_visibility = ["//:__subpackages__"])
+
+licenses(["notice"])
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]),
+    visibility = ["//python/private:__pkg__"],
+)
+
+# Filegroup of bzl files that can be used by downstream rules for documentation generation
+filegroup(
+    name = "bzl",
+    srcs = glob(["**/*.bzl"]),
+    visibility = ["//python/private:__pkg__"],
+)
+
+# Keep sorted by library name and keep the files named by the main symbol they export
+
+bzl_library(
+    name = "bzlmod_bzl",
+    srcs = ["bzlmod.bzl"],
+    deps = [
+        ":attrs_bzl",
+        ":hub_repository_bzl",
+        ":parse_requirements_bzl",
+        ":parse_whl_name_bzl",
+        ":pip_repository_attrs_bzl",
+        ":simpleapi_download_bzl",
+        ":whl_library_bzl",
+        ":whl_repo_name_bzl",
+        "//python/private:full_version_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:version_label_bzl",
+        "@bazel_features//:features",
+    ] + ([
+        "@pythons_hub//:interpreters_bzl",
+    ] if BZLMOD_ENABLED else []),
+)
+
+bzl_library(
+    name = "config_settings_bzl",
+    srcs = ["config_settings.bzl"],
+    deps = [":flags_bzl"],
+)
+
+bzl_library(
+    name = "flags_bzl",
+    srcs = ["flags.bzl"],
+    deps = ["//python/private:enum_bzl"],
+)
+
+bzl_library(
+    name = "generate_whl_library_build_bazel_bzl",
+    srcs = ["generate_whl_library_build_bazel.bzl"],
+    deps = [
+        ":labels_bzl",
+        "//python/private:normalize_name_bzl",
+    ],
+)
+
+bzl_library(
+    name = "generate_group_library_build_bazel_bzl",
+    srcs = ["generate_group_library_build_bazel.bzl"],
+    deps = [
+        ":labels_bzl",
+        "//python/private:normalize_name_bzl",
+    ],
+)
+
+bzl_library(
+    name = "group_library_bzl",
+    srcs = ["group_library.bzl"],
+    deps = [
+        ":generate_group_library_build_bazel_bzl",
+    ],
+)
+
+bzl_library(
+    name = "hub_repository_bzl",
+    srcs = ["hub_repository.bzl"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        ":render_pkg_aliases_bzl",
+        "//python/private:text_util_bzl",
+    ],
+)
+
+bzl_library(
+    name = "index_sources_bzl",
+    srcs = ["index_sources.bzl"],
+)
+
+bzl_library(
+    name = "labels_bzl",
+    srcs = ["labels.bzl"],
+)
+
+bzl_library(
+    name = "package_annotation_bzl",
+    srcs = ["package_annotation.bzl"],
+)
+
+bzl_library(
+    name = "parse_requirements_bzl",
+    srcs = ["parse_requirements.bzl"],
+    deps = [
+        ":index_sources_bzl",
+        ":parse_requirements_txt_bzl",
+        ":whl_target_platforms_bzl",
+        "//python/private:normalize_name_bzl",
+    ],
+)
+
+bzl_library(
+    name = "parse_requirements_txt_bzl",
+    srcs = ["parse_requirements_txt.bzl"],
+)
+
+bzl_library(
+    name = "parse_simpleapi_html_bzl",
+    srcs = ["parse_simpleapi_html.bzl"],
+)
+
+bzl_library(
+    name = "parse_whl_name_bzl",
+    srcs = ["parse_whl_name.bzl"],
+)
+
+bzl_library(
+    name = "patch_whl_bzl",
+    srcs = ["patch_whl.bzl"],
+    deps = [
+        ":parse_whl_name_bzl",
+        "//python/private:repo_utils_bzl",
+    ],
+)
+
+bzl_library(
+    name = "pip_repository_bzl",
+    srcs = ["pip_repository.bzl"],
+    deps = [
+        ":attrs_bzl",
+        ":parse_requirements_bzl",
+        ":pip_repository_attrs_bzl",
+        ":render_pkg_aliases_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:repo_utils_bzl",
+        "//python/private:text_util_bzl",
+        "@bazel_skylib//lib:sets",
+    ],
+)
+
+bzl_library(
+    name = "pip_repository_attrs_bzl",
+    srcs = ["pip_repository_attrs.bzl"],
+)
+
+bzl_library(
+    name = "render_pkg_aliases_bzl",
+    srcs = ["render_pkg_aliases.bzl"],
+    deps = [
+        ":generate_group_library_build_bazel_bzl",
+        ":labels_bzl",
+        ":parse_whl_name_bzl",
+        ":whl_target_platforms_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:text_util_bzl",
+    ],
+)
+
+bzl_library(
+    name = "simpleapi_download_bzl",
+    srcs = ["simpleapi_download.bzl"],
+    deps = [
+        ":parse_simpleapi_html_bzl",
+        "//python/private:auth_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:text_util_bzl",
+        "@bazel_features//:features",
+    ],
+)
+
+bzl_library(
+    name = "whl_library_bzl",
+    srcs = ["whl_library.bzl"],
+    deps = [
+        ":attrs_bzl",
+        ":generate_whl_library_build_bazel_bzl",
+        ":parse_whl_name_bzl",
+        ":patch_whl_bzl",
+        ":whl_target_platforms_bzl",
+        "//python:repositories_bzl",
+        "//python:versions_bzl",
+        "//python/pip_install:repositories_bzl",
+        "//python/private:auth_bzl",
+        "//python/private:envsubst_bzl",
+        "//python/private:repo_utils_bzl",
+        "//python/private:toolchains_repo_bzl",
+    ],
+)
+
+bzl_library(
+    name = "whl_repo_name_bzl",
+    srcs = ["whl_repo_name.bzl"],
+    deps = [
+        ":parse_whl_name_bzl",
+        "//python/private:normalize_name_bzl",
+    ],
+)
+
+bzl_library(
+    name = "whl_target_platforms_bzl",
+    srcs = ["whl_target_platforms.bzl"],
+    deps = [":parse_whl_name_bzl"],
+)
diff --git a/python/private/pypi/README.md b/python/private/pypi/README.md
new file mode 100644
index 0000000..6be5703
--- /dev/null
+++ b/python/private/pypi/README.md
@@ -0,0 +1,9 @@
+# PyPI integration code
+
+This code is for integrating with PyPI and other compatible indexes. At the
+moment we have code for:
+* Downloading packages using `pip` or `repository_ctx.download`.
+* Interacting with PyPI compatible indexes via the [SimpleAPI](https://packaging.python.org/en/latest/specifications/simple-repository-api/) spec.
+* Locking a `requirements.in` or [PEP621] compliant `pyproject.toml`.
+
+[PEP621]: https://peps.python.org/pep-0621/
diff --git a/python/private/pypi/attrs.bzl b/python/private/pypi/attrs.bzl
new file mode 100644
index 0000000..79ffea5
--- /dev/null
+++ b/python/private/pypi/attrs.bzl
@@ -0,0 +1,224 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"common attributes for whl_library and pip_repository"
+
+ATTRS = {
+    "download_only": attr.bool(
+        doc = """
+Whether to use "pip download" instead of "pip wheel". Disables building wheels from source, but allows use of
+--platform, --python-version, --implementation, and --abi in --extra_pip_args to download wheels for a different
+platform from the host platform.
+        """,
+    ),
+    "enable_implicit_namespace_pkgs": attr.bool(
+        default = False,
+        doc = """
+If true, disables conversion of native namespace packages into pkg-util style namespace packages. When set all py_binary
+and py_test targets must specify either `legacy_create_init=False` or the global Bazel option
+`--incompatible_default_to_explicit_init_py` to prevent `__init__.py` being automatically generated in every directory.
+
+This option is required to support some packages which cannot handle the conversion to pkg-util style.
+            """,
+    ),
+    "environment": attr.string_dict(
+        doc = """
+Environment variables to set in the pip subprocess.
+Can be used to set common variables such as `http_proxy`, `https_proxy` and `no_proxy`
+Note that pip is run with "--isolated" on the CLI so `PIP_<VAR>_<NAME>`
+style env vars are ignored, but env vars that control requests and urllib3
+can be passed. If you need `PIP_<VAR>_<NAME>`, take a look at `extra_pip_args`
+and `envsubst`.
+        """,
+        default = {},
+    ),
+    "envsubst": attr.string_list(
+        mandatory = False,
+        doc = """\
+A list of environment variables to substitute (e.g. `["PIP_INDEX_URL",
+"PIP_RETRIES"]`). The corresponding variables are expanded in `extra_pip_args`
+using the syntax `$VARNAME` or `${VARNAME}` (expanding to empty string if unset)
+or `${VARNAME:-default}` (expanding to default if the variable is unset or empty
+in the environment). Note: On Bazel 6 and Bazel 7.0 changes to the variables named
+here do not cause packages to be re-fetched. Don't fetch different things based
+on the value of these variables.
+""",
+    ),
+    "experimental_requirement_cycles": attr.string_list_dict(
+        default = {},
+        doc = """\
+A mapping of dependency cycle names to a list of requirements which form that cycle.
+
+Requirements which form cycles will be installed together and taken as
+dependencies together in order to ensure that the cycle is always satisfied.
+
+Example:
+  `sphinx` depends on `sphinxcontrib-serializinghtml`
+  When listing both as requirements, a la
+
+  ```
+  py_binary(
+    name = "doctool",
+    ...
+    deps = [
+      "@pypi//sphinx:pkg",
+      "@pypi//sphinxcontrib_serializinghtml",
+     ]
+  )
+  ```
+
+  Will produce a Bazel error such as
+
+  ```
+  ERROR: .../external/pypi_sphinxcontrib_serializinghtml/BUILD.bazel:44:6: in alias rule @pypi_sphinxcontrib_serializinghtml//:pkg: cycle in dependency graph:
+      //:doctool (...)
+      @pypi//sphinxcontrib_serializinghtml:pkg (...)
+  .-> @pypi_sphinxcontrib_serializinghtml//:pkg (...)
+  |   @pypi_sphinxcontrib_serializinghtml//:_pkg (...)
+  |   @pypi_sphinx//:pkg (...)
+  |   @pypi_sphinx//:_pkg (...)
+  `-- @pypi_sphinxcontrib_serializinghtml//:pkg (...)
+  ```
+
+  Which we can resolve by configuring these two requirements to be installed together as a cycle
+
+  ```
+  pip_parse(
+    ...
+    experimental_requirement_cycles = {
+      "sphinx": [
+        "sphinx",
+        "sphinxcontrib-serializinghtml",
+      ]
+    },
+  )
+  ```
+
+Warning:
+  If a dependency participates in multiple cycles, all of those cycles must be
+  collapsed down to one. For instance `a <-> b` and `a <-> c` cannot be listed
+  as two separate cycles.
+""",
+    ),
+    "experimental_target_platforms": attr.string_list(
+        default = [],
+        doc = """\
+A list of platforms that we will generate the conditional dependency graph for
+cross platform wheels by parsing the wheel metadata. This will generate the
+correct dependencies for packages like `sphinx` or `pylint`, which include
+`colorama` when installed and used on Windows platforms.
+
+An empty list means falling back to the legacy behaviour where the host
+platform is the target platform.
+
+WARNING: It may not work as expected in cases where the python interpreter
+implementation that is being used at runtime is different between different platforms.
+This has been tested for CPython only.
+
+For specific target platforms use values of the form `<os>_<arch>` where `<os>`
+is one of `linux`, `osx`, `windows` and arch is one of `x86_64`, `x86_32`,
+`aarch64`, `s390x` and `ppc64le`.
+
+You can also target a specific Python version by using `cp3<minor_version>_<os>_<arch>`.
+If multiple python versions are specified as target platforms, then select statements
+of the `lib` and `whl` targets will include usage of version aware toolchain config
+settings like `@rules_python//python/config_settings:is_python_3.y`.
+
+Special values: `host` (for generating deps for the host platform only) and
+`<prefix>_*` values. For example, `cp39_*`, `linux_*`, `cp39_linux_*`.
+
+NOTE: this is not for cross-compiling Python wheels but rather for parsing the `whl` METADATA correctly.
+""",
+    ),
+    "extra_pip_args": attr.string_list(
+        doc = """Extra arguments to pass on to pip. Must not contain spaces.
+
+Supports environment variables using the syntax `$VARNAME` or
+`${VARNAME}` (expanding to empty string if unset) or
+`${VARNAME:-default}` (expanding to default if the variable is unset
+or empty in the environment), if `"VARNAME"` is listed in the
+`envsubst` attribute. See also `envsubst`.
+""",
+    ),
+    "isolated": attr.bool(
+        doc = """\
+Whether or not to pass the [--isolated](https://pip.pypa.io/en/stable/cli/pip/#cmdoption-isolated) flag to
+the underlying pip command. Alternatively, the `RULES_PYTHON_PIP_ISOLATED` environment variable can be used
+to control this flag.
+""",
+        default = True,
+    ),
+    "pip_data_exclude": attr.string_list(
+        doc = "Additional data exclusion parameters to add to the pip packages BUILD file.",
+    ),
+    "python_interpreter": attr.string(
+        doc = """\
+The python interpreter to use. This can either be an absolute path or the name
+of a binary found on the host's `PATH` environment variable. If no value is set
+`python3` is defaulted for Unix systems and `python.exe` for Windows.
+""",
+        # NOTE: This attribute should not have a default. See `_get_python_interpreter_attr`
+        # default = "python3"
+    ),
+    "python_interpreter_target": attr.label(
+        allow_single_file = True,
+        doc = """
+If you are using a custom python interpreter built by another repository rule,
+use this attribute to specify its BUILD target. This allows pip_repository to invoke
+pip using the same interpreter as your toolchain. If set, takes precedence over
+python_interpreter. An example value: "@python3_x86_64-unknown-linux-gnu//:python".
+""",
+    ),
+    "quiet": attr.bool(
+        default = True,
+        doc = """\
+If True, suppress printing stdout and stderr output to the terminal.
+
+If you would like to get more diagnostic output, please use:
+
+    RULES_PYTHON_REPO_DEBUG=1
+
+or
+
+    RULES_PYTHON_REPO_DEBUG_VERBOSITY=<INFO|DEBUG|TRACE>
+""",
+    ),
+    # 600 is documented as default here: https://docs.bazel.build/versions/master/skylark/lib/repository_ctx.html#execute
+    "timeout": attr.int(
+        default = 600,
+        doc = "Timeout (in seconds) on the rule's execution duration.",
+    ),
+}
+
+def use_isolated(ctx, attr):
+    """Determine whether or not to pass the pip `--isolated` flag to the pip invocation.
+
+    Args:
+        ctx: repository or module context
+        attr: attributes for the repo rule or tag extension
+
+    Returns:
+        True if --isolated should be passed
+    """
+    use_isolated = attr.isolated
+
+    # The environment variable will take precedence over the attribute
+    isolated_env = ctx.os.environ.get("RULES_PYTHON_PIP_ISOLATED", None)
+    if isolated_env != None:
+        if isolated_env.lower() in ("0", "false"):
+            use_isolated = False
+        else:
+            use_isolated = True
+
+    return use_isolated
diff --git a/python/private/pypi/bzlmod.bzl b/python/private/pypi/bzlmod.bzl
new file mode 100644
index 0000000..e98208a
--- /dev/null
+++ b/python/private/pypi/bzlmod.bzl
@@ -0,0 +1,818 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"pip module extension for use with bzlmod"
+
+load("@bazel_features//:features.bzl", "bazel_features")
+load("@pythons_hub//:interpreters.bzl", "DEFAULT_PYTHON_VERSION", "INTERPRETER_LABELS")
+load("//python/private:auth.bzl", "AUTH_ATTRS")
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:repo_utils.bzl", "repo_utils")
+load("//python/private:version_label.bzl", "version_label")
+load(":attrs.bzl", "use_isolated")
+load(":hub_repository.bzl", "hub_repository")
+load(":parse_requirements.bzl", "host_platform", "parse_requirements", "select_requirement")
+load(":parse_whl_name.bzl", "parse_whl_name")
+load(":pip_repository_attrs.bzl", "ATTRS")
+load(":render_pkg_aliases.bzl", "whl_alias")
+load(":simpleapi_download.bzl", "simpleapi_download")
+load(":whl_library.bzl", "whl_library")
+load(":whl_repo_name.bzl", "whl_repo_name")
+
+def _parse_version(version):
+    major, _, version = version.partition(".")
+    minor, _, version = version.partition(".")
+    patch, _, version = version.partition(".")
+    build, _, version = version.partition(".")
+
+    return struct(
+        # use semver vocabulary here
+        major = major,
+        minor = minor,
+        patch = patch,  # this is called `micro` in the Python interpreter versioning scheme
+        build = build,
+    )
+
+def _major_minor_version(version):
+    version = _parse_version(version)
+    return "{}.{}".format(version.major, version.minor)
+
+def _whl_mods_impl(mctx):
+    """Implementation of the pip.whl_mods tag class.
+
+    This creates the JSON files used to modify the creation of different wheels.
+"""
+    whl_mods_dict = {}
+    for mod in mctx.modules:
+        for whl_mod_attr in mod.tags.whl_mods:
+            if whl_mod_attr.hub_name not in whl_mods_dict.keys():
+                whl_mods_dict[whl_mod_attr.hub_name] = {whl_mod_attr.whl_name: whl_mod_attr}
+            elif whl_mod_attr.whl_name in whl_mods_dict[whl_mod_attr.hub_name].keys():
+                # We cannot have the same wheel name in the same hub, as we
+                # will create the same JSON file name.
+                fail("""\
+Found same whl_name '{}' in the same hub '{}', please use a different hub_name.""".format(
+                    whl_mod_attr.whl_name,
+                    whl_mod_attr.hub_name,
+                ))
+            else:
+                whl_mods_dict[whl_mod_attr.hub_name][whl_mod_attr.whl_name] = whl_mod_attr
+
+    for hub_name, whl_maps in whl_mods_dict.items():
+        whl_mods = {}
+
+        # create a struct that we can pass to the _whl_mods_repo rule
+        # to create the different JSON files.
+        for whl_name, mods in whl_maps.items():
+            build_content = mods.additive_build_content
+            if mods.additive_build_content_file != None and mods.additive_build_content != "":
+                fail("""\
+You cannot use both the additive_build_content and additive_build_content_file arguments at the same time.
+""")
+            elif mods.additive_build_content_file != None:
+                build_content = mctx.read(mods.additive_build_content_file)
+
+            whl_mods[whl_name] = json.encode(struct(
+                additive_build_content = build_content,
+                copy_files = mods.copy_files,
+                copy_executables = mods.copy_executables,
+                data = mods.data,
+                data_exclude_glob = mods.data_exclude_glob,
+                srcs_exclude_glob = mods.srcs_exclude_glob,
+            ))
+
+        _whl_mods_repo(
+            name = hub_name,
+            whl_mods = whl_mods,
+        )
+
+def _create_whl_repos(module_ctx, pip_attr, whl_map, whl_overrides, group_map, simpleapi_cache):
+    logger = repo_utils.logger(module_ctx)
+    python_interpreter_target = pip_attr.python_interpreter_target
+    is_hub_reproducible = True
+
+    # if we do not have the python_interpreter set in the attributes
+    # we programmatically find it.
+    hub_name = pip_attr.hub_name
+    if python_interpreter_target == None and not pip_attr.python_interpreter:
+        python_name = "python_{}_host".format(
+            pip_attr.python_version.replace(".", "_"),
+        )
+        if python_name not in INTERPRETER_LABELS:
+            fail((
+                "Unable to find interpreter for pip hub '{hub_name}' for " +
+                "python_version={version}: Make sure a corresponding " +
+                '`python.toolchain(python_version="{version}")` call exists.' +
+                "Expected to find {python_name} among registered versions:\n  {labels}"
+            ).format(
+                hub_name = hub_name,
+                version = pip_attr.python_version,
+                python_name = python_name,
+                labels = "  \n".join(INTERPRETER_LABELS),
+            ))
+        python_interpreter_target = INTERPRETER_LABELS[python_name]
+
+    pip_name = "{}_{}".format(
+        hub_name,
+        version_label(pip_attr.python_version),
+    )
+    major_minor = _major_minor_version(pip_attr.python_version)
+
+    if hub_name not in whl_map:
+        whl_map[hub_name] = {}
+
+    whl_modifications = {}
+    if pip_attr.whl_modifications != None:
+        for mod, whl_name in pip_attr.whl_modifications.items():
+            whl_modifications[whl_name] = mod
+
+    if pip_attr.experimental_requirement_cycles:
+        requirement_cycles = {
+            name: [normalize_name(whl_name) for whl_name in whls]
+            for name, whls in pip_attr.experimental_requirement_cycles.items()
+        }
+
+        whl_group_mapping = {
+            whl_name: group_name
+            for group_name, group_whls in requirement_cycles.items()
+            for whl_name in group_whls
+        }
+
+        # TODO @aignas 2024-04-05: how do we support different requirement
+        # cycles for different abis/oses? For now we will need the users to
+        # assume the same groups across all versions/platforms until we start
+        # using an alternative cycle resolution strategy.
+        group_map[hub_name] = pip_attr.experimental_requirement_cycles
+    else:
+        whl_group_mapping = {}
+        requirement_cycles = {}
+
+    # Create a new wheel library for each of the different whls
+
+    get_index_urls = None
+    if pip_attr.experimental_index_url:
+        if pip_attr.download_only:
+            fail("Currently unsupported to use `download_only` and `experimental_index_url`")
+
+        get_index_urls = lambda ctx, distributions: simpleapi_download(
+            ctx,
+            attr = struct(
+                index_url = pip_attr.experimental_index_url,
+                extra_index_urls = pip_attr.experimental_extra_index_urls or [],
+                index_url_overrides = pip_attr.experimental_index_url_overrides or {},
+                sources = distributions,
+                envsubst = pip_attr.envsubst,
+                # Auth related info
+                netrc = pip_attr.netrc,
+                auth_patterns = pip_attr.auth_patterns,
+            ),
+            cache = simpleapi_cache,
+            parallel_download = pip_attr.parallel_download,
+        )
+
+    requirements_by_platform = parse_requirements(
+        module_ctx,
+        requirements_by_platform = pip_attr.requirements_by_platform,
+        requirements_linux = pip_attr.requirements_linux,
+        requirements_lock = pip_attr.requirements_lock,
+        requirements_osx = pip_attr.requirements_darwin,
+        requirements_windows = pip_attr.requirements_windows,
+        extra_pip_args = pip_attr.extra_pip_args,
+        get_index_urls = get_index_urls,
+        python_version = major_minor,
+        logger = logger,
+    )
+
+    repository_platform = host_platform(module_ctx.os)
+    for whl_name, requirements in requirements_by_platform.items():
+        # We are not using the "sanitized name" because the user
+        # would need to guess what name we modified the whl name
+        # to.
+        annotation = whl_modifications.get(whl_name)
+        whl_name = normalize_name(whl_name)
+
+        group_name = whl_group_mapping.get(whl_name)
+        group_deps = requirement_cycles.get(group_name, [])
+
+        # Construct args separately so that the lock file can be smaller and does not include unused
+        # attrs.
+        whl_library_args = dict(
+            repo = pip_name,
+            dep_template = "@{}//{{name}}:{{target}}".format(hub_name),
+        )
+        maybe_args = dict(
+            # The following values are safe to omit if they have false like values
+            annotation = annotation,
+            download_only = pip_attr.download_only,
+            enable_implicit_namespace_pkgs = pip_attr.enable_implicit_namespace_pkgs,
+            environment = pip_attr.environment,
+            envsubst = pip_attr.envsubst,
+            experimental_target_platforms = pip_attr.experimental_target_platforms,
+            group_deps = group_deps,
+            group_name = group_name,
+            pip_data_exclude = pip_attr.pip_data_exclude,
+            python_interpreter = pip_attr.python_interpreter,
+            python_interpreter_target = python_interpreter_target,
+            whl_patches = {
+                p: json.encode(args)
+                for p, args in whl_overrides.get(whl_name, {}).items()
+            },
+        )
+        whl_library_args.update({k: v for k, v in maybe_args.items() if v})
+        maybe_args_with_default = dict(
+            # The following values have defaults next to them
+            isolated = (use_isolated(module_ctx, pip_attr), True),
+            quiet = (pip_attr.quiet, True),
+            timeout = (pip_attr.timeout, 600),
+        )
+        whl_library_args.update({
+            k: v
+            for k, (v, default) in maybe_args_with_default.items()
+            if v != default
+        })
+
+        if get_index_urls:
+            # TODO @aignas 2024-05-26: move to a separate function
+            found_something = False
+            for requirement in requirements:
+                for distribution in requirement.whls + [requirement.sdist]:
+                    if not distribution:
+                        # sdist may be None
+                        continue
+
+                    found_something = True
+                    is_hub_reproducible = False
+
+                    if pip_attr.netrc:
+                        whl_library_args["netrc"] = pip_attr.netrc
+                    if pip_attr.auth_patterns:
+                        whl_library_args["auth_patterns"] = pip_attr.auth_patterns
+
+                    # pip is not used to download wheels and the python `whl_library` helpers are only extracting things
+                    whl_library_args.pop("extra_pip_args", None)
+
+                    # This is no-op because pip is not used to download the wheel.
+                    whl_library_args.pop("download_only", None)
+
+                    repo_name = whl_repo_name(pip_name, distribution.filename, distribution.sha256)
+                    whl_library_args["requirement"] = requirement.srcs.requirement
+                    whl_library_args["urls"] = [distribution.url]
+                    whl_library_args["sha256"] = distribution.sha256
+                    whl_library_args["filename"] = distribution.filename
+                    whl_library_args["experimental_target_platforms"] = requirement.target_platforms
+
+                    # Pure python wheels or sdists may need to have a platform here
+                    target_platforms = None
+                    if distribution.filename.endswith("-any.whl") or not distribution.filename.endswith(".whl"):
+                        if len(requirements) > 1:
+                            target_platforms = requirement.target_platforms
+
+                    whl_library(name = repo_name, **dict(sorted(whl_library_args.items())))
+
+                    whl_map[hub_name].setdefault(whl_name, []).append(
+                        whl_alias(
+                            repo = repo_name,
+                            version = major_minor,
+                            filename = distribution.filename,
+                            target_platforms = target_platforms,
+                        ),
+                    )
+
+            if found_something:
+                continue
+
+        requirement = select_requirement(
+            requirements,
+            platform = repository_platform,
+        )
+        if not requirement:
+            # Sometimes the package is not present for host platform if there
+            # are whls specified only in particular requirements files, in that
+            # case just continue, however, if the download_only flag is set up,
+            # then the user can also specify the target platform of the wheel
+            # packages they want to download, in that case there will be always
+            # a requirement here, so we will not be in this code branch.
+            continue
+        elif get_index_urls:
+            logger.warn(lambda: "falling back to pip for installing the right file for {}".format(requirement.requirement_line))
+
+        whl_library_args["requirement"] = requirement.requirement_line
+        if requirement.extra_pip_args:
+            whl_library_args["extra_pip_args"] = requirement.extra_pip_args
+
+        # We sort so that the lock-file remains the same no matter the order of how the
+        # args are manipulated in the code going before.
+        repo_name = "{}_{}".format(pip_name, whl_name)
+        whl_library(name = repo_name, **dict(sorted(whl_library_args.items())))
+        whl_map[hub_name].setdefault(whl_name, []).append(
+            whl_alias(
+                repo = repo_name,
+                version = major_minor,
+            ),
+        )
+
+    return is_hub_reproducible
+
+def _pip_impl(module_ctx):
+    """Implementation of a class tag that creates the pip hub and corresponding pip spoke whl repositories.
+
+    This implementation iterates through all of the `pip.parse` calls and creates
+    different pip hub repositories based on the "hub_name".  Each of the
+    pip calls creates spoke repos that use a specific Python interpreter.
+
+    In a MODULES.bazel file we have:
+
+    pip.parse(
+        hub_name = "pip",
+        python_version = 3.9,
+        requirements_lock = "//:requirements_lock_3_9.txt",
+        requirements_windows = "//:requirements_windows_3_9.txt",
+    )
+    pip.parse(
+        hub_name = "pip",
+        python_version = 3.10,
+        requirements_lock = "//:requirements_lock_3_10.txt",
+        requirements_windows = "//:requirements_windows_3_10.txt",
+    )
+
+    For instance, we have a hub with the name of "pip".
+    A repository named the following is created. It is actually called last when
+    all of the pip spokes are collected.
+
+    - @@rules_python~override~pip~pip
+
+    As shown in the example code above we have the following.
+    Two different pip.parse statements exist in MODULE.bazel provide the hub_name "pip".
+    These definitions create two different pip spoke repositories that are
+    related to the hub "pip".
+    One spoke uses Python 3.9 and the other uses Python 3.10. This code automatically
+    determines the Python version and the interpreter.
+    Both of these pip spokes contain requirements files that includes websocket
+    and its dependencies.
+
+    We also need repositories for the wheels that the different pip spokes contain.
+    For each Python version a different wheel repository is created. In our example
+    each pip spoke had a requirements file that contained websockets. We
+    then create two different wheel repositories that are named the following.
+
+    - @@rules_python~override~pip~pip_39_websockets
+    - @@rules_python~override~pip~pip_310_websockets
+
+    And if the wheel has any other dependencies subsequent wheels are created in the same fashion.
+
+    The hub repository has aliases for `pkg`, `data`, etc, which have a select that resolves to
+    a spoke repository depending on the Python version.
+
+    Also we may have more than one hub as defined in a MODULES.bazel file.  So we could have multiple
+    hubs pointing to various different pip spokes.
+
+    Some other business rules notes. A hub can only have one spoke per Python version.  We cannot
+    have a hub named "pip" that has two spokes that use the Python 3.9 interpreter.  Second
+    we cannot have the same hub name used in sub-modules.  The hub name has to be globally
+    unique.
+
+    This implementation also handles the creation of whl_modification JSON files that are used
+    during the creation of wheel libraries. These JSON files used via the annotations argument
+    when calling wheel_installer.py.
+
+    Args:
+        module_ctx: module contents
+    """
+
+    # Build all of the wheel modifications if the tag class is called.
+    _whl_mods_impl(module_ctx)
+
+    _overriden_whl_set = {}
+    whl_overrides = {}
+
+    for module in module_ctx.modules:
+        for attr in module.tags.override:
+            if not module.is_root:
+                fail("overrides are only supported in root modules")
+
+            if not attr.file.endswith(".whl"):
+                fail("Only whl overrides are supported at this time")
+
+            whl_name = normalize_name(parse_whl_name(attr.file).distribution)
+
+            if attr.file in _overriden_whl_set:
+                fail("Duplicate module overrides for '{}'".format(attr.file))
+            _overriden_whl_set[attr.file] = None
+
+            for patch in attr.patches:
+                if whl_name not in whl_overrides:
+                    whl_overrides[whl_name] = {}
+
+                if patch not in whl_overrides[whl_name]:
+                    whl_overrides[whl_name][patch] = struct(
+                        patch_strip = attr.patch_strip,
+                        whls = [],
+                    )
+
+                whl_overrides[whl_name][patch].whls.append(attr.file)
+
+    # Used to track all the different pip hubs and the spoke pip Python
+    # versions.
+    pip_hub_map = {}
+
+    # Keeps track of all the hub's whl repos across the different versions.
+    # dict[hub, dict[whl, dict[version, str pip]]]
+    # Where hub, whl, and pip are the repo names
+    hub_whl_map = {}
+    hub_group_map = {}
+
+    simpleapi_cache = {}
+    is_extension_reproducible = True
+
+    for mod in module_ctx.modules:
+        for pip_attr in mod.tags.parse:
+            hub_name = pip_attr.hub_name
+            if hub_name not in pip_hub_map:
+                pip_hub_map[pip_attr.hub_name] = struct(
+                    module_name = mod.name,
+                    python_versions = [pip_attr.python_version],
+                )
+            elif pip_hub_map[hub_name].module_name != mod.name:
+                # We cannot have two hubs with the same name in different
+                # modules.
+                fail((
+                    "Duplicate cross-module pip hub named '{hub}': pip hub " +
+                    "names must be unique across modules. First defined " +
+                    "by module '{first_module}', second attempted by " +
+                    "module '{second_module}'"
+                ).format(
+                    hub = hub_name,
+                    first_module = pip_hub_map[hub_name].module_name,
+                    second_module = mod.name,
+                ))
+
+            elif pip_attr.python_version in pip_hub_map[hub_name].python_versions:
+                fail((
+                    "Duplicate pip python version '{version}' for hub " +
+                    "'{hub}' in module '{module}': the Python versions " +
+                    "used for a hub must be unique"
+                ).format(
+                    hub = hub_name,
+                    module = mod.name,
+                    version = pip_attr.python_version,
+                ))
+            else:
+                pip_hub_map[pip_attr.hub_name].python_versions.append(pip_attr.python_version)
+
+            is_hub_reproducible = _create_whl_repos(module_ctx, pip_attr, hub_whl_map, whl_overrides, hub_group_map, simpleapi_cache)
+            is_extension_reproducible = is_extension_reproducible and is_hub_reproducible
+
+    for hub_name, whl_map in hub_whl_map.items():
+        hub_repository(
+            name = hub_name,
+            repo_name = hub_name,
+            whl_map = {
+                key: json.encode(value)
+                for key, value in whl_map.items()
+            },
+            default_version = _major_minor_version(DEFAULT_PYTHON_VERSION),
+            groups = hub_group_map.get(hub_name),
+        )
+
+    if bazel_features.external_deps.extension_metadata_has_reproducible:
+        # If we are not using the `experimental_index_url feature, the extension is fully
+        # deterministic and we don't need to create a lock entry for it.
+        #
+        # In order to be able to dogfood the `experimental_index_url` feature before it gets
+        # stabilized, we have created the `_pip_non_reproducible` function, that will result
+        # in extra entries in the lock file.
+        return module_ctx.extension_metadata(reproducible = is_extension_reproducible)
+    else:
+        return None
+
+def _pip_non_reproducible(module_ctx):
+    """Variant of `_pip_impl` that never marks the extension as reproducible.
+
+    Used by the `pypi_internal` extension below, which defaults to fetching
+    from the PyPI index.
+
+    Args:
+        module_ctx: module contents
+    """
+    _pip_impl(module_ctx)
+
+    # We default to calling the PyPI index and that will go into the
+    # MODULE.bazel.lock file, hence return nothing here.
+    return None
+
+def _pip_parse_ext_attrs(**kwargs):
+    """Get the attributes for the pip extension.
+
+    Args:
+        **kwargs: A kwarg for setting defaults for the specific attributes. The
+        key is expected to be the same as the attribute key.
+
+    Returns:
+        A dict of attributes.
+    """
+    attrs = dict({
+        "experimental_extra_index_urls": attr.string_list(
+            doc = """\
+The extra index URLs to use for downloading wheels using bazel downloader.
+Each value is going to be subject to `envsubst` substitutions if necessary.
+
+The indexes must support Simple API as described here:
+https://packaging.python.org/en/latest/specifications/simple-repository-api/
+
+This is equivalent to `--extra-index-url` `pip` option.
+""",
+            default = [],
+        ),
+        "experimental_index_url": attr.string(
+            default = kwargs.get("experimental_index_url", ""),
+            doc = """\
+The index URL to use for downloading wheels using bazel downloader. This value is going
+to be subject to `envsubst` substitutions if necessary.
+
+The indexes must support Simple API as described here:
+https://packaging.python.org/en/latest/specifications/simple-repository-api/
+
+In the future this could be defaulted to `https://pypi.org` when this feature becomes
+stable.
+
+This is equivalent to `--index-url` `pip` option.
+""",
+        ),
+        "experimental_index_url_overrides": attr.string_dict(
+            doc = """\
+The index URL overrides for each package to use for downloading wheels using
+bazel downloader. This value is going to be subject to `envsubst` substitutions
+if necessary.
+
+The key is the package name (will be normalized before usage) and the value is the
+index URL.
+
+This design pattern has been chosen in order to be fully deterministic about which
+packages come from which source. We want to avoid issues similar to what happened in
+https://pytorch.org/blog/compromised-nightly-dependency/.
+
+The indexes must support Simple API as described here:
+https://packaging.python.org/en/latest/specifications/simple-repository-api/
+""",
+        ),
+        "hub_name": attr.string(
+            mandatory = True,
+            doc = """
+The name of the repo pip dependencies will be accessible from.
+
+This name must be unique between modules; unless your module is guaranteed to
+always be the root module, it's highly recommended to include your module name
+in the hub name. Repo mapping, `use_repo(..., pip="my_modules_pip_deps")`, can
+be used for shorter local names within your module.
+
+Within a module, the same `hub_name` can be specified to group different Python
+versions of pip dependencies under one repository name. This allows using a
+Python version-agnostic name when referring to pip dependencies; the
+correct version will be automatically selected.
+
+Typically, a module will only have a single hub of pip dependencies, but this
+is not required. Each hub is a separate resolution of pip dependencies. This
+means if different programs need different versions of some library, separate
+hubs can be created, and each program can use its respective hub's targets.
+Targets from different hubs should not be used together.
+""",
+        ),
+        "parallel_download": attr.bool(
+            doc = """\
+The flag allows to make use of parallel downloading feature in bazel 7.1 and above
+when the bazel downloader is used. This is by default enabled as it improves the
+performance by a lot, but in case the queries to the simple API are very expensive
+or when debugging authentication issues one may want to disable this feature.
+
+NOTE, This will download (potentially duplicate) data for multiple packages if
+there is more than one index available, but in general this should be negligible
+because the simple API calls are very cheap and the user should not notice any
+extra overhead.
+
+If we are in synchronous mode, then we will use the first result that we
+find in case extra indexes are specified.
+""",
+            default = True,
+        ),
+        "python_version": attr.string(
+            mandatory = True,
+            doc = """
+The Python version the dependencies are targeting, in Major.Minor format
+(e.g., "3.11") or patch level granularity (e.g. "3.11.1").
+
+If an interpreter isn't explicitly provided (using `python_interpreter` or
+`python_interpreter_target`), then the version specified here must have
+a corresponding `python.toolchain()` configured.
+""",
+        ),
+        "whl_modifications": attr.label_keyed_string_dict(
+            mandatory = False,
+            doc = """\
+A dict of labels to wheel names that is typically generated by the whl_modifications.
+The labels are JSON config files describing the modifications.
+""",
+        ),
+    }, **ATTRS)
+    attrs.update(AUTH_ATTRS)
+
+    return attrs
+
+def _whl_mod_attrs():
+    """Get the attributes for the `whl_mods` tag class.
+
+    Returns:
+        A dict of attributes describing one wheel modification.
+    """
+    attrs = {
+        "additive_build_content": attr.string(
+            doc = "(str, optional): Raw text to add to the generated `BUILD` file of a package.",
+        ),
+        "additive_build_content_file": attr.label(
+            doc = """\
+(label, optional): path to a BUILD file to add to the generated
+`BUILD` file of a package. You cannot use both additive_build_content and additive_build_content_file
+arguments at the same time.""",
+        ),
+        "copy_executables": attr.string_dict(
+            doc = """\
+(dict, optional): A mapping of `src` and `out` files for
+[@bazel_skylib//rules:copy_file.bzl][cf]. Targets generated here will also be flagged as
+executable.""",
+        ),
+        "copy_files": attr.string_dict(
+            doc = """\
+(dict, optional): A mapping of `src` and `out` files for
+[@bazel_skylib//rules:copy_file.bzl][cf]""",
+        ),
+        "data": attr.string_list(
+            doc = """\
+(list, optional): A list of labels to add as `data` dependencies to
+the generated `py_library` target.""",
+        ),
+        "data_exclude_glob": attr.string_list(
+            doc = """\
+(list, optional): A list of exclude glob patterns to add as `data` to
+the generated `py_library` target.""",
+        ),
+        "hub_name": attr.string(
+            doc = """\
+Name of the whl modification, hub we use this name to set the modifications for
+pip.parse. If you have different pip hubs you can use a different name,
+otherwise it is best practice to just use one.
+
+You cannot have the same `hub_name` in different modules.  You can reuse the same
+name in the same module for different wheels that you put in the same hub, but you
+cannot have a child module that uses the same `hub_name`.
+""",
+            mandatory = True,
+        ),
+        "srcs_exclude_glob": attr.string_list(
+            doc = """\
+(list, optional): A list of exclude glob patterns for the `srcs` of
+the generated `py_library` target.""",
+        ),
+        "whl_name": attr.string(
+            doc = "The whl name that the modifications are used for.",
+            mandatory = True,
+        ),
+    }
+    return attrs
+
+# NOTE: the naming of 'override' is taken from the bzlmod native
+# 'archive_override', 'git_override' bzlmod functions.
+#
+# This tag class is shared by both the `pypi` and `pypi_internal` extensions
+# defined below.
+_override_tag = tag_class(
+    attrs = {
+        "file": attr.string(
+            doc = """\
+The Python distribution file name which needs to be patched. This will be
+applied to all repositories that setup this distribution via the pip.parse tag
+class.""",
+            mandatory = True,
+        ),
+        "patch_strip": attr.int(
+            default = 0,
+            doc = """\
+The number of leading path segments to be stripped from the file name in the
+patches.""",
+        ),
+        "patches": attr.label_list(
+            doc = """\
+A list of patches to apply to the repository *after* 'whl_library' is extracted
+and BUILD.bazel file is generated.""",
+            mandatory = True,
+        ),
+    },
+    doc = """\
+Apply any overrides (e.g. patches) to a given Python distribution defined by
+other tags in this extension.""",
+)
+
+pypi = module_extension(
+    doc = """\
+This extension is used to make dependencies from pip available.
+
+pip.parse:
+To use, call `pip.parse()` and specify `hub_name` and your requirements file.
+Dependencies will be downloaded and made available in a repo named after the
+`hub_name` argument.
+
+Each `pip.parse()` call configures a particular Python version. Multiple calls
+can be made to configure different Python versions, and will be grouped by
+the `hub_name` argument. This allows the same logical name, e.g. `@pip//numpy`
+to automatically resolve to different, Python version-specific, libraries.
+
+pip.whl_mods:
+This tag class is used to help create JSON files to describe modifications to
+the BUILD files for wheels.
+""",
+    implementation = _pip_impl,
+    tag_classes = {
+        "override": _override_tag,
+        "parse": tag_class(
+            attrs = _pip_parse_ext_attrs(),
+            doc = """\
+This tag class is used to create a pip hub and all of the spokes that are part of that hub.
+This tag class reuses most of the pip attributes that are found in
+@rules_python//python/pip_install:pip_repository.bzl.
+The exception is it does not use the arg 'repo_prefix'.  We set the repository
+prefix for the user and the alias arg is always True in bzlmod.
+""",
+        ),
+        "whl_mods": tag_class(
+            attrs = _whl_mod_attrs(),
+            doc = """\
+This tag class is used to create JSON files that are used when calling wheel_builder.py.  These
+JSON files contain instructions on how to modify a wheel's project.  Each of the attributes
+create different modifications based on the type of attribute. Previously to bzlmod these
+JSON files were referred to as annotations, and were renamed to whl_modifications in this
+extension.
+""",
+        ),
+    },
+)
+
+pypi_internal = module_extension(
+    doc = """\
+This extension is used to make dependencies from pypi available.
+
+For now this is intended to be used internally so that usage of the `pip`
+extension in `rules_python` does not affect the evaluations of the extension
+for the consumers.
+
+pip.parse:
+To use, call `pip.parse()` and specify `hub_name` and your requirements file.
+Dependencies will be downloaded and made available in a repo named after the
+`hub_name` argument.
+
+Each `pip.parse()` call configures a particular Python version. Multiple calls
+can be made to configure different Python versions, and will be grouped by
+the `hub_name` argument. This allows the same logical name, e.g. `@pypi//numpy`
+to automatically resolve to different, Python version-specific, libraries.
+
+pip.whl_mods:
+This tag class is used to help create JSON files to describe modifications to
+the BUILD files for wheels.
+""",
+    implementation = _pip_non_reproducible,
+    tag_classes = {
+        "override": _override_tag,
+        "parse": tag_class(
+            attrs = _pip_parse_ext_attrs(
+                experimental_index_url = "https://pypi.org/simple",
+            ),
+            doc = """\
+This tag class is used to create a pypi hub and all of the spokes that are part of that hub.
+This tag class reuses most of the pypi attributes that are found in
+@rules_python//python/pip_install:pip_repository.bzl.
+The exception is it does not use the arg 'repo_prefix'.  We set the repository
+prefix for the user and the alias arg is always True in bzlmod.
+""",
+        ),
+        "whl_mods": tag_class(
+            attrs = _whl_mod_attrs(),
+            doc = """\
+This tag class is used to create JSON files that are used when calling wheel_builder.py.  These
+JSON files contain instructions on how to modify a wheel's project.  Each of the attributes
+create different modifications based on the type of attribute. Previously to bzlmod these
+JSON files were referred to as annotations, and were renamed to whl_modifications in this
+extension.
+""",
+        ),
+    },
+)
+
+def _whl_mods_repo_impl(rctx):
+    """Materialize one `<whl_name>.json` file per entry of the `whl_mods` attr."""
+
+    # An empty BUILD file so the repository root is a valid package.
+    rctx.file("BUILD.bazel", "")
+
+    for name, json_content in rctx.attr.whl_mods.items():
+        rctx.file(name + ".json", json_content)
+
+_whl_mods_repo = repository_rule(
+    doc = """\
+This rule creates json files based on the whl_mods attribute.
+""",
+    implementation = _whl_mods_repo_impl,
+    attrs = {
+        "whl_mods": attr.string_dict(
+            mandatory = True,
+            doc = "JSON encoded string that is provided to wheel_builder.py",
+        ),
+    },
+)
diff --git a/python/private/pypi/config_settings.bzl b/python/private/pypi/config_settings.bzl
new file mode 100644
index 0000000..9741217
--- /dev/null
+++ b/python/private/pypi/config_settings.bzl
@@ -0,0 +1,320 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module is used to construct the config settings for selecting which distribution is used in the pip hub repository.
+
+Bazel's selects work by selecting the most-specialized configuration setting
+that matches the target platform. We can leverage this fact to ensure that the
+most specialized wheels are used by default with the users being able to
+configure string_flag values to select the less specialized ones.
+
+The list of specialization of the dists goes like follows:
+* sdist
+* py*-none-any.whl
+* py*-abi3-any.whl
+* py*-cpxy-any.whl
+* cp*-none-any.whl
+* cp*-abi3-any.whl
+* cp*-cpxy-any.whl
+* py*-none-plat.whl
+* py*-abi3-plat.whl
+* py*-cpxy-plat.whl
+* cp*-none-plat.whl
+* cp*-abi3-plat.whl
+* cp*-cpxy-plat.whl
+
+Note, that here the specialization of musl vs manylinux wheels is the same in
+order to ensure that the matching fails if the user requests `musl` and we
+don't have it, or vice versa.
+"""
+
+load(":flags.bzl", "INTERNAL_FLAGS", "UniversalWhlFlag", "WhlLibcFlag")
+
+# Public labels of the user-settable flags defined under
+# //python/config_settings that this module selects on.
+FLAGS = struct(
+    **{
+        f: str(Label("//python/config_settings:" + f))
+        for f in [
+            "python_version",
+            "pip_whl_glibc_version",
+            "pip_whl_muslc_version",
+            "pip_whl_osx_arch",
+            "pip_whl_osx_version",
+            "py_linux_libc",
+            "is_pip_whl_no",
+            "is_pip_whl_only",
+            "is_pip_whl_auto",
+        ]
+    }
+)
+
+# Here we create extra string flags that are just to work with the select
+# selecting the most specialized match. We don't allow the user to change
+# them.
+_flags = struct(
+    **{
+        f: str(Label("//python/config_settings:_internal_pip_" + f))
+        for f in INTERNAL_FLAGS
+    }
+)
+
+def config_settings(
+        *,
+        python_versions = [],
+        glibc_versions = [],
+        muslc_versions = [],
+        osx_versions = [],
+        target_platforms = [],
+        name = None,
+        visibility = None,
+        native = native):
+    """Generate all of the pip config settings.
+
+    Args:
+        name (str): Currently unused.
+        python_versions (list[str]): The list of python versions to configure
+            config settings for.
+        glibc_versions (list[str]): The list of glibc version of the wheels to
+            configure config settings for.
+        muslc_versions (list[str]): The list of musl version of the wheels to
+            configure config settings for.
+        osx_versions (list[str]): The list of OSX OS versions to configure
+            config settings for.
+        target_platforms (list[str]): The list of "{os}_{cpu}" for deriving
+            constraint values for each condition.
+        visibility (list[str], optional): The visibility to be passed to the
+            exposed labels. All other labels will be private.
+        native (struct): The struct containing alias and config_setting rules
+            to use for creating the objects. Can be overridden for unit tests
+            reasons.
+    """
+
+    # The empty-string (and ("", "") for platforms) sentinel entries mean
+    # "unconstrained" and yield the generic, non-versioned config settings.
+    glibc_versions = [""] + glibc_versions
+    muslc_versions = [""] + muslc_versions
+    osx_versions = [""] + osx_versions
+    target_platforms = [("", "")] + [
+        t.split("_", 1)
+        for t in target_platforms
+    ]
+
+    for python_version in [""] + python_versions:
+        is_python = "is_python_{}".format(python_version or "version_unset")
+        native.alias(
+            name = is_python,
+            actual = Label("//python/config_settings:" + is_python),
+            visibility = visibility,
+        )
+
+        for os, cpu in target_platforms:
+            # Build the constraint_values and "_{os}_{cpu}" name suffix for
+            # this (possibly partial) target platform.
+            constraint_values = []
+            suffix = ""
+            if os:
+                constraint_values.append("@platforms//os:" + os)
+                suffix += "_" + os
+            if cpu:
+                constraint_values.append("@platforms//cpu:" + cpu)
+                suffix += "_" + cpu
+
+            _dist_config_settings(
+                suffix = suffix,
+                plat_flag_values = _plat_flag_values(
+                    os = os,
+                    cpu = cpu,
+                    osx_versions = osx_versions,
+                    glibc_versions = glibc_versions,
+                    muslc_versions = muslc_versions,
+                ),
+                constraint_values = constraint_values,
+                python_version = python_version,
+                is_python = is_python,
+                visibility = visibility,
+                native = native,
+            )
+
+def _dist_config_settings(*, suffix, plat_flag_values, **kwargs):
+    """Create the family of dist config settings for one target platform.
+
+    `flag_values` is deliberately mutated as the loops progress: each config
+    setting carries all the flags of the previous ones plus one more, which
+    makes every later setting strictly more specialized than the earlier ones
+    (see the module docstring for the resulting specialization order).
+    """
+    flag_values = {_flags.dist: ""}
+
+    # First create an sdist, we will be building upon the flag values, which
+    # will ensure that each sdist config setting is the least specialized of
+    # all. However, we need at least one flag value to cover the case where we
+    # have `sdist` for any platform, hence we have a non-empty `flag_values`
+    # here.
+    _dist_config_setting(
+        name = "sdist{}".format(suffix),
+        flag_values = flag_values,
+        is_pip_whl = FLAGS.is_pip_whl_no,
+        **kwargs
+    )
+
+    for name, f in [
+        ("py_none", _flags.whl_py2_py3),
+        ("py3_none", _flags.whl_py3),
+        ("py3_abi3", _flags.whl_py3_abi3),
+        ("cp3x_none", _flags.whl_pycp3x),
+        ("cp3x_abi3", _flags.whl_pycp3x_abi3),
+        ("cp3x_cp", _flags.whl_pycp3x_abicp),
+    ]:
+        if f in flag_values:
+            # This should never happen as all of the different whls should have
+            # unique flag values.
+            fail("BUG: the flag {} is attempted to be added twice to the list".format(f))
+        else:
+            flag_values[f] = ""
+
+        _dist_config_setting(
+            name = "{}_any{}".format(name, suffix),
+            flag_values = flag_values,
+            is_pip_whl = FLAGS.is_pip_whl_only,
+            **kwargs
+        )
+
+    generic_flag_values = flag_values
+
+    # NOTE: `suffix` and `flag_values` intentionally shadow the outer values
+    # here; every platform-specific suffix starts from a fresh copy of the
+    # generic flags accumulated above.
+    for (suffix, flag_values) in plat_flag_values:
+        flag_values = flag_values | generic_flag_values
+
+        for name, f in [
+            ("py_none", _flags.whl_plat),
+            ("py3_none", _flags.whl_plat_py3),
+            ("py3_abi3", _flags.whl_plat_py3_abi3),
+            ("cp3x_none", _flags.whl_plat_pycp3x),
+            ("cp3x_abi3", _flags.whl_plat_pycp3x_abi3),
+            ("cp3x_cp", _flags.whl_plat_pycp3x_abicp),
+        ]:
+            if f in flag_values:
+                # This should never happen as all of the different whls should have
+                # unique flag values.
+                fail("BUG: the flag {} is attempted to be added twice to the list".format(f))
+            else:
+                flag_values[f] = ""
+
+            _dist_config_setting(
+                name = "{}_{}".format(name, suffix),
+                flag_values = flag_values,
+                is_pip_whl = FLAGS.is_pip_whl_only,
+                **kwargs
+            )
+
+def _to_version_string(version, sep = "."):
+    """Render a (major, minor) version pair as a string, e.g. (3, 11) -> "3.11"."""
+    if not version:
+        return ""
+
+    return str(version[0]) + sep + str(version[1])
+
+def _plat_flag_values(os, cpu, osx_versions, glibc_versions, muslc_versions):
+    """Return (suffix, flag_values) pairs for the platform-specific settings.
+
+    Args:
+        os: The target OS name ("", "windows", "osx" or "linux").
+        cpu: The target CPU name.
+        osx_versions: OSX versions to generate entries for; "" = unversioned.
+        glibc_versions: glibc versions to generate entries for; "" = unversioned.
+        muslc_versions: musl versions to generate entries for; "" = unversioned.
+
+    Returns:
+        A list of (suffix, dict[flag label, value]) tuples.
+    """
+    ret = []
+    if os == "":
+        return []
+    elif os == "windows":
+        # Windows wheels have no libc or osx-version dimension.
+        ret.append(("{}_{}".format(os, cpu), {}))
+    elif os == "osx":
+        # Generate entries for both the arch-specific and universal2 wheels,
+        # each crossed with every requested OSX version.
+        for cpu_, arch in {
+            cpu: UniversalWhlFlag.ARCH,
+            cpu + "_universal2": UniversalWhlFlag.UNIVERSAL,
+        }.items():
+            for osx_version in osx_versions:
+                flags = {
+                    FLAGS.pip_whl_osx_version: _to_version_string(osx_version),
+                }
+                if arch == UniversalWhlFlag.ARCH:
+                    flags[FLAGS.pip_whl_osx_arch] = arch
+
+                if not osx_version:
+                    suffix = "{}_{}".format(os, cpu_)
+                else:
+                    suffix = "{}_{}_{}".format(os, _to_version_string(osx_version, "_"), cpu_)
+
+                ret.append((suffix, flags))
+
+    elif os == "linux":
+        # Cover the "linux", "manylinux" and "musllinux" wheel platform
+        # prefixes; plain "linux" wheels are only generated unversioned.
+        for os_prefix, linux_libc in {
+            os: WhlLibcFlag.GLIBC,
+            "many" + os: WhlLibcFlag.GLIBC,
+            "musl" + os: WhlLibcFlag.MUSL,
+        }.items():
+            if linux_libc == WhlLibcFlag.GLIBC:
+                libc_versions = glibc_versions
+                libc_flag = FLAGS.pip_whl_glibc_version
+            elif linux_libc == WhlLibcFlag.MUSL:
+                libc_versions = muslc_versions
+                libc_flag = FLAGS.pip_whl_muslc_version
+            else:
+                fail("Unsupported libc type: {}".format(linux_libc))
+
+            for libc_version in libc_versions:
+                # Plain "linux_{cpu}" wheels never carry a libc version tag.
+                if libc_version and os_prefix == os:
+                    continue
+                elif libc_version:
+                    suffix = "{}_{}_{}".format(os_prefix, _to_version_string(libc_version, "_"), cpu)
+                else:
+                    suffix = "{}_{}".format(os_prefix, cpu)
+
+                ret.append((
+                    suffix,
+                    {
+                        FLAGS.py_linux_libc: linux_libc,
+                        libc_flag: _to_version_string(libc_version),
+                    },
+                ))
+    else:
+        fail("Unsupported os: {}".format(os))
+
+    return ret
+
+def _dist_config_setting(*, name, is_pip_whl, is_python, python_version, native = native, **kwargs):
+    """A macro to create a target that matches is_pip_whl_auto and one more value.
+
+    Args:
+        name: The name of the public target.
+        is_pip_whl: The config setting to match in addition to
+            `is_pip_whl_auto` when evaluating the config setting.
+        is_python: The python version config_setting to match.
+        python_version: The python version name.
+        native (struct): The struct containing alias and config_setting rules
+            to use for creating the objects. Can be overridden for unit tests
+            reasons.
+        **kwargs: The kwargs passed to the config_setting rule. Visibility of
+            the main alias target is also taken from the kwargs.
+    """
+    _name = "_is_" + name
+
+    visibility = kwargs.get("visibility")
+    native.alias(
+        name = "is_cp{}_{}".format(python_version, name) if python_version else "is_{}".format(name),
+        actual = select({
+            # First match by the python version
+            is_python: _name,
+            # NOTE(review): the default arm resolves to the `is_python`
+            # config_setting itself, which by construction cannot match in this
+            # branch — presumably a deliberate "never matches" sentinel when the
+            # python version differs. Confirm this is intended.
+            "//conditions:default": is_python,
+        }),
+        visibility = visibility,
+    )
+
+    if python_version:
+        # Reuse the config_setting targets that we use with the default
+        # `python_version` setting.
+        return
+
+    config_setting_name = _name + "_setting"
+    native.config_setting(name = config_setting_name, **kwargs)
+
+    # Next match by the `pip_whl` flag value and then match by the flags that
+    # are intrinsic to the distribution.
+    native.alias(
+        name = _name,
+        actual = select({
+            "//conditions:default": FLAGS.is_pip_whl_auto,
+            FLAGS.is_pip_whl_auto: config_setting_name,
+            is_pip_whl: config_setting_name,
+        }),
+        visibility = visibility,
+    )
diff --git a/python/private/pypi/flags.bzl b/python/private/pypi/flags.bzl
new file mode 100644
index 0000000..d834be8
--- /dev/null
+++ b/python/private/pypi/flags.bzl
@@ -0,0 +1,70 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Values and helpers for pip_repository related flags.
+
+NOTE: The transitive loads of this should be kept minimal. This avoids loading
+unnecessary files when all that are needed are flag definitions.
+"""
+
+load("//python/private:enum.bzl", "enum")
+
+# Determines if we should use whls for third party
+#
+# buildifier: disable=name-conventions
+UseWhlFlag = enum(
+    # Automatically decide the effective value based on environment, target
+    # platform and the presence of distributions for a particular package.
+    AUTO = "auto",
+    # Do not use `sdist` and fail if there are no available whls suitable for the target platform.
+    ONLY = "only",
+    # Do not use whl distributions and instead build the whls from `sdist`.
+    NO = "no",
+)
+
+# Determines whether universal wheels should be preferred over arch platform specific ones.
+#
+# buildifier: disable=name-conventions
+UniversalWhlFlag = enum(
+    # Prefer platform-specific wheels over universal wheels.
+    ARCH = "arch",
+    # Prefer universal wheels over platform-specific wheels.
+    UNIVERSAL = "universal",
+)
+
+# Determines which libc flavor is preferred when selecting the linux whl distributions.
+#
+# buildifier: disable=name-conventions
+WhlLibcFlag = enum(
+    # Prefer glibc wheels (e.g. manylinux_2_17_x86_64 or linux_x86_64)
+    GLIBC = "glibc",
+    # Prefer musl wheels (e.g. musllinux_2_17_x86_64)
+    MUSL = "musl",
+)
+
+# Names of the internal (non-user-facing) flags/config settings used when
+# matching whl distributions to target platforms. NOTE(review): these appear
+# to mirror config settings generated per package elsewhere — confirm against
+# the config-settings rendering code before changing.
+INTERNAL_FLAGS = [
+    "dist",
+    "whl_plat",
+    "whl_plat_py3",
+    "whl_plat_py3_abi3",
+    "whl_plat_pycp3x",
+    "whl_plat_pycp3x_abi3",
+    "whl_plat_pycp3x_abicp",
+    "whl_py2_py3",
+    "whl_py3",
+    "whl_py3_abi3",
+    "whl_pycp3x",
+    "whl_pycp3x_abi3",
+    "whl_pycp3x_abicp",
+]
diff --git a/python/private/pypi/generate_group_library_build_bazel.bzl b/python/private/pypi/generate_group_library_build_bazel.bzl
new file mode 100644
index 0000000..54da066
--- /dev/null
+++ b/python/private/pypi/generate_group_library_build_bazel.bzl
@@ -0,0 +1,119 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate the BUILD.bazel contents for a repo defined by a group_library."""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:text_util.bzl", "render")
+load(
+    ":labels.bzl",
+    "PY_LIBRARY_IMPL_LABEL",
+    "PY_LIBRARY_PUBLIC_LABEL",
+    "WHEEL_FILE_IMPL_LABEL",
+    "WHEEL_FILE_PUBLIC_LABEL",
+)
+
+# load() statement emitted at the top of every generated group BUILD file.
+_PRELUDE = """\
+load("@rules_python//python:defs.bzl", "py_library")
+"""
+
+# Template for one dependency group: a `filegroup` aggregating the member
+# wheels and a `py_library` aggregating the member libraries.
+_GROUP_TEMPLATE = """\
+## Group {name}
+
+filegroup(
+    name = "{name}_{whl_public_label}",
+    srcs = [],
+    data = {whl_deps},
+    visibility = {visibility},
+)
+
+py_library(
+    name = "{name}_{lib_public_label}",
+    srcs = [],
+    deps = {lib_deps},
+    visibility = {visibility},
+)
+"""
+
+def _generate_group_libraries(repo_prefix, group_name, group_members):
+    """Generate the component libraries implementing a group.
+
+    A group consists of two underlying composite libraries, one `filegroup`
+    which wraps all the whls of the members and one `py_library` which wraps the
+    pkgs of the members.
+
+    Implementation detail of `generate_group_library_build_bazel` which uses
+    this to construct a BUILD.bazel.
+
+    Args:
+        repo_prefix: str; the pip_parse repo prefix.
+        group_name: str; the name which the user provided for the dep group.
+        group_members: list[str]; the names of the _packages_ (not repositories)
+          which make up the group.
+    """
+
+    group_members = sorted(group_members)
+
+    # With a repo prefix the members live in external repositories
+    # (`@{prefix}{pkg}//...`), and visibility is granted to each member
+    # package so their public aliases can point back at these group targets.
+    if repo_prefix:
+        lib_dependencies = [
+            "@%s%s//:%s" % (repo_prefix, normalize_name(d), PY_LIBRARY_IMPL_LABEL)
+            for d in group_members
+        ]
+        whl_file_deps = [
+            "@%s%s//:%s" % (repo_prefix, normalize_name(d), WHEEL_FILE_IMPL_LABEL)
+            for d in group_members
+        ]
+        visibility = [
+            "@%s%s//:__pkg__" % (repo_prefix, normalize_name(d))
+            for d in group_members
+        ]
+    else:
+        # Without a prefix all packages live inside one (hub) repository, so
+        # the members are addressed as sibling packages.
+        lib_dependencies = [
+            "//%s:%s" % (normalize_name(d), PY_LIBRARY_IMPL_LABEL)
+            for d in group_members
+        ]
+        whl_file_deps = [
+            "//%s:%s" % (normalize_name(d), WHEEL_FILE_IMPL_LABEL)
+            for d in group_members
+        ]
+        visibility = ["//:__subpackages__"]
+
+    return _GROUP_TEMPLATE.format(
+        name = normalize_name(group_name),
+        whl_public_label = WHEEL_FILE_PUBLIC_LABEL,
+        whl_deps = render.indent(render.list(whl_file_deps)).lstrip(),
+        lib_public_label = PY_LIBRARY_PUBLIC_LABEL,
+        lib_deps = render.indent(render.list(lib_dependencies)).lstrip(),
+        visibility = render.indent(render.list(visibility)).lstrip(),
+    )
+
+def generate_group_library_build_bazel(
+        repo_prefix,
+        groups):
+    """Generate a BUILD file for a repository of group implementations
+
+    Args:
+        repo_prefix: the repo prefix that should be used for dependency lists.
+        groups: a mapping of group names to lists of names of component packages.
+
+    Returns:
+        A complete BUILD file as a string
+    """
+
+    content = [_PRELUDE]
+
+    # One section per group, appended after the shared load() prelude.
+    for group_name, group_members in groups.items():
+        content.append(_generate_group_libraries(repo_prefix, group_name, group_members))
+
+    return "\n\n".join(content)
diff --git a/python/private/pypi/generate_whl_library_build_bazel.bzl b/python/private/pypi/generate_whl_library_build_bazel.bzl
new file mode 100644
index 0000000..d25f73a
--- /dev/null
+++ b/python/private/pypi/generate_whl_library_build_bazel.bzl
@@ -0,0 +1,420 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate the BUILD.bazel contents for a repo defined by a whl_library."""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:text_util.bzl", "render")
+load(
+    ":labels.bzl",
+    "DATA_LABEL",
+    "DIST_INFO_LABEL",
+    "PY_LIBRARY_IMPL_LABEL",
+    "PY_LIBRARY_PUBLIC_LABEL",
+    "WHEEL_ENTRY_POINT_PREFIX",
+    "WHEEL_FILE_IMPL_LABEL",
+    "WHEEL_FILE_PUBLIC_LABEL",
+)
+
+# Template for a copy_file target copying an annotation-declared file.
+_COPY_FILE_TEMPLATE = """\
+copy_file(
+    name = "{dest}.copy",
+    src = "{src}",
+    out = "{dest}",
+    is_executable = {is_executable},
+)
+"""
+
+# Template for a py_binary wrapping a console-script entry point.
+_ENTRY_POINT_RULE_TEMPLATE = """\
+py_binary(
+    name = "{name}",
+    srcs = ["{src}"],
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["."],
+    deps = ["{pkg}"],
+)
+"""
+
+# Main BUILD file template: the dist-info and data filegroups, the whl file
+# filegroup and the py_library with the unpacked site-packages contents.
+_BUILD_TEMPLATE = """\
+{loads}
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "{dist_info_label}",
+    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "{data_label}",
+    srcs = glob(["data/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "{whl_file_label}",
+    srcs = ["{whl_name}"],
+    data = {whl_file_deps},
+    visibility = {impl_vis},
+)
+
+py_library(
+    name = "{py_library_label}",
+    srcs = glob(
+        ["site-packages/**/*.py"],
+        exclude={srcs_exclude},
+        # Empty sources are allowed to support wheels that don't have any
+        # pure-Python code, e.g. pymssql, which is written in Cython.
+        allow_empty = True,
+    ),
+    data = {data} + glob(
+        ["site-packages/**/*"],
+        exclude={data_exclude},
+    ),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["site-packages"],
+    deps = {dependencies},
+    tags = {tags},
+    visibility = {impl_vis},
+)
+"""
+
+def _plat_label(plat):
+    """Render a select() key label for a platform string.
+
+    Args:
+        plat: a `...default` label, an `@`-prefixed label, or a plain
+            platform string such as `cp311_linux_x86_64`.
+
+    Returns:
+        str: the label to use as a select() dict key.
+    """
+    if plat.endswith("default"):
+        return plat
+    if plat.startswith("@//"):
+        # Resolve `@//` against the canonical name of the main repository so
+        # the rendered label is unambiguous under bzlmod.
+        return "@@" + str(Label("//:BUILD.bazel")).partition("//")[0].strip("@") + plat.strip("@")
+    elif plat.startswith("@"):
+        return str(Label(plat))
+    else:
+        # Plain platform strings refer to config settings rendered into the
+        # same BUILD file, e.g. `cp311_...` -> `:is_python_3.11_...`.
+        return ":is_" + plat.replace("cp3", "python_3.")
+
+def _render_list_and_select(deps, deps_by_platform, tmpl):
+    """Render a dependency list plus an optional platform select().
+
+    Args:
+        deps: list of dependency names common to all platforms.
+        deps_by_platform: dict[str, list] of platform-specific dependencies.
+        tmpl: format template applied to every dependency name.
+
+    Returns:
+        str: a rendered list, a rendered select(), or `list + select(...)`.
+    """
+    deps = render.list([tmpl.format(d) for d in sorted(deps)])
+
+    if not deps_by_platform:
+        return deps
+
+    deps_by_platform = {
+        _plat_label(p): [
+            tmpl.format(d)
+            for d in sorted(deps)
+        ]
+        for p, deps in sorted(deps_by_platform.items())
+    }
+
+    # Add the default, which means that we will be just using the dependencies in
+    # `deps` for platforms that are not handled in a special way by the packages
+    deps_by_platform.setdefault("//conditions:default", [])
+    deps_by_platform = render.select(deps_by_platform, value_repr = render.list)
+
+    if deps == "[]":
+        # Avoid rendering a useless empty-list prefix.
+        return deps_by_platform
+    else:
+        return "{} + {}".format(deps, deps_by_platform)
+
+def _render_config_settings(dependencies_by_platform):
+    """Render config_setting targets for plain platform dependency keys.
+
+    Args:
+        dependencies_by_platform: dict keyed by platform strings; only keys of
+            the `{os}_{cpu}` or `cp3{minor_version}_{os}_{cpu}` form produce
+            targets — label-like keys are assumed to already exist.
+
+    Returns:
+        tuple: (list of extra load() statements, rendered targets joined by
+            blank lines).
+    """
+    loads = []
+    additional_content = []
+    for p in dependencies_by_platform:
+        # p can be one of the following formats:
+        # * //conditions:default
+        # * @platforms//os:{value}
+        # * @platforms//cpu:{value}
+        # * @//python/config_settings:is_python_3.{minor_version}
+        # * {os}_{cpu}
+        # * cp3{minor_version}_{os}_{cpu}
+        if p.startswith("@") or p.endswith("default"):
+            continue
+
+        abi, _, tail = p.partition("_")
+        if not abi.startswith("cp"):
+            tail = p
+            abi = ""
+
+        os, _, arch = tail.partition("_")
+        # "anyos"/"anyarch" act as wildcards and impose no constraint.
+        os = "" if os == "anyos" else os
+        arch = "" if arch == "anyarch" else arch
+
+        constraint_values = []
+        if arch:
+            constraint_values.append("@platforms//cpu:{}".format(arch))
+        if os:
+            constraint_values.append("@platforms//os:{}".format(os))
+
+        constraint_values_str = render.indent(render.list(constraint_values)).lstrip()
+
+        # An abi such as `cp311` additionally constrains the python version,
+        # which needs the `is_python_config_setting` helper (loaded once).
+        if abi:
+            if not loads:
+                loads.append("""load("@rules_python//python/config_settings:config_settings.bzl", "is_python_config_setting")""")
+
+            additional_content.append(
+                """\
+is_python_config_setting(
+    name = "is_{name}",
+    python_version = "3.{minor_version}",
+    constraint_values = {constraint_values},
+    visibility = ["//visibility:private"],
+)""".format(
+                    name = p.replace("cp3", "python_3."),
+                    minor_version = abi[len("cp3"):],
+                    constraint_values = constraint_values_str,
+                ),
+            )
+        else:
+            additional_content.append(
+                """\
+config_setting(
+    name = "is_{name}",
+    constraint_values = {constraint_values},
+    visibility = ["//visibility:private"],
+)""".format(
+                    name = p.replace("cp3", "python_3."),
+                    constraint_values = constraint_values_str,
+                ),
+            )
+
+    return loads, "\n\n".join(additional_content)
+
+def generate_whl_library_build_bazel(
+        *,
+        dep_template,
+        whl_name,
+        dependencies,
+        dependencies_by_platform,
+        data_exclude,
+        tags,
+        entry_points,
+        annotation = None,
+        group_name = None,
+        group_deps = []):
+    """Generate a BUILD file for an unzipped Wheel
+
+    Args:
+        dep_template: the dependency template that should be used for dependency lists.
+        whl_name: the whl_name that this is generated for.
+        dependencies: a list of PyPI packages that are dependencies to the py_library.
+        dependencies_by_platform: a dict[str, list] of PyPI packages that may vary by platform.
+        data_exclude: more patterns to exclude from the data attribute of generated py_library rules.
+        tags: list of tags to apply to generated py_library rules.
+        entry_points: A dict of entry points to add py_binary rules for.
+        annotation: The annotation for the build file.
+        group_name: Optional[str]; name of the dependency group (if any) which contains this library.
+          If set, this library will behave as a shim to group implementation rules which will provide
+          simultaneously installed dependencies which would otherwise form a cycle.
+        group_deps: List[str]; names of fellow members of the group (if any). These will be excluded
+          from generated deps lists so as to avoid direct cycles. These dependencies will be provided
+          at runtime by the group rules which wrap this library and its fellows together.
+
+    Returns:
+        A complete BUILD file as a string
+    """
+
+    additional_content = []
+    data = []
+    srcs_exclude = []
+    # Copy before extending so the caller's list is not mutated.
+    data_exclude = [] + data_exclude
+    dependencies = sorted([normalize_name(d) for d in dependencies])
+    dependencies_by_platform = {
+        platform: sorted([normalize_name(d) for d in deps])
+        for platform, deps in dependencies_by_platform.items()
+    }
+    tags = sorted(tags)
+
+    for entry_point, entry_point_script_name in entry_points.items():
+        additional_content.append(
+            _generate_entry_point_rule(
+                name = "{}_{}".format(WHEEL_ENTRY_POINT_PREFIX, entry_point),
+                script = entry_point_script_name,
+                pkg = ":" + PY_LIBRARY_PUBLIC_LABEL,
+            ),
+        )
+
+    if annotation:
+        for src, dest in annotation.copy_files.items():
+            data.append(dest)
+            additional_content.append(_generate_copy_commands(src, dest))
+        for src, dest in annotation.copy_executables.items():
+            data.append(dest)
+            additional_content.append(
+                _generate_copy_commands(src, dest, is_executable = True),
+            )
+        data.extend(annotation.data)
+        data_exclude.extend(annotation.data_exclude_glob)
+        srcs_exclude.extend(annotation.srcs_exclude_glob)
+        if annotation.additive_build_content:
+            additional_content.append(annotation.additive_build_content)
+
+    _data_exclude = [
+        "**/* *",
+        "**/*.py",
+        "**/*.pyc",
+        "**/*.pyc.*",  # During pyc creation, temp files named *.pyc.NNNN are created
+        # RECORD is known to contain sha256 checksums of files which might include the checksums
+        # of generated files produced when wheels are installed. The file is ignored to avoid
+        # Bazel caching issues.
+        "**/*.dist-info/RECORD",
+    ]
+    for item in data_exclude:
+        if item not in _data_exclude:
+            _data_exclude.append(item)
+
+    # Ensure this list is normalized
+    # Note: mapping used as set
+    group_deps = {
+        normalize_name(d): True
+        for d in group_deps
+    }
+
+    # Fellow group members are provided at runtime by the group shim, so they
+    # are dropped from every deps list here to break dependency cycles.
+    dependencies = [
+        d
+        for d in dependencies
+        if d not in group_deps
+    ]
+    dependencies_by_platform = {
+        p: deps
+        for p, deps in dependencies_by_platform.items()
+        for deps in [[d for d in deps if d not in group_deps]]
+        if deps
+    }
+
+    loads = [
+        """load("@rules_python//python:defs.bzl", "py_library", "py_binary")""",
+        """load("@bazel_skylib//rules:copy_file.bzl", "copy_file")""",
+    ]
+
+    # Only pull in extra load() statements when config settings were actually
+    # rendered, de-duplicating against the defaults above.
+    loads_, config_settings_content = _render_config_settings(dependencies_by_platform)
+    if config_settings_content:
+        for line in loads_:
+            if line not in loads:
+                loads.append(line)
+        additional_content.append(config_settings_content)
+
+    lib_dependencies = _render_list_and_select(
+        deps = dependencies,
+        deps_by_platform = dependencies_by_platform,
+        tmpl = dep_template.format(name = "{}", target = PY_LIBRARY_PUBLIC_LABEL),
+    )
+
+    whl_file_deps = _render_list_and_select(
+        deps = dependencies,
+        deps_by_platform = dependencies_by_platform,
+        tmpl = dep_template.format(name = "{}", target = WHEEL_FILE_PUBLIC_LABEL),
+    )
+
+    # If this library is a member of a group, its public label aliases need to
+    # point to the group implementation rule not the implementation rules. We
+    # also need to mark the implementation rules as visible to the group
+    # implementation.
+    if group_name and "//:" in dep_template:
+        # This is the legacy behaviour where the group library is outside the hub repo
+        label_tmpl = dep_template.format(
+            name = "_groups",
+            target = normalize_name(group_name) + "_{}",
+        )
+        impl_vis = [dep_template.format(
+            name = "_groups",
+            target = "__pkg__",
+        )]
+        additional_content.extend([
+            "",
+            render.alias(
+                name = PY_LIBRARY_PUBLIC_LABEL,
+                actual = repr(label_tmpl.format(PY_LIBRARY_PUBLIC_LABEL)),
+            ),
+            "",
+            render.alias(
+                name = WHEEL_FILE_PUBLIC_LABEL,
+                actual = repr(label_tmpl.format(WHEEL_FILE_PUBLIC_LABEL)),
+            ),
+        ])
+        py_library_label = PY_LIBRARY_IMPL_LABEL
+        whl_file_label = WHEEL_FILE_IMPL_LABEL
+
+    elif group_name:
+        # Group library inside the hub repo: keep the public labels and make
+        # the implementation visible to the whole hub repository.
+        py_library_label = PY_LIBRARY_PUBLIC_LABEL
+        whl_file_label = WHEEL_FILE_PUBLIC_LABEL
+        impl_vis = [dep_template.format(name = "", target = "__subpackages__")]
+
+    else:
+        py_library_label = PY_LIBRARY_PUBLIC_LABEL
+        whl_file_label = WHEEL_FILE_PUBLIC_LABEL
+        impl_vis = ["//visibility:public"]
+
+    contents = "\n".join(
+        [
+            _BUILD_TEMPLATE.format(
+                loads = "\n".join(sorted(loads)),
+                py_library_label = py_library_label,
+                dependencies = render.indent(lib_dependencies, " " * 4).lstrip(),
+                whl_file_deps = render.indent(whl_file_deps, " " * 4).lstrip(),
+                data_exclude = repr(_data_exclude),
+                whl_name = whl_name,
+                whl_file_label = whl_file_label,
+                tags = repr(tags),
+                data_label = DATA_LABEL,
+                dist_info_label = DIST_INFO_LABEL,
+                entry_point_prefix = WHEEL_ENTRY_POINT_PREFIX,
+                srcs_exclude = repr(srcs_exclude),
+                data = repr(data),
+                impl_vis = repr(impl_vis),
+            ),
+        ] + additional_content,
+    )
+
+    # NOTE: Ensure that we terminate with a new line
+    return contents.rstrip() + "\n"
+
+def _generate_copy_commands(src, dest, is_executable = False):
+    """Generate a [@bazel_skylib//rules:copy_file.bzl%copy_file][cf] target
+
+    [cf]: https://github.com/bazelbuild/bazel-skylib/blob/1.1.1/docs/copy_file_doc.md
+
+    Args:
+        src (str): The label for the `src` attribute of [copy_file][cf]
+        dest (str): The label for the `out` attribute of [copy_file][cf]
+        is_executable (bool, optional): Whether or not the file being copied is executable.
+            sets `is_executable` for [copy_file][cf]
+
+    Returns:
+        str: A `copy_file` instantiation.
+    """
+    # The copy target is named `<dest>.copy` so it does not clash with the
+    # generated output file `<dest>` itself.
+    return _COPY_FILE_TEMPLATE.format(
+        src = src,
+        dest = dest,
+        is_executable = is_executable,
+    )
+
+def _generate_entry_point_rule(*, name, script, pkg):
+    """Generate a Bazel `py_binary` rule for an entry point script.
+
+    Note that the script is used to determine the name of the target. The name of
+    entry point targets should be unique to avoid conflicts with existing sources or
+    directories within a wheel.
+
+    Args:
+        name (str): The name of the generated py_binary.
+        script (str): The path to the entry point's python file.
+        pkg (str): The package owning the entry point. This is expected to
+            match up with the `py_library` defined for each repository.
+
+    Returns:
+        str: A `py_binary` instantiation.
+    """
+    # Normalize Windows-style path separators in the script path.
+    return _ENTRY_POINT_RULE_TEMPLATE.format(
+        name = name,
+        src = script.replace("\\", "/"),
+        pkg = pkg,
+    )
diff --git a/python/private/pypi/group_library.bzl b/python/private/pypi/group_library.bzl
new file mode 100644
index 0000000..ff800e2
--- /dev/null
+++ b/python/private/pypi/group_library.bzl
@@ -0,0 +1,40 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""group_library implementation for WORKSPACE setups."""
+
+load(":generate_group_library_build_bazel.bzl", "generate_group_library_build_bazel")
+
+def _group_library_impl(rctx):
+    """Repository rule implementation: materialize the group BUILD file."""
+    build_file_contents = generate_group_library_build_bazel(
+        repo_prefix = rctx.attr.repo_prefix,
+        groups = rctx.attr.groups,
+    )
+    rctx.file("BUILD.bazel", build_file_contents)
+
+# Repository rule that creates a repo containing only the group wrapper
+# targets generated by `generate_group_library_build_bazel`.
+group_library = repository_rule(
+    attrs = {
+        "groups": attr.string_list_dict(
+            doc = "A mapping of group names to requirements within that group.",
+        ),
+        "repo_prefix": attr.string(
+            doc = "Prefix used for the whl_library created components of each group",
+        ),
+    },
+    implementation = _group_library_impl,
+    doc = """
+Create a package containing only wrapper py_library and whl_library rules for implementing dependency groups.
+This is an implementation detail of dependency groups and should not be used alone.
+    """,
+)
diff --git a/python/private/pypi/hub_repository.bzl b/python/private/pypi/hub_repository.bzl
new file mode 100644
index 0000000..5e209d6
--- /dev/null
+++ b/python/private/pypi/hub_repository.bzl
@@ -0,0 +1,98 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""hub_repository rule: creates the pip "hub" repo contents (package aliases and requirements.bzl)."""
+
+load("//python/private:text_util.bzl", "render")
+load(
+    ":render_pkg_aliases.bzl",
+    "render_multiplatform_pkg_aliases",
+    "whl_alias",
+)
+
+# Static BUILD file written at the root of the hub repository.
+_BUILD_FILE_CONTENTS = """\
+package(default_visibility = ["//visibility:public"])
+
+# Ensure the `requirements.bzl` source can be accessed by stardoc, since users load() from it
+exports_files(["requirements.bzl"])
+"""
+
+def _impl(rctx):
+    """Write the hub repo contents: alias packages, BUILD and requirements.bzl."""
+    bzl_packages = rctx.attr.whl_map.keys()
+    aliases = render_multiplatform_pkg_aliases(
+        aliases = {
+            # whl_map values are JSON-encoded to cross the string-typed
+            # repository rule attribute boundary; decode back into whl_alias.
+            key: [whl_alias(**v) for v in json.decode(values)]
+            for key, values in rctx.attr.whl_map.items()
+        },
+        default_version = rctx.attr.default_version,
+        default_config_setting = "//_config:is_python_" + rctx.attr.default_version,
+        requirement_cycles = rctx.attr.groups,
+    )
+    for path, contents in aliases.items():
+        rctx.file(path, contents)
+
+    # NOTE: we are using the canonical name with the double '@' in order to
+    # always uniquely identify a repository, as the labels are being passed as
+    # a string and the resolution of the label happens at the call-site of the
+    # `requirement`, et al. macros.
+    macro_tmpl = "@@{name}//{{}}:{{}}".format(name = rctx.attr.name)
+
+    rctx.file("BUILD.bazel", _BUILD_FILE_CONTENTS)
+    rctx.template("requirements.bzl", rctx.attr._template, substitutions = {
+        "%%ALL_DATA_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "data")
+            for p in bzl_packages
+        ]),
+        "%%ALL_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "pkg")
+            for p in bzl_packages
+        ]),
+        "%%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%": render.dict({
+            p: macro_tmpl.format(p, "whl")
+            for p in bzl_packages
+        }),
+        "%%MACRO_TMPL%%": macro_tmpl,
+        "%%NAME%%": rctx.attr.repo_name,
+    })
+
+# Instantiated by the pip bzlmod extension: one hub repository per pip.parse
+# hub, exposing package aliases and the requirements.bzl macros.
+hub_repository = repository_rule(
+    attrs = {
+        "default_version": attr.string(
+            mandatory = True,
+            doc = """\
+This is the default python version in the format of X.Y. This should match
+what is setup by the 'python' extension using the 'is_default = True'
+setting.""",
+        ),
+        "groups": attr.string_list_dict(
+            mandatory = False,
+        ),
+        "repo_name": attr.string(
+            mandatory = True,
+            doc = "The apparent name of the repo. This is needed because in bzlmod, the name attribute becomes the canonical name.",
+        ),
+        "whl_map": attr.string_dict(
+            mandatory = True,
+            doc = """\
+The wheel map where values are json.encoded strings of the whl_map constructed
+in the pip.parse tag class.
+""",
+        ),
+        "_template": attr.label(
+            default = ":requirements.bzl.tmpl.bzlmod",
+        ),
+    },
+    doc = """A rule for bzlmod multiple pip repository creation. PRIVATE USE ONLY.""",
+    implementation = _impl,
+)
diff --git a/python/private/pypi/index_sources.bzl b/python/private/pypi/index_sources.bzl
new file mode 100644
index 0000000..2166014
--- /dev/null
+++ b/python/private/pypi/index_sources.bzl
@@ -0,0 +1,53 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A file that houses private functions used in the `bzlmod` extension with the same name.
+"""
+
+def index_sources(line):
+    """Get PyPI sources from a requirements.txt line.
+
+    We interpret the spec described in
+    https://pip.pypa.io/en/stable/reference/requirement-specifiers/#requirement-specifiers
+
+    Args:
+        line(str): The requirements.txt entry.
+
+    Returns:
+        A struct with shas attribute containing a list of shas to download from pypi_index.
+    """
+    # `;` separates the requirement from the environment markers (which may
+    # in turn be followed by `--hash=` arguments).
+    head, _, maybe_hashes = line.partition(";")
+    _, _, version = head.partition("==")
+    version = version.partition(" ")[0].strip()
+
+    if "@" in head:
+        # A direct URL reference (`name @ url`): nothing to fetch from an
+        # index, so no shas are collected.
+        shas = []
+    else:
+        # Without a `;` separator the hashes (if any) live on the line itself.
+        maybe_hashes = maybe_hashes or line
+        shas = [
+            sha.strip()
+            for sha in maybe_hashes.split("--hash=sha256:")[1:]
+        ]
+
+    if head == line:
+        # No environment markers: just strip any hash arguments.
+        head = line.partition("--hash=")[0].strip()
+    else:
+        # Re-attach the environment markers, dropping the hash arguments.
+        head = head + ";" + maybe_hashes.partition("--hash=")[0].strip()
+
+    return struct(
+        requirement = line if not shas else head,
+        version = version,
+        shas = sorted(shas),
+    )
diff --git a/python/private/pypi/labels.bzl b/python/private/pypi/labels.bzl
new file mode 100644
index 0000000..73df07b
--- /dev/null
+++ b/python/private/pypi/labels.bzl
@@ -0,0 +1,24 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Constants used by parts of pip_repository for naming libraries and wheels."""
+
+# Public alias name for a package's wheel file.
+WHEEL_FILE_PUBLIC_LABEL = "whl"
+# Implementation target behind the public `whl` alias (used by group shims).
+WHEEL_FILE_IMPL_LABEL = "_whl"
+# Public alias name for a package's importable py_library.
+PY_LIBRARY_PUBLIC_LABEL = "pkg"
+# Implementation target behind the public `pkg` alias (used by group shims).
+PY_LIBRARY_IMPL_LABEL = "_pkg"
+# Target aggregating the wheel's `data/**` files.
+DATA_LABEL = "data"
+# Target aggregating the wheel's `.dist-info` metadata files.
+DIST_INFO_LABEL = "dist_info"
+# Prefix for generated py_binary entry-point targets.
+WHEEL_ENTRY_POINT_PREFIX = "rules_python_wheel_entry_point"
+# NOTE(review): target name for a no-dependencies variant; its producer is not
+# visible in this file — confirm usage before changing.
+NODEPS_LABEL = "no_deps"
diff --git a/python/private/pypi/package_annotation.bzl b/python/private/pypi/package_annotation.bzl
new file mode 100644
index 0000000..4a54703
--- /dev/null
+++ b/python/private/pypi/package_annotation.bzl
@@ -0,0 +1,49 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Package annotation API for WORKSPACE setups."""
+
+def package_annotation(
+        additive_build_content = None,
+        copy_files = {},
+        copy_executables = {},
+        data = [],
+        data_exclude_glob = [],
+        srcs_exclude_glob = []):
+    """Annotations to apply to the BUILD file content from package generated from a `pip_repository` rule.
+
+    [cf]: https://github.com/bazelbuild/bazel-skylib/blob/main/docs/copy_file_doc.md
+
+    Args:
+        additive_build_content (str, optional): Raw text to add to the generated `BUILD` file of a package.
+        copy_files (dict, optional): A mapping of `src` and `out` files for [@bazel_skylib//rules:copy_file.bzl][cf]
+        copy_executables (dict, optional): A mapping of `src` and `out` files for
+            [@bazel_skylib//rules:copy_file.bzl][cf]. Targets generated here will also be flagged as
+            executable.
+        data (list, optional): A list of labels to add as `data` dependencies to the generated `py_library` target.
+        data_exclude_glob (list, optional): A list of exclude glob patterns to add as `data` to the generated
+            `py_library` target.
+        srcs_exclude_glob (list, optional): A list of labels to add as `srcs` to the generated `py_library` target.
+
+    Returns:
+        str: A json encoded string of the provided content.
+    """
+    # Returned as a JSON string (see Returns) so the annotation can be passed
+    # through string-typed repository rule attributes.
+    return json.encode(struct(
+        additive_build_content = additive_build_content,
+        copy_files = copy_files,
+        copy_executables = copy_executables,
+        data = data,
+        data_exclude_glob = data_exclude_glob,
+        srcs_exclude_glob = srcs_exclude_glob,
+    ))
diff --git a/python/private/pypi/parse_requirements.bzl b/python/private/pypi/parse_requirements.bzl
new file mode 100644
index 0000000..22a6f0a
--- /dev/null
+++ b/python/private/pypi/parse_requirements.bzl
@@ -0,0 +1,481 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Requirements parsing for whl_library creation.
+
+Use cases that the code needs to cover:
+* A single requirements_lock file that is used for the host platform.
+* Per-OS requirements_lock files that are used for the host platform.
+* A target platform specific requirements_lock that is used with extra
+  pip arguments with --platform, etc and download_only = True.
+
+In the last case only a single `requirements_lock` file is allowed, in all
+other cases we assume that there may be a desire to resolve the requirements
+file for the host platform to be backwards compatible with the legacy
+behavior.
+"""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load(":index_sources.bzl", "index_sources")
+load(":parse_requirements_txt.bzl", "parse_requirements_txt")
+load(":whl_target_platforms.bzl", "select_whls", "whl_target_platforms")
+
+# This includes the vendored _translate_cpu and _translate_os from
+# @platforms//host:extension.bzl at version 0.0.9 so that we don't
+# force the users to depend on it.
+
def _translate_cpu(arch):
    """Map a host CPU name to its canonical rules_python spelling."""
    alias_table = [
        (["i386", "i486", "i586", "i686", "i786", "x86"], "x86_32"),
        (["amd64", "x86_64", "x64"], "x86_64"),
        (["ppc", "ppc64", "ppc64le"], "ppc"),
        (["arm", "armv7l"], "arm"),
        (["aarch64"], "aarch64"),
        (["s390x", "s390"], "s390x"),
        (["mips64el", "mips64"], "mips64"),
        (["riscv64"], "riscv64"),
    ]
    for aliases, canonical in alias_table:
        if arch in aliases:
            return canonical

    # Unknown CPU names are passed through unchanged.
    return arch
+
def _translate_os(os):
    """Map a host OS name (already lowercased) to its canonical spelling."""
    prefix_table = [
        ("mac os", "osx"),
        ("freebsd", "freebsd"),
        ("openbsd", "openbsd"),
        ("linux", "linux"),
        ("windows", "windows"),
    ]
    for prefix, canonical in prefix_table:
        if os.startswith(prefix):
            return canonical

    # Unknown OS names are passed through unchanged.
    return os
+
# TODO @aignas 2024-05-13: consider using the same platform tags as are used in
# the //python:versions.bzl
# The set of "{os}_{cpu}" platform strings that requirements files are
# evaluated for by default; the format matches the output of `host_platform`
# below.
DEFAULT_PLATFORMS = [
    "linux_aarch64",
    "linux_arm",
    "linux_ppc",
    "linux_s390x",
    "linux_x86_64",
    "osx_aarch64",
    "osx_x86_64",
    "windows_x86_64",
]
+
def _default_platforms(*, filter):
    """Expand a platform filter into the matching subset of DEFAULT_PLATFORMS.

    Args:
        filter: a platform prefix ending in "*" (e.g. "linux_*"), a plain
            substring to match, or a value with a leading "cp3*" python tag
            segment (which is currently dropped).

    Returns:
        A list with the matching entries of DEFAULT_PLATFORMS.
    """
    if not filter:
        # Fixed error-message typo: was "Must specific a filter string".
        fail("Must specify a filter string, got: {}".format(filter))

    if filter.startswith("cp3"):
        # TODO @aignas 2024-05-23: properly handle python versions in the filter.
        # For now we are just dropping it to ensure that we don't fail.
        _, _, filter = filter.partition("_")

    sanitized = filter.replace("*", "").replace("_", "")
    if sanitized and not sanitized.isalnum():
        fail("The platform filter can only contain '*', '_' and alphanumerics")

    if "*" in filter:
        prefix = filter.rstrip("*")
        if "*" in prefix:
            fail("The filter can only contain '*' at the end of it")

        if not prefix:
            return DEFAULT_PLATFORMS

        return [p for p in DEFAULT_PLATFORMS if p.startswith(prefix)]
    else:
        # No wildcard: treat the filter as a substring match.
        return [p for p in DEFAULT_PLATFORMS if filter in p]
+
def _platforms_from_args(extra_pip_args):
    """Collect the target platforms requested via pip's '--platform' args.

    Handles both the "--platform value" (two args) and "--platform=value"
    (single arg) spellings and deduplicates the resulting platforms while
    keeping their first-seen order.
    """
    values = []

    for arg in extra_pip_args:
        if values and not values[-1]:
            # The previous arg was a bare "--platform"; this arg is its value.
            values[-1] = arg
        elif arg == "--platform":
            # Remember that the value is expected in the next arg.
            values.append("")
        elif arg.startswith("--platform"):
            _, _, value = arg.partition("=")
            if not value:
                _, _, value = arg.partition(" ")
            values.append(value)

    if not values:
        return []

    # A dict is used as an ordered set here.
    seen = {}
    for value in values:
        for plat in whl_target_platforms(value):
            seen[plat.target_platform] = None
    return list(seen.keys())
+
def parse_requirements(
        ctx,
        *,
        requirements_by_platform = {},
        requirements_osx = None,
        requirements_linux = None,
        requirements_lock = None,
        requirements_windows = None,
        extra_pip_args = [],
        get_index_urls = None,
        python_version = None,
        logger = None,
        fail_fn = fail):
    """Get the requirements with platforms that the requirements apply to.

    Args:
        ctx: A context that has .read function that would read contents from a label.
        requirements_by_platform (label_keyed_string_dict): a way to have
            different package versions (or different packages) for different
            os, arch combinations.
        requirements_osx (label): The requirements file for the osx OS.
        requirements_linux (label): The requirements file for the linux OS.
        requirements_lock (label): The requirements file for all OSes, or used as a fallback.
        requirements_windows (label): The requirements file for windows OS.
        extra_pip_args (string list): Extra pip arguments to perform extra validations and to
            be joined with args found in files.
        get_index_urls: Callable[[ctx, list[str]], dict], a callable to get all
            of the distribution URLs from a PyPI index. Accepts ctx and
            distribution names to query.
        python_version: str or None. This is needed when the get_index_urls is
            specified. It should be of the form "3.x.x",
        logger: repo_utils.logger or None, a simple struct to log diagnostic messages.
        fail_fn (Callable[[str], None]): A failure function used in testing failure cases.

    Returns:
        A dict keyed by the normalized distribution name (with underscores),
        where each value is a list of structs, one per distinct
        (requirement line, extra pip args) combination, with the following
        attributes:
         * distribution: The non-normalized distribution name.
         * srcs: The Simple API downloadable source list.
         * requirement_line: The original requirement line.
         * target_platforms: The sorted list of target platforms that this
           package is for.
         * extra_pip_args: The pip args to use when building this requirement.
         * download: True if the requirements were resolved with pip's
           '--platform' args and should be downloaded rather than built.
         * whls: The whl entries found via `get_index_urls`, may be empty.
         * sdist: The sdist entry found via `get_index_urls`, may be None.
    """
    if not (
        requirements_lock or
        requirements_linux or
        requirements_osx or
        requirements_windows or
        requirements_by_platform
    ):
        fail_fn(
            "A 'requirements_lock' attribute must be specified, platform-specific lockfiles " +
            "must be specified via 'requirements_by_platform' or os-specific lockfiles must " +
            "be specified via 'requirements_*' attributes",
        )
        return None

    platforms = _platforms_from_args(extra_pip_args)

    if platforms:
        lock_files = [
            f
            for f in [
                requirements_lock,
                requirements_linux,
                requirements_osx,
                requirements_windows,
            ] + list(requirements_by_platform.keys())
            if f
        ]

        if len(lock_files) > 1:
            # If the --platform argument is used, check that we are using
            # a single `requirements_lock` file instead of the OS specific ones as that is
            # the only correct way to use the API.
            fail_fn("only a single 'requirements_lock' file can be used when using '--platform' pip argument, consider specifying it via 'requirements_lock' attribute")
            return None

        files_by_platform = [
            (lock_files[0], platforms),
        ]
    else:
        # Map each platform-specific lockfile to the platforms it covers; a
        # specifier may be a comma-separated list of exact platforms and/or
        # "prefix_*" filters.
        files_by_platform = {
            file: [
                platform
                for filter_or_platform in specifier.split(",")
                for platform in (_default_platforms(filter = filter_or_platform) if filter_or_platform.endswith("*") else [filter_or_platform])
            ]
            for file, specifier in requirements_by_platform.items()
        }.items()

        for f in [
            # If the users need a greater span of the platforms, they should consider
            # using the 'requirements_by_platform' attribute.
            (requirements_linux, _default_platforms(filter = "linux_*")),
            (requirements_osx, _default_platforms(filter = "osx_*")),
            (requirements_windows, _default_platforms(filter = "windows_*")),
            (requirements_lock, None),
        ]:
            if f[0]:
                files_by_platform.append(f)

    configured_platforms = {}

    options = {}
    requirements = {}
    for file, plats in files_by_platform:
        if plats:
            for p in plats:
                if p in configured_platforms:
                    fail_fn(
                        "Expected the platform '{}' to map to only a single requirements file, but got multiple: '{}', '{}'".format(
                            p,
                            configured_platforms[p],
                            file,
                        ),
                    )
                    return None
                configured_platforms[p] = file
        else:
            # The fallback lockfile (requirements_lock) covers whatever
            # default platforms have not been claimed by the other files. It
            # is processed last, so configured_platforms is complete here.
            plats = [
                p
                for p in DEFAULT_PLATFORMS
                if p not in configured_platforms
            ]

        contents = ctx.read(file)

        # Parse the requirements file directly in starlark to get the information
        # needed for the whl_library declarations later.
        parse_result = parse_requirements_txt(contents)

        # Replicate a surprising behavior that WORKSPACE builds allowed:
        # Defining a repo with the same name multiple times, but only the last
        # definition is respected.
        # The requirement lines might have duplicate names because lines for extras
        # are returned as just the base package name. e.g., `foo[bar]` results
        # in an entry like `("foo", "foo[bar] == 1.0 ...")`.
        requirements_dict = {
            normalize_name(entry[0]): entry
            for entry in sorted(
                parse_result.requirements,
                # Get the longest match and fallback to original WORKSPACE sorting,
                # which should get us the entry with most extras.
                #
                # FIXME @aignas 2024-05-13: The correct behaviour might be to get an
                # entry with all aggregated extras, but it is unclear if we
                # should do this now.
                key = lambda x: (len(x[1].partition("==")[0]), x),
            )
        }.values()

        tokenized_options = []
        for opt in parse_result.options:
            for p in opt.split(" "):
                tokenized_options.append(p)

        pip_args = tokenized_options + extra_pip_args
        for p in plats:
            requirements[p] = requirements_dict
            options[p] = pip_args

    # Invert the platform -> requirements mapping into
    # whl name -> (requirement line, pip args) -> struct with target platforms.
    requirements_by_platform = {}
    for target_platform, reqs_ in requirements.items():
        extra_pip_args = options[target_platform]

        for distribution, requirement_line in reqs_:
            for_whl = requirements_by_platform.setdefault(
                normalize_name(distribution),
                {},
            )

            for_req = for_whl.setdefault(
                (requirement_line, ",".join(extra_pip_args)),
                struct(
                    distribution = distribution,
                    srcs = index_sources(requirement_line),
                    requirement_line = requirement_line,
                    target_platforms = [],
                    extra_pip_args = extra_pip_args,
                    download = len(platforms) > 0,
                ),
            )
            for_req.target_platforms.append(target_platform)

    index_urls = {}
    if get_index_urls:
        if not python_version:
            fail_fn("'python_version' must be provided")
            return None

        index_urls = get_index_urls(
            ctx,
            # Use list({}) as a way to have a set
            list({
                req.distribution: None
                for reqs in requirements_by_platform.values()
                for req in reqs.values()
            }),
        )

    ret = {}
    for whl_name, reqs in requirements_by_platform.items():
        for r in sorted(reqs.values(), key = lambda r: r.requirement_line):
            whls, sdist = _add_dists(
                r,
                index_urls.get(whl_name),
                python_version = python_version,
                logger = logger,
            )

            ret.setdefault(whl_name, []).append(
                struct(
                    distribution = r.distribution,
                    srcs = r.srcs,
                    requirement_line = r.requirement_line,
                    target_platforms = sorted(r.target_platforms),
                    extra_pip_args = r.extra_pip_args,
                    download = r.download,
                    whls = whls,
                    sdist = sdist,
                ),
            )

    return ret
+
def select_requirement(requirements, *, platform):
    """Pick the requirement entry that applies to the given host platform.

    Args:
        requirements (list[struct]): The list of requirements as returned by
            the `parse_requirements` function above.
        platform (str): The host platform. Usually an output of the
        `host_platform` function.

    Returns:
        The first struct whose `target_platforms` contains the host platform,
        or whose `download` flag is set (i.e. it was resolved via pip's
        '--platform' args and applies regardless of the host). None if no
        entry matches.
    """
    for req in requirements:
        if req.download or platform in req.target_platforms:
            return req

    # Sometimes the package is not present for host platform if there
    # are whls specified only in particular requirements files, in that
    # case just continue, however, if the download_only flag is set up,
    # then the user can also specify the target platform of the wheel
    # packages they want to download, in that case there will be always
    # a requirement here, so we will not be in this code branch.
    return None
+
def host_platform(repository_os):
    """Return the "{os}_{cpu}" platform string for the repository OS.

    Args:
        repository_os (struct): The `module_ctx.os` or `repository_ctx.os` attribute.
            See https://bazel.build/rules/lib/builtins/repository_os.html

    Returns:
        The canonical platform string used elsewhere in the `pip` machinery.
    """
    os_name = _translate_os(repository_os.name.lower())
    cpu_name = _translate_cpu(repository_os.arch.lower())
    return os_name + "_" + cpu_name
+
def _add_dists(requirement, index_urls, python_version, logger = None):
    """Pick the downloadable whl and sdist artifacts for one requirement.

    Args:
        requirement: a single requirement struct as built by parse_requirements.
        index_urls: The result of simpleapi_download for this distribution,
            or None when Simple API metadata is unavailable.
        python_version: The version of the python interpreter.
        logger: A logger for printing diagnostic info; may be None.

    Returns:
        A (whls, sdist) tuple: the list of compatible non-yanked wheels and
        the matching source distribution (or None). Both are empty/None when
        index_urls is not given.
    """
    if not index_urls:
        return [], None

    whls = []
    sdist = None

    # TODO @aignas 2024-05-22: it is in theory possible to add all
    # requirements by version instead of by sha256. This may be useful
    # for some projects.
    for sha256 in requirement.srcs.shas:
        # For now if the artifact is marked as yanked we just ignore it.
        #
        # See https://packaging.python.org/en/latest/specifications/simple-repository-api/#adding-yank-support-to-the-simple-api

        maybe_whl = index_urls.whls.get(sha256)
        if maybe_whl and not maybe_whl.yanked:
            whls.append(maybe_whl)
            continue

        maybe_sdist = index_urls.sdists.get(sha256)
        if maybe_sdist and not maybe_sdist.yanked:
            sdist = maybe_sdist
            continue

        if logger:
            logger.warn("Could not find a whl or an sdist with sha256={}".format(sha256))

    yanked = {}
    for dist in whls + [sdist]:
        if dist and dist.yanked:
            yanked.setdefault(dist.yanked, []).append(dist.filename)

    # NOTE: because yanked artifacts are skipped above, `yanked` is currently
    # always empty; the guard on `logger` is still required (the default is
    # None) so that a future change collecting yanked dists cannot crash here.
    if yanked and logger:
        logger.warn(lambda: "\n".join([
            "the following distributions got yanked:",
        ] + [
            "reason: {}\n  {}".format(reason, "\n".join(sorted(dists)))
            for reason, dists in yanked.items()
        ]))

    # Filter out the wheels that are incompatible with the target_platforms.
    whls = select_whls(
        whls = whls,
        want_abis = [
            "none",
            "abi3",
            "cp" + python_version.replace(".", ""),
            # Older python versions have wheels for the `*m` ABI.
            "cp" + python_version.replace(".", "") + "m",
        ],
        want_platforms = requirement.target_platforms,
        want_python_version = python_version,
        logger = logger,
    )

    return whls, sdist
diff --git a/python/private/pypi/parse_requirements_txt.bzl b/python/private/pypi/parse_requirements_txt.bzl
new file mode 100644
index 0000000..6f51d03
--- /dev/null
+++ b/python/private/pypi/parse_requirements_txt.bzl
@@ -0,0 +1,133 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Pip requirements parser for Starlark."""
+
# States of the character-by-character parser state machine implemented below.
_STATE = struct(
    # Consume extraneous whitespace
    ConsumeSpace = 0,
    # Consume a comment
    ConsumeComment = 1,
    # Parse the name of a pip package
    ParseDependency = 2,
    # Parse a full requirement line
    ParseRequirement = 3,
    # Parse a pip option
    ParseOption = 4,
)

# Sentinel appended after the last character of the input so handlers can
# flush pending state; never equal to any single-character string.
EOF = {}
+
def parse_requirements_txt(content):
    """A simplistic (and incomplete) pip requirements lockfile parser.

    Parses package names and their full requirement lines, as well pip
    options.

    Args:
      content: lockfile content as a string

    Returns:
      Struct with fields `requirements` and `options`.

      requirements: List of requirements, where each requirement is a 2-element
        tuple containing the package name and the requirement line.
        E.g., [('certifi', 'certifi==2021.10.8 --hash=sha256:7888...'), ...]

      options: List of pip option lines
    """
    # Normalize CRLF line endings up front so the handlers only see "\n".
    content = content.replace("\r", "")

    result = struct(
        requirements = [],
        options = [],
    )

    state = _STATE.ConsumeSpace
    buf = ""

    # Feed the machine one character at a time, with a trailing EOF sentinel
    # so that the final requirement/option is flushed.
    stream = content.elems()[:]
    stream.append(EOF)

    for ch in stream:
        if state == _STATE.ConsumeSpace:
            state, buf = _handleConsumeSpace(ch)
        elif state == _STATE.ConsumeComment:
            state, buf = _handleConsumeComment(ch, buf, result)
        elif state == _STATE.ParseDependency:
            state, buf = _handleParseDependency(ch, buf, result)
        elif state == _STATE.ParseOption:
            state, buf = _handleParseOption(ch, buf, result)
        elif state == _STATE.ParseRequirement:
            state, buf = _handleParseRequirement(ch, buf, result)
        else:
            fail("Unknown state %d" % state)

    return result
+
def _handleConsumeSpace(ch):
    """Skip whitespace and decide which kind of token the next char starts."""
    if ch == EOF:
        return (_STATE.ConsumeSpace, "")
    if ch.isspace():
        return (_STATE.ConsumeSpace, "")
    if ch == "#":
        return (_STATE.ConsumeComment, "")
    if ch == "-":
        # Options (e.g. "--index-url", "-r") always start with a dash.
        return (_STATE.ParseOption, ch)

    return (_STATE.ParseDependency, ch)
+
def _handleConsumeComment(ch, buf, result):
    """Discard characters until the comment's line ends.

    When the newline is reached, flush whatever was accumulated in `buf`
    before the comment started: either the tail of an in-progress requirement
    or a pip option.
    """
    if ch != "\n":
        return (_STATE.ConsumeComment, buf)

    reqs = result.requirements
    if reqs and len(reqs[-1]) == 1:
        # A package name was recorded but its line was cut short by this
        # comment; complete the (name, line) tuple now.
        reqs[-1] = (reqs[-1][0], buf.rstrip(" \n"))
    elif buf:
        result.options.append(buf.rstrip(" \n"))
    return (_STATE.ConsumeSpace, "")
+
def _handleParseDependency(ch, buf, result):
    """Accumulate a package name until a version/extras separator appears.

    The name is recorded as a 1-tuple (completed later with the full line by
    _handleParseRequirement or _handleConsumeComment).
    """
    if ch == EOF:
        # Fixed error-message typo: was "Enountered".
        fail("Encountered unexpected end of file while parsing requirement")
    elif ch.isspace() or ch in [">", "<", "~", "=", ";", "["]:
        result.requirements.append((buf,))
        return (_STATE.ParseRequirement, buf + ch)

    return (_STATE.ParseDependency, buf + ch)
+
def _handleParseOption(ch, buf, result):
    """Accumulate a pip option, splitting on spaces and honoring backslash
    line continuations."""
    if ch == "\n" and buf.endswith("\\"):
        # Line continuation: drop the backslash and keep accumulating.
        return (_STATE.ParseOption, buf[:-1])
    if ch == " ":
        # Each space-separated token of an option line is stored separately.
        result.options.append(buf.rstrip("\n"))
        return (_STATE.ParseOption, "")
    if ch == "\n" or ch == EOF:
        result.options.append(buf.rstrip("\n"))
        return (_STATE.ConsumeSpace, "")
    if ch == "#" and (not buf or buf[-1].isspace()):
        # A '#' at a token boundary starts a trailing comment.
        return (_STATE.ConsumeComment, buf)

    return (_STATE.ParseOption, buf + ch)
+
def _handleParseRequirement(ch, buf, result):
    """Accumulate the full requirement line for the most recent package name."""
    if ch == "\n" and buf.endswith("\\"):
        # Line continuation: drop the backslash and keep accumulating.
        return (_STATE.ParseRequirement, buf[:-1])
    if ch == "\n" or ch == EOF:
        # Complete the pending (name,) tuple with the finished line.
        result.requirements[-1] = (result.requirements[-1][0], buf.rstrip(" \n"))
        return (_STATE.ConsumeSpace, "")
    if ch == "#" and (not buf or buf[-1].isspace()):
        # A '#' at a token boundary starts a trailing comment.
        return (_STATE.ConsumeComment, buf)

    return (_STATE.ParseRequirement, buf + ch)
diff --git a/python/private/pypi/parse_simpleapi_html.bzl b/python/private/pypi/parse_simpleapi_html.bzl
new file mode 100644
index 0000000..f7cd032
--- /dev/null
+++ b/python/private/pypi/parse_simpleapi_html.bzl
@@ -0,0 +1,106 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Parse SimpleAPI HTML in Starlark.
+"""
+
def parse_simpleapi_html(*, url, content):
    """Get the package URLs for given shas by parsing the Simple API HTML.

    Args:
        url(str): The URL that the HTML content can be downloaded from.
        content(str): The Simple API HTML content.

    Returns:
        A struct with `sdists` and `whls` attributes, each a dict keyed by
        the artifact's sha256, whose values are structs with:
        * filename: The filename of the artifact.
        * url: The URL to download the artifact.
        * sha256: The sha256 of the artifact.
        * metadata_sha256: The whl METADATA sha256 if we can download it. If this is
          present, then the 'metadata_url' is also present. Defaults to "".
        * metadata_url: The URL for the METADATA if we can download it. Defaults to "".
        * yanked: True if the artifact is marked as yanked on the index.
    """
    sdists = {}
    whls = {}

    # Each anchor tag corresponds to one downloadable artifact; the chunk
    # before the first anchor holds the page header with the API version.
    lines = content.split("<a href=\"")

    _, _, api_version = lines[0].partition("name=\"pypi:repository-version\" content=\"")
    api_version, _, _ = api_version.partition("\"")

    # We must assume the 1.0 if it is not present
    # See https://packaging.python.org/en/latest/specifications/simple-repository-api/#clients
    api_version = api_version or "1.0"
    api_version = tuple([int(i) for i in api_version.split(".")])

    if api_version >= (2, 0):
        # We don't expect to have version 2.0 here, but have this check in place just in case.
        # https://packaging.python.org/en/latest/specifications/simple-repository-api/#versioning-pypi-s-simple-api
        fail("Unsupported API version: {}".format(api_version))

    for line in lines[1:]:
        # The anchor looks roughly like:
        #   <dist_url>#sha256=<sha>"<attrs...>><filename></a>
        # and is taken apart with successive partitions on the running `tail`.
        dist_url, _, tail = line.partition("#sha256=")
        sha256, _, tail = tail.partition("\"")

        # See https://packaging.python.org/en/latest/specifications/simple-repository-api/#adding-yank-support-to-the-simple-api
        yanked = "data-yanked" in line

        maybe_metadata, _, tail = tail.partition(">")
        filename, _, tail = tail.partition("<")

        metadata_sha256 = ""
        metadata_url = ""
        for metadata_marker in ["data-core-metadata", "data-dist-info-metadata"]:
            metadata_marker = metadata_marker + "=\"sha256="
            if metadata_marker in maybe_metadata:
                # Implement https://peps.python.org/pep-0714/
                _, _, tail = maybe_metadata.partition(metadata_marker)
                metadata_sha256, _, _ = tail.partition("\"")
                metadata_url = dist_url + ".metadata"
                break

        if filename.endswith(".whl"):
            whls[sha256] = struct(
                filename = filename,
                url = _absolute_url(url, dist_url),
                sha256 = sha256,
                metadata_sha256 = metadata_sha256,
                metadata_url = _absolute_url(url, metadata_url),
                yanked = yanked,
            )
        else:
            # Anything that is not a wheel is treated as a source distribution;
            # sdists never carry PEP 658/714 metadata links.
            sdists[sha256] = struct(
                filename = filename,
                url = _absolute_url(url, dist_url),
                sha256 = sha256,
                metadata_sha256 = "",
                metadata_url = "",
                yanked = yanked,
            )

    return struct(
        sdists = sdists,
        whls = whls,
    )
+
def _absolute_url(index_url, candidate):
    """Resolve a '..'-relative artifact URL against the Simple API page URL."""
    if not candidate.startswith(".."):
        # Anything that does not start with '..' is returned untouched.
        return candidate

    pieces = candidate.split("..")
    tail = pieces[-1]

    # Strip one path segment off the index URL for every '..' in the candidate.
    base = index_url
    for _ in range(len(pieces) - 1):
        base, _, _ = base.rstrip("/").rpartition("/")

    return "{}/{}".format(base, tail.strip("/"))
diff --git a/python/private/pypi/parse_whl_name.bzl b/python/private/pypi/parse_whl_name.bzl
new file mode 100644
index 0000000..063ac84
--- /dev/null
+++ b/python/private/pypi/parse_whl_name.bzl
@@ -0,0 +1,96 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A starlark implementation of a Wheel filename parsing.
+"""
+
# Taken from https://peps.python.org/pep-0600/
_LEGACY_ALIASES = {
    "manylinux1_i686": "manylinux_2_5_i686",
    "manylinux1_x86_64": "manylinux_2_5_x86_64",
    "manylinux2010_i686": "manylinux_2_12_i686",
    "manylinux2010_x86_64": "manylinux_2_12_x86_64",
    "manylinux2014_aarch64": "manylinux_2_17_aarch64",
    "manylinux2014_armv7l": "manylinux_2_17_armv7l",
    "manylinux2014_i686": "manylinux_2_17_i686",
    "manylinux2014_ppc64": "manylinux_2_17_ppc64",
    "manylinux2014_ppc64le": "manylinux_2_17_ppc64le",
    "manylinux2014_s390x": "manylinux_2_17_s390x",
    "manylinux2014_x86_64": "manylinux_2_17_x86_64",
}

def normalize_platform_tag(tag):
    """Resolve legacy manylinux aliases to their modern PEP 600 equivalents.

    Compound (dot-separated) tags are normalized component-wise; duplicates
    produced by normalization are dropped while keeping first-seen order.
    """
    # A dict is used as an ordered string set here.
    normalized = {}
    for component in tag.split("."):
        normalized[_LEGACY_ALIASES.get(component, component)] = None
    return ".".join(normalized.keys())
+
def parse_whl_name(file):
    """Parse whl file name into a struct of constituents.

    Args:
        file (str): The file name of a wheel

    Returns:
        A struct with the following attributes:
            distribution: the distribution name
            version: the version of the distribution
            build_tag: the build tag for the wheel. None if there was no
              build_tag in the given string.
            python_tag: the python tag for the wheel
            abi_tag: the ABI tag for the wheel
            platform_tag: the platform tag
    """
    if not file.endswith(".whl"):
        fail("not a valid wheel: {}".format(file))

    file = file[:-len(".whl")]

    # The layout is (see the binary distribution format and the platform
    # compatibility tags specs on packaging.python.org):
    # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl
    # Peel the dash-separated fields off the right, validating each one.
    rest, _, platform_tag = file.rpartition("-")
    if not platform_tag:
        fail("cannot extract platform tag from the whl filename: {}".format(file))

    rest, _, abi_tag = rest.rpartition("-")
    if not abi_tag:
        fail("cannot extract abi tag from the whl filename: {}".format(file))

    rest, _, python_tag = rest.rpartition("-")
    if not python_tag:
        fail("cannot extract python tag from the whl filename: {}".format(file))

    rest, _, version = rest.rpartition("-")
    if not version:
        fail("cannot extract version from the whl filename: {}".format(file))

    # Whatever remains is the distribution, optionally followed by the build
    # tag. When the optional build tag is present, the field parsed above as
    # "version" is actually the build tag, and the real version follows the
    # distribution name.
    distribution, _, maybe_version = rest.partition("-")
    if maybe_version:
        build_tag = version
        version = maybe_version
    else:
        build_tag = None

    return struct(
        distribution = distribution,
        version = version,
        build_tag = build_tag,
        python_tag = python_tag,
        abi_tag = abi_tag,
        platform_tag = normalize_platform_tag(platform_tag),
    )
diff --git a/python/private/pypi/patch_whl.bzl b/python/private/pypi/patch_whl.bzl
new file mode 100644
index 0000000..c2c633d
--- /dev/null
+++ b/python/private/pypi/patch_whl.bzl
@@ -0,0 +1,111 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A small utility to patch a file in the repository context and repackage it using a Python interpreter
+
+Note, because we are patching a wheel file and we need a new RECORD file, this
+function will print a diff of the RECORD and will ask the user to include a
+RECORD patch in their patches that they maintain. This is to ensure that we can
+satisfy the following usecases:
+* Patch an invalid RECORD file.
+* Patch files within a wheel.
+
+If we were silently regenerating the RECORD file, we may be vulnerable to supply chain
+attacks (it is a very small chance) and keeping the RECORD patches next to the
+other patches ensures that the users have overview on exactly what has changed
+within the wheel.
+"""
+
+load("//python/private:repo_utils.bzl", "repo_utils")
+load(":parse_whl_name.bzl", "parse_whl_name")
+
+_rules_python_root = Label("//:BUILD.bazel")
+
+def patch_whl(rctx, *, python_interpreter, whl_path, patches, **kwargs):
+    """Patch a whl file and repack it to ensure that the RECORD metadata stays correct.
+
+    Args:
+        rctx: repository_ctx
+        python_interpreter: the python interpreter to use.
+        whl_path: The whl file name to be patched.
+        patches: a label-keyed-int dict that has the patch files as keys and
+            the patch_strip as the value.
+        **kwargs: extras passed to repo_utils.execute_checked.
+
+    Returns:
+        value of the repackaging action.
+    """
+
+    # extract files into the current directory for patching as rctx.patch
+    # does not support patching in another directory.
+    whl_input = rctx.path(whl_path)
+
+    # symlink to a zip file to use bazel's extract so that we can use bazel's
+    # repository_ctx patch implementation. The whl file may be in a different
+    # external repository.
+    whl_file_zip = whl_input.basename + ".zip"
+    rctx.symlink(whl_input, whl_file_zip)
+    rctx.extract(whl_file_zip)
+    if not rctx.delete(whl_file_zip):
+        fail("Failed to remove the symlink after extracting")
+
+    # Apply the user-supplied patches in the current (extracted) directory.
+    for patch_file, patch_strip in patches.items():
+        rctx.patch(patch_file, strip = patch_strip)
+
+    # Generate an output filename, which we will be returning. "patched" is
+    # appended to the build tag so the repacked wheel cannot collide with the
+    # original wheel file on disk.
+    parsed_whl = parse_whl_name(whl_input.basename)
+    whl_patched = "{}.whl".format("-".join([
+        parsed_whl.distribution,
+        parsed_whl.version,
+        (parsed_whl.build_tag or "") + "patched",
+        parsed_whl.python_tag,
+        parsed_whl.abi_tag,
+        parsed_whl.platform_tag,
+    ]))
+
+    record_patch = rctx.path("RECORD.patch")
+
+    # Run the in-repo repack tool as a module; PYTHONPATH is pointed at the
+    # rules_python root so `python.private.pypi.repack_whl` is importable.
+    repo_utils.execute_checked(
+        rctx,
+        arguments = [
+            python_interpreter,
+            "-m",
+            "python.private.pypi.repack_whl",
+            "--record-patch",
+            record_patch,
+            whl_input,
+            whl_patched,
+        ],
+        environment = {
+            "PYTHONPATH": str(rctx.path(_rules_python_root).dirname),
+        },
+        **kwargs
+    )
+
+    # The repack tool only writes RECORD.patch when the RECORD changed; if it
+    # exists, ask the user to maintain that patch themselves (see module doc).
+    if record_patch.exists:
+        record_patch_contents = rctx.read(record_patch)
+        warning_msg = """WARNING: the resultant RECORD file of the patch wheel is different
+
+    If you are patching on Windows, you may see this warning because of
+    a known issue (bazelbuild/rules_python#1639) with file endings.
+
+    If you would like to silence the warning, you can apply the patch that is stored in
+      {record_patch}. The contents of the file are below:
+{record_patch_contents}""".format(
+            record_patch = record_patch,
+            record_patch_contents = record_patch_contents,
+        )
+        print(warning_msg)  # buildifier: disable=print
+
+    return rctx.path(whl_patched)
diff --git a/python/private/pypi/pip_repository.bzl b/python/private/pypi/pip_repository.bzl
new file mode 100644
index 0000000..a22f4d9
--- /dev/null
+++ b/python/private/pypi/pip_repository.bzl
@@ -0,0 +1,327 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@bazel_skylib//lib:sets.bzl", "sets")
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR")
+load("//python/private:text_util.bzl", "render")
+load(":parse_requirements.bzl", "host_platform", "parse_requirements", "select_requirement")
+load(":pip_repository_attrs.bzl", "ATTRS")
+load(":render_pkg_aliases.bzl", "render_pkg_aliases", "whl_alias")
+
+def _get_python_interpreter_attr(rctx):
+    """Return the `python_interpreter` attribute or a platform-appropriate default.
+
+    Args:
+        rctx (repository_ctx): Handle to the rule repository context.
+
+    Returns:
+        str: The attribute value or its default
+    """
+    interpreter = rctx.attr.python_interpreter
+    if interpreter:
+        return interpreter
+
+    # No explicit interpreter; fall back to what the host PATH should provide.
+    return "python.exe" if "win" in rctx.os.name else "python3"
+
+def use_isolated(ctx, attr):
+    """Determine whether or not to pass the pip `--isolated` flag to the pip invocation.
+
+    Args:
+        ctx: repository or module context
+        attr: attributes for the repo rule or tag extension
+
+    Returns:
+        True if --isolated should be passed
+    """
+
+    # The environment variable, when present, takes precedence over the attribute.
+    env_value = ctx.os.environ.get("RULES_PYTHON_PIP_ISOLATED", None)
+    if env_value == None:
+        return attr.isolated
+
+    # Anything other than an explicit "0"/"false" enables isolation.
+    return env_value.lower() not in ("0", "false")
+
+# Root BUILD file written into the generated hub repository; it only needs to
+# export `requirements.bzl` so that users (and stardoc) can load() from it.
+_BUILD_FILE_CONTENTS = """\
+package(default_visibility = ["//visibility:public"])
+
+# Ensure the `requirements.bzl` source can be accessed by stardoc, since users load() from it
+exports_files(["requirements.bzl"])
+"""
+
+def _pip_repository_impl(rctx):
+    """Implementation of the `pip_repository` hub repository rule."""
+    requirements_by_platform = parse_requirements(
+        rctx,
+        requirements_by_platform = rctx.attr.requirements_by_platform,
+        requirements_linux = rctx.attr.requirements_linux,
+        requirements_lock = rctx.attr.requirements_lock,
+        requirements_osx = rctx.attr.requirements_darwin,
+        requirements_windows = rctx.attr.requirements_windows,
+        extra_pip_args = rctx.attr.extra_pip_args,
+    )
+    selected_requirements = {}
+    options = None
+    repository_platform = host_platform(rctx.os)
+    for name, requirements in requirements_by_platform.items():
+        r = select_requirement(
+            requirements,
+            platform = repository_platform,
+        )
+        if not r:
+            continue
+        options = options or r.extra_pip_args
+        selected_requirements[name] = r.requirement_line
+
+    bzl_packages = sorted(selected_requirements.keys())
+
+    # Normalize cycles first
+    requirement_cycles = {
+        name: sorted(sets.to_list(sets.make(deps)))
+        for name, deps in rctx.attr.experimental_requirement_cycles.items()
+    }
+
+    # Check for conflicts between cycles _before_ we normalize package names so
+    # that reported errors use the names the user specified
+    for i in range(len(requirement_cycles)):
+        left_group = requirement_cycles.keys()[i]
+        left_deps = requirement_cycles.values()[i]
+        for j in range(len(requirement_cycles) - (i + 1)):
+            right_deps = requirement_cycles.values()[1 + i + j]
+            right_group = requirement_cycles.keys()[1 + i + j]
+            for d in left_deps:
+                if d in right_deps:
+                    fail("Error: Requirement %s cannot be repeated between cycles %s and %s; please merge the cycles." % (d, left_group, right_group))
+
+    # And normalize the names as used in the cycle specs
+    #
+    # NOTE: We must check that a listed dependency is actually in the actual
+    # requirements set for the current platform so that we can support cycles in
+    # platform-conditional requirements. Otherwise we'll blindly generate a
+    # label referencing a package which may not be installed on the current
+    # platform.
+    requirement_cycles = {
+        normalize_name(name): sorted([normalize_name(d) for d in group if normalize_name(d) in bzl_packages])
+        for name, group in requirement_cycles.items()
+    }
+
+    imports = [
+        # NOTE: Maintain the order consistent with `buildifier`
+        'load("@rules_python//python:pip.bzl", "pip_utils")',
+        'load("@rules_python//python/pip_install:pip_repository.bzl", "group_library", "whl_library")',
+    ]
+
+    # Write each annotation out as a JSON file in this repository and record a
+    # label pointing at that file so whl_library repos can consume it.
+    annotations = {}
+    for pkg, annotation in rctx.attr.annotations.items():
+        filename = "{}.annotation.json".format(normalize_name(pkg))
+        rctx.file(filename, json.encode_indent(json.decode(annotation)))
+        # BUGFIX: the label must reference the generated file; previously the
+        # `filename` format argument was unused and a literal placeholder was
+        # emitted instead of the JSON file's name.
+        annotations[pkg] = "@{name}//:{filename}".format(name = rctx.attr.name, filename = filename)
+
+    config = {
+        "download_only": rctx.attr.download_only,
+        "enable_implicit_namespace_pkgs": rctx.attr.enable_implicit_namespace_pkgs,
+        "environment": rctx.attr.environment,
+        "envsubst": rctx.attr.envsubst,
+        "extra_pip_args": options,
+        "isolated": use_isolated(rctx, rctx.attr),
+        "pip_data_exclude": rctx.attr.pip_data_exclude,
+        "python_interpreter": _get_python_interpreter_attr(rctx),
+        "quiet": rctx.attr.quiet,
+        "repo": rctx.attr.name,
+        "timeout": rctx.attr.timeout,
+    }
+    if rctx.attr.use_hub_alias_dependencies:
+        config["dep_template"] = "@{}//{{name}}:{{target}}".format(rctx.attr.name)
+    else:
+        config["repo_prefix"] = "{}_".format(rctx.attr.name)
+
+    if rctx.attr.python_interpreter_target:
+        config["python_interpreter_target"] = str(rctx.attr.python_interpreter_target)
+    if rctx.attr.experimental_target_platforms:
+        config["experimental_target_platforms"] = rctx.attr.experimental_target_platforms
+
+    macro_tmpl = "@%s//{}:{}" % rctx.attr.name
+
+    # One whl_alias per package, pointing at the spoke repo `<hub>_<pkg>`.
+    aliases = render_pkg_aliases(
+        aliases = {
+            pkg: [whl_alias(repo = rctx.attr.name + "_" + pkg)]
+            for pkg in bzl_packages or []
+        },
+    )
+    for path, contents in aliases.items():
+        rctx.file(path, contents)
+
+    rctx.file("BUILD.bazel", _BUILD_FILE_CONTENTS)
+    rctx.template("requirements.bzl", rctx.attr._template, substitutions = {
+        "    # %%GROUP_LIBRARY%%": """\
+    group_repo = "{name}__groups"
+    group_library(
+        name = group_repo,
+        repo_prefix = "{name}_",
+        groups = all_requirement_groups,
+    )""".format(name = rctx.attr.name) if not rctx.attr.use_hub_alias_dependencies else "",
+        "%%ALL_DATA_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "data")
+            for p in bzl_packages
+        ]),
+        "%%ALL_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "pkg")
+            for p in bzl_packages
+        ]),
+        "%%ALL_REQUIREMENT_GROUPS%%": render.dict(requirement_cycles),
+        "%%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%": render.dict({
+            p: macro_tmpl.format(p, "whl")
+            for p in bzl_packages
+        }),
+        "%%ANNOTATIONS%%": render.dict(dict(sorted(annotations.items()))),
+        "%%CONFIG%%": render.dict(dict(sorted(config.items()))),
+        "%%EXTRA_PIP_ARGS%%": json.encode(options),
+        "%%IMPORTS%%": "\n".join(imports),
+        "%%MACRO_TMPL%%": macro_tmpl,
+        "%%NAME%%": rctx.attr.name,
+        "%%PACKAGES%%": render.list(
+            [
+                ("{}_{}".format(rctx.attr.name, p), r)
+                for p, r in sorted(selected_requirements.items())
+            ],
+        ),
+    })
+
+    return
+
+# The workspace (non-bzlmod) hub repository rule behind `pip_parse`. Most
+# attributes are shared with the bzlmod extension via ATTRS; only the
+# annotations attribute and the private template label are declared here.
+pip_repository = repository_rule(
+    attrs = dict(
+        annotations = attr.string_dict(
+            doc = "Optional annotations to apply to packages",
+        ),
+        _template = attr.label(
+            default = ":requirements.bzl.tmpl.workspace",
+        ),
+        **ATTRS
+    ),
+    doc = """Accepts a locked/compiled requirements file and installs the dependencies listed within.
+
+Those dependencies become available in a generated `requirements.bzl` file.
+You can instead check this `requirements.bzl` file into your repo, see the "vendoring" section below.
+
+In your WORKSPACE file:
+
+```starlark
+load("@rules_python//python:pip.bzl", "pip_parse")
+
+pip_parse(
+    name = "pypi",
+    requirements_lock = ":requirements.txt",
+)
+
+load("@pypi//:requirements.bzl", "install_deps")
+
+install_deps()
+```
+
+You can then reference installed dependencies from a `BUILD` file with the alias targets generated in the same repo, for example, for `PyYAML` we would have the following:
+- `@pypi//pyyaml` and `@pypi//pyyaml:pkg` both point to the `py_library`
+  created after extracting the `PyYAML` package.
+- `@pypi//pyyaml:data` points to the extra data included in the package.
+- `@pypi//pyyaml:dist_info` points to the `dist-info` files in the package.
+- `@pypi//pyyaml:whl` points to the wheel file that was extracted.
+
+```starlark
+py_library(
+    name = "bar",
+    ...
+    deps = [
+       "//my/other:dep",
+       "@pypi//numpy",
+       "@pypi//requests",
+    ],
+)
+```
+
+or
+
+```starlark
+load("@pypi//:requirements.bzl", "requirement")
+
+py_library(
+    name = "bar",
+    ...
+    deps = [
+       "//my/other:dep",
+       requirement("numpy"),
+       requirement("requests"),
+    ],
+)
+```
+
+In addition to the `requirement` macro, which is used to access the generated `py_library`
+target generated from a package's wheel, The generated `requirements.bzl` file contains
+functionality for exposing [entry points][whl_ep] as `py_binary` targets as well.
+
+[whl_ep]: https://packaging.python.org/specifications/entry-points/
+
+```starlark
+load("@pypi//:requirements.bzl", "entry_point")
+
+alias(
+    name = "pip-compile",
+    actual = entry_point(
+        pkg = "pip-tools",
+        script = "pip-compile",
+    ),
+)
+```
+
+Note that for packages whose name and script are the same, only the name of the package
+is needed when calling the `entry_point` macro.
+
+```starlark
+load("@pip//:requirements.bzl", "entry_point")
+
+alias(
+    name = "flake8",
+    actual = entry_point("flake8"),
+)
+```
+
+### Vendoring the requirements.bzl file
+
+In some cases you may not want to generate the requirements.bzl file as a repository rule
+while Bazel is fetching dependencies. For example, if you produce a reusable Bazel module
+such as a ruleset, you may want to include the requirements.bzl file rather than make your users
+install the WORKSPACE setup to generate it.
+See https://github.com/bazelbuild/rules_python/issues/608
+
+This is the same workflow as Gazelle, which creates `go_repository` rules with
+[`update-repos`](https://github.com/bazelbuild/bazel-gazelle#update-repos)
+
+To do this, use the "write to source file" pattern documented in
+https://blog.aspect.dev/bazel-can-write-to-the-source-folder
+to put a copy of the generated requirements.bzl into your project.
+Then load the requirements.bzl file directly rather than from the generated repository.
+See the example in rules_python/examples/pip_parse_vendored.
+""",
+    implementation = _pip_repository_impl,
+    # The repository is re-fetched when any of these environment variables change.
+    environ = [
+        "RULES_PYTHON_PIP_ISOLATED",
+        REPO_DEBUG_ENV_VAR,
+    ],
+)
diff --git a/python/private/pypi/pip_repository_attrs.bzl b/python/private/pypi/pip_repository_attrs.bzl
new file mode 100644
index 0000000..2300086
--- /dev/null
+++ b/python/private/pypi/pip_repository_attrs.bzl
@@ -0,0 +1,73 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common attributes between bzlmod pip.parse and workspace pip_parse.
+
+A common attributes shared between bzlmod and workspace implementations
+stored in a separate file to avoid unnecessary refetching of the
+repositories."""
+
+load(":attrs.bzl", COMMON_ATTRS = "ATTRS")
+
+# Attributes specific to requirements-file handling; merged with the common
+# attributes (COMMON_ATTRS) below.
+ATTRS = {
+    "requirements_by_platform": attr.label_keyed_string_dict(
+        doc = """\
+The requirements files and the comma delimited list of target platforms as values.
+
+The keys are the requirement files and the values are comma-separated platform
+identifiers. For now we only support `<os>_<cpu>` values that are present in
+`@platforms//os` and `@platforms//cpu` packages respectively.
+""",
+    ),
+    "requirements_darwin": attr.label(
+        allow_single_file = True,
+        doc = "Override the requirements_lock attribute when the host platform is Mac OS",
+    ),
+    "requirements_linux": attr.label(
+        allow_single_file = True,
+        doc = "Override the requirements_lock attribute when the host platform is Linux",
+    ),
+    "requirements_lock": attr.label(
+        allow_single_file = True,
+        doc = """\
+A fully resolved 'requirements.txt' pip requirement file containing the
+transitive set of your dependencies. If this file is passed instead of
+'requirements' no resolve will take place and pip_repository will create
+individual repositories for each of your dependencies so that wheels are
+fetched/built only for the targets specified by 'build/run/test'. Note that if
+your lockfile is platform-dependent, you can use the `requirements_[platform]`
+attributes.
+
+Note, that in general requirements files are compiled for a specific platform,
+but sometimes they can work for multiple platforms. `rules_python` right now
+supports requirements files that are created for a particular platform without
+platform markers.
+""",
+    ),
+    "requirements_windows": attr.label(
+        allow_single_file = True,
+        doc = "Override the requirements_lock attribute when the host platform is Windows",
+    ),
+    "use_hub_alias_dependencies": attr.bool(
+        default = False,
+        doc = """\
+Controls if the hub alias dependencies are used. If set to true, then the
+group_library will be included in the hub repo.
+
+True will become default in a subsequent release.
+""",
+    ),
+}
+
+# Merge in the attributes shared between bzlmod and workspace implementations.
+ATTRS.update(**COMMON_ATTRS)
diff --git a/python/private/pypi/render_pkg_aliases.bzl b/python/private/pypi/render_pkg_aliases.bzl
new file mode 100644
index 0000000..eb907fe
--- /dev/null
+++ b/python/private/pypi/render_pkg_aliases.bzl
@@ -0,0 +1,622 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""render_pkg_aliases is a function to generate BUILD.bazel contents used to create user-friendly aliases.
+
+This is used in bzlmod and non-bzlmod setups."""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:text_util.bzl", "render")
+load(
+    ":generate_group_library_build_bazel.bzl",
+    "generate_group_library_build_bazel",
+)  # buildifier: disable=bzl-visibility
+load(
+    ":labels.bzl",
+    "DATA_LABEL",
+    "DIST_INFO_LABEL",
+    "PY_LIBRARY_IMPL_LABEL",
+    "PY_LIBRARY_PUBLIC_LABEL",
+    "WHEEL_FILE_IMPL_LABEL",
+    "WHEEL_FILE_PUBLIC_LABEL",
+)
+load(":parse_whl_name.bzl", "parse_whl_name")
+load(":whl_target_platforms.bzl", "whl_target_platforms")
+
+# no_match_error message for selects keyed purely on the Python version
+# (`is_python_<version>` config settings).
+NO_MATCH_ERROR_MESSAGE_TEMPLATE = """\
+No matching wheel for current configuration's Python version.
+
+The current build configuration's Python version doesn't match any of the Python
+versions available for this wheel. This wheel supports the following Python versions:
+    {supported_versions}
+
+As matched by the `@{rules_python}//python/config_settings:is_python_<version>`
+configuration settings.
+
+To determine the current configuration's Python version, run:
+    `bazel config <config id>` (shown further below)
+and look for
+    {rules_python}//python/config_settings:python_version
+
+If the value is missing, then the "default" Python version is being used,
+which has a "null" version value and will not match version constraints.
+"""
+
+# no_match_error message for selects keyed on full config settings (the
+# multi-platform layout); lists the config settings rather than the versions.
+NO_MATCH_ERROR_MESSAGE_TEMPLATE_V2 = """\
+No matching wheel for current configuration's Python version.
+
+The current build configuration's Python version doesn't match any of the Python
+wheels available for this wheel. This wheel supports the following Python
+configuration settings:
+    {config_settings}
+
+To determine the current configuration's Python version, run:
+    `bazel config <config id>` (shown further below)
+and look for
+    {rules_python}//python/config_settings:python_version
+
+If the value is missing, then the "default" Python version is being used,
+which has a "null" version value and will not match version constraints.
+"""
+
+def _render_whl_library_alias(
+        *,
+        name,
+        default_config_setting,
+        aliases,
+        target_name,
+        **kwargs):
+    """Render an alias for common targets.
+
+    Args:
+        name: the name of the alias target being rendered.
+        default_config_setting: the config setting that should also match
+            `//conditions:default`; if no alias carries it, the select gets a
+            `_NO_MATCH_ERROR` no_match_error instead.
+        aliases: list of whl_alias values to choose between.
+        target_name: the target inside each whl repo the alias points to.
+        **kwargs: forwarded to render.alias (e.g. visibility).
+
+    Returns:
+        A string with the rendered alias target.
+    """
+    # Fast path: a single version-less alias needs no select at all.
+    if len(aliases) == 1 and not aliases[0].version:
+        alias = aliases[0]
+        return render.alias(
+            name = name,
+            actual = repr("@{repo}//:{name}".format(
+                repo = alias.repo,
+                name = target_name,
+            )),
+            **kwargs
+        )
+
+    # Create the alias repositories which contains different select
+    # statements  These select statements point to the different pip
+    # whls that are based on a specific version of Python.
+    selects = {}
+    no_match_error = "_NO_MATCH_ERROR"
+    for alias in sorted(aliases, key = lambda x: x.version):
+        actual = "@{repo}//:{name}".format(repo = alias.repo, name = target_name)
+        selects.setdefault(actual, []).append(alias.config_setting)
+        if alias.config_setting == default_config_setting:
+            selects[actual].append("//conditions:default")
+            no_match_error = None
+
+    return render.alias(
+        name = name,
+        actual = render.select(
+            {
+                tuple(sorted(
+                    conditions,
+                    # Group `is_python` and other conditions for easier reading
+                    # when looking at the generated files.
+                    key = lambda condition: ("is_python" not in condition, condition),
+                )): target
+                for target, conditions in sorted(selects.items())
+            },
+            no_match_error = no_match_error,
+            # This key_repr is used to render selects.with_or keys
+            key_repr = lambda x: repr(x[0]) if len(x) == 1 else render.tuple(x),
+            name = "selects.with_or",
+        ),
+        **kwargs
+    )
+
+def _render_common_aliases(*, name, aliases, default_config_setting = None, group_name = None):
+    """Render the BUILD.bazel contents with the aliases for a single package.
+
+    Args:
+        name: the normalized package name, used for the top-level alias.
+        aliases: list of whl_alias values for this package.
+        default_config_setting: the config setting treated as the default; if
+            it is not among the aliases' config settings, a no-match error is
+            rendered into the select statements.
+        group_name: optional dependency-group (requirement cycle) name; when
+            set, `pkg`/`whl` aliases are redirected through `//_groups`.
+
+    Returns:
+        The BUILD file contents as a string.
+    """
+    lines = [
+        """load("@bazel_skylib//lib:selects.bzl", "selects")""",
+        """package(default_visibility = ["//visibility:public"])""",
+    ]
+
+    config_settings = None
+    if aliases:
+        config_settings = sorted([v.config_setting for v in aliases if v.config_setting])
+
+    # When the default config setting cannot match any alias, emit a helpful
+    # error message for the select statements instead of failing obscurely.
+    # (Idiom fix: this used to be an empty-`pass` branch with the logic in the
+    # `else`; the condition is now stated directly.)
+    if config_settings and default_config_setting not in config_settings:
+        error_msg = NO_MATCH_ERROR_MESSAGE_TEMPLATE_V2.format(
+            config_settings = render.indent(
+                "\n".join(config_settings),
+            ).lstrip(),
+            rules_python = "rules_python",
+        )
+
+        lines.append("_NO_MATCH_ERROR = \"\"\"\\\n{error_msg}\"\"\"".format(
+            error_msg = error_msg,
+        ))
+
+        # This is to simplify the code in _render_whl_library_alias and to ensure
+        # that we don't pass a 'default_version' that is not in 'versions'.
+        default_config_setting = None
+
+    lines.append(
+        render.alias(
+            name = name,
+            actual = repr(":pkg"),
+        ),
+    )
+    lines.extend(
+        [
+            _render_whl_library_alias(
+                name = name,
+                default_config_setting = default_config_setting,
+                aliases = aliases,
+                target_name = target_name,
+                visibility = ["//_groups:__subpackages__"] if name.startswith("_") else None,
+            )
+            for target_name, name in {
+                PY_LIBRARY_PUBLIC_LABEL: PY_LIBRARY_IMPL_LABEL if group_name else PY_LIBRARY_PUBLIC_LABEL,
+                WHEEL_FILE_PUBLIC_LABEL: WHEEL_FILE_IMPL_LABEL if group_name else WHEEL_FILE_PUBLIC_LABEL,
+                DATA_LABEL: DATA_LABEL,
+                DIST_INFO_LABEL: DIST_INFO_LABEL,
+            }.items()
+        ],
+    )
+    if group_name:
+        lines.extend(
+            [
+                render.alias(
+                    name = "pkg",
+                    actual = repr("//_groups:{}_pkg".format(group_name)),
+                ),
+                render.alias(
+                    name = "whl",
+                    actual = repr("//_groups:{}_whl".format(group_name)),
+                ),
+            ],
+        )
+
+    return "\n\n".join(lines)
+
+def render_pkg_aliases(*, aliases, default_config_setting = None, requirement_cycles = None):
+    """Create alias declarations for each PyPI package.
+
+    The aliases should be appended to the pip_repository BUILD.bazel file. These aliases
+    allow users to use requirement() without needed a corresponding `use_repo()` for each dep
+    when using bzlmod.
+
+    Args:
+        aliases: dict, the keys are normalized distribution names and values are the
+            whl_alias instances.
+        default_config_setting: the default to be used for the aliases.
+        requirement_cycles: any package groups to also add.
+
+    Returns:
+        A dict of file paths and their contents.
+    """
+    if not aliases:
+        return {}
+    if type(aliases) != type({}):
+        fail("The aliases need to be provided as a dict, got: {}".format(type(aliases)))
+
+    # Build a whl -> group lookup from the (normalized) requirement cycles.
+    group_of_whl = {}
+    if requirement_cycles:
+        requirement_cycles = {
+            group: [normalize_name(whl) for whl in whls]
+            for group, whls in requirement_cycles.items()
+        }
+
+        for group, whls in requirement_cycles.items():
+            for whl in whls:
+                group_of_whl[whl] = group
+
+    # One BUILD.bazel per package directory in the hub repository.
+    files = {}
+    for dist_name, pkg_aliases in aliases.items():
+        dist = normalize_name(dist_name)
+        files["{}/BUILD.bazel".format(dist)] = _render_common_aliases(
+            name = dist,
+            aliases = pkg_aliases,
+            default_config_setting = default_config_setting,
+            group_name = group_of_whl.get(dist),
+        ).strip()
+
+    if requirement_cycles:
+        files["_groups/BUILD.bazel"] = generate_group_library_build_bazel("", requirement_cycles)
+    return files
+
+def whl_alias(*, repo, version = None, config_setting = None, filename = None, target_platforms = None):
+    """The bzl_packages value used by by the render_pkg_aliases function.
+
+    This contains the minimum amount of information required to generate correct
+    aliases in a hub repository.
+
+    Args:
+        repo: str, the repo of where to find the things to be aliased.
+        version: optional(str), the version of the python toolchain that this
+            whl alias is for. If not set, then non-version aware aliases will be
+            constructed. This is mainly used for better error messages when there
+            is no match found during a select.
+        config_setting: optional(Label or str), the config setting that we should use. Defaults
+            to "//_config:is_python_{version}".
+        filename: optional(str), the distribution filename to derive the config_setting.
+        target_platforms: optional(list[str]), the list of target_platforms for this
+            distribution.
+
+    Returns:
+        a struct with the validated and parsed values.
+    """
+    if not repo:
+        fail("'repo' must be specified")
+
+    # A version implies a config setting; derive the default one when the
+    # caller did not pass one explicitly, and stringify whatever we end up with.
+    if version:
+        if not config_setting:
+            config_setting = "//_config:is_python_" + version
+        config_setting = str(config_setting)
+
+    return struct(
+        repo = repo,
+        version = version,
+        config_setting = config_setting,
+        filename = filename,
+        target_platforms = target_platforms,
+    )
+
+def render_multiplatform_pkg_aliases(*, aliases, default_version = None, **kwargs):
+    """Render the multi-platform pkg aliases.
+
+    Args:
+        aliases: dict[str, list(whl_alias)] A list of aliases that will be
+          transformed from ones having `filename` to ones having `config_setting`.
+        default_version: str, the default python version. Defaults to None.
+        **kwargs: extra arguments passed to render_pkg_aliases.
+
+    Returns:
+        A dict of file paths and their contents.
+    """
+
+    # Collect the flag versions present across all aliases; they are needed
+    # both for deriving per-package config settings and for generating the
+    # //_config package below.
+    flag_versions = get_whl_flag_versions(
+        aliases = [
+            a
+            for bunch in aliases.values()
+            for a in bunch
+        ],
+    )
+
+    # Convert filename-carrying aliases into config_setting-carrying ones.
+    config_setting_aliases = {
+        pkg: multiplatform_whl_aliases(
+            aliases = pkg_aliases,
+            default_version = default_version,
+            glibc_versions = flag_versions.get("glibc_versions", []),
+            muslc_versions = flag_versions.get("muslc_versions", []),
+            osx_versions = flag_versions.get("osx_versions", []),
+        )
+        for pkg, pkg_aliases in aliases.items()
+    }
+
+    contents = render_pkg_aliases(
+        aliases = config_setting_aliases,
+        **kwargs
+    )
+    contents["_config/BUILD.bazel"] = _render_config_settings(**flag_versions)
+    return contents
+
def multiplatform_whl_aliases(*, aliases, default_version = None, **kwargs):
    """convert a list of aliases from filename to config_setting ones.

    Args:
        aliases: list(whl_alias): The aliases to process. Any aliases that have
            the filename set will be converted to a list of aliases, each with
            an appropriate config_setting value.
        default_version: string | None, the default python version to use.
        **kwargs: Extra parameters passed to get_filename_config_settings.

    Returns:
        A list of whl_alias structs to be used in the hub repo.
    """

    ret = []
    versioned_additions = {}
    for alias in aliases:
        # Aliases without a filename already carry a usable config_setting
        # (or none at all) and are passed through unchanged.
        if not alias.filename:
            ret.append(alias)
            continue

        config_settings, all_versioned_settings = get_filename_config_settings(
            # TODO @aignas 2024-05-27: pass the parsed whl to reduce the
            # number of duplicate operations.
            filename = alias.filename,
            target_platforms = alias.target_platforms,
            python_version = alias.version,
            python_default = default_version == alias.version,
            **kwargs
        )

        for setting in config_settings:
            ret.append(whl_alias(
                repo = alias.repo,
                version = alias.version,
                config_setting = "//_config" + setting,
            ))

        # Now for the versioned platform config settings, we need to select one
        # that best fits the bill and if there are multiple wheels, e.g.
        # manylinux_2_17_x86_64 and manylinux_2_28_x86_64, then we need to select
        # the former when the glibc is in the range of [2.17, 2.28) and then chose
        # the later if it is [2.28, ...). If the 2.28 wheel was not present in
        # the hub, then we would need to use 2.17 for all the glibc version
        # configurations.
        #
        # Here we add the version settings to a dict where we key the range of
        # versions that the whl spans. If the wheel supports musl and glibc at
        # the same time, we do this for each supported platform, hence the
        # double dict.
        for default_setting, versioned in all_versioned_settings.items():
            versions = sorted(versioned)
            min_version = versions[0]
            max_version = versions[-1]

            versioned_additions.setdefault(default_setting, {})[(min_version, max_version)] = struct(
                repo = alias.repo,
                python_version = alias.version,
                settings = versioned,
            )

    versioned = {}
    for default_setting, candidates in versioned_additions.items():
        # Sort the candidates by the range of versions the span, so that we
        # start with the lowest version.
        for _, candidate in sorted(candidates.items()):
            # Set the default with the first candidate, which gives us the highest
            # compatibility. If the users want to use a higher-version than the default
            # they can configure the glibc_version flag.
            versioned.setdefault(default_setting, whl_alias(
                version = candidate.python_version,
                config_setting = "//_config" + default_setting,
                repo = candidate.repo,
            ))

            # We will be overwriting previously added entries, but that is intended.
            for _, setting in sorted(candidate.settings.items()):
                versioned[setting] = whl_alias(
                    version = candidate.python_version,
                    config_setting = "//_config" + setting,
                    repo = candidate.repo,
                )

    ret.extend(versioned.values())
    return ret
+
def _render_config_settings(python_versions = [], target_platforms = [], osx_versions = [], glibc_versions = [], muslc_versions = []):
    """Render the BUILD file content declaring the hub's config settings."""

    # Render each version list as an indented Starlark list literal; lstrip so
    # the first line lines up with the attribute assignment in the template.
    rendered = {
        attr_name: render.indent(render.list(values)).lstrip()
        for attr_name, values in {
            "glibc_versions": glibc_versions,
            "muslc_versions": muslc_versions,
            "osx_versions": osx_versions,
            "python_versions": python_versions,
            "target_platforms": target_platforms,
        }.items()
    }

    return """\
load("@rules_python//python/private/pypi:config_settings.bzl", "config_settings")

config_settings(
    name = "config_settings",
    glibc_versions = {glibc_versions},
    muslc_versions = {muslc_versions},
    osx_versions = {osx_versions},
    python_versions = {python_versions},
    target_platforms = {target_platforms},
    visibility = ["//:__subpackages__"],
)""".format(**rendered)
+
def get_whl_flag_versions(aliases):
    """Return all of the flag versions that is used by the aliases

    Args:
        aliases: list[whl_alias]

    Returns:
        dict, which may have keys:
          * python_versions
          * glibc_versions
          * muslc_versions
          * osx_versions
          * target_platforms

        Each value is a sorted list; keys whose collected set is empty are
        omitted from the result.
    """
    python_versions = {}
    glibc_versions = {}
    target_platforms = {}
    muslc_versions = {}
    osx_versions = {}

    for a in aliases:
        if not a.version and not a.filename:
            continue

        if a.version:
            python_versions[a.version] = None

        if not a.filename:
            continue

        # Only platform-specific wheels carry version information in their
        # platform tags; `-any.whl` wheels and sdists contribute just the
        # explicitly requested target platforms.
        if a.filename.endswith(".whl") and not a.filename.endswith("-any.whl"):
            parsed = parse_whl_name(a.filename)
        else:
            for plat in a.target_platforms or []:
                target_platforms[plat] = None
            continue

        for platform_tag in parsed.platform_tag.split("."):
            # NOTE: rebinding `parsed` here is safe because the list being
            # iterated was computed once when this loop started.
            parsed = whl_target_platforms(platform_tag)

            for p in parsed:
                target_platforms[p.target_platform] = None

            # Plain `win*` / `linux*` tags carry no version component.
            if platform_tag.startswith("win") or platform_tag.startswith("linux"):
                continue

            # The remaining tags embed a `major_minor` pair, e.g.
            # `manylinux_2_17_x86_64`, `musllinux_1_1_x86_64`,
            # `macosx_11_0_arm64`; extract it and bucket by libc/OS family.
            head, _, tail = platform_tag.partition("_")
            major, _, tail = tail.partition("_")
            minor, _, tail = tail.partition("_")
            if tail:
                version = (int(major), int(minor))
                if "many" in head:
                    glibc_versions[version] = None
                elif "musl" in head:
                    muslc_versions[version] = None
                elif "mac" in head:
                    osx_versions[version] = None
                else:
                    fail(platform_tag)

    return {
        k: sorted(v)
        for k, v in {
            "glibc_versions": glibc_versions,
            "muslc_versions": muslc_versions,
            "osx_versions": osx_versions,
            "python_versions": python_versions,
            "target_platforms": target_platforms,
        }.items()
        if v
    }
+
def get_filename_config_settings(
        *,
        filename,
        target_platforms,
        glibc_versions,
        muslc_versions,
        osx_versions,
        python_version = "",
        python_default = True):
    """Get the filename config settings.

    Args:
        filename: the distribution filename (can be a whl or an sdist).
        target_platforms: list[str], target platforms in "{os}_{cpu}" format.
        glibc_versions: list[tuple[int, int]], list of versions.
        muslc_versions: list[tuple[int, int]], list of versions.
        osx_versions: list[tuple[int, int]], list of versions.
        python_version: the python version to generate the config_settings for.
        python_default: if we should include the setting when python_version is not set.

    Returns:
        A tuple:
         * A list of config settings that are generated by
           //python/private/pypi:config_settings.bzl
         * A dict mapping default (unversioned) settings to their
           version-specific settings.
    """
    prefixes = []
    suffixes = []

    # (0, 0) is reserved internally as the "no version constraint" sentinel
    # (see _whl_config_setting_suffixes), so callers must never supply it.
    if (0, 0) in glibc_versions:
        fail("Invalid version in 'glibc_versions': cannot specify (0, 0) as a value")
    if (0, 0) in muslc_versions:
        fail("Invalid version in 'muslc_versions': cannot specify (0, 0) as a value")
    if (0, 0) in osx_versions:
        fail("Invalid version in 'osx_versions': cannot specify (0, 0) as a value")

    glibc_versions = sorted(glibc_versions)
    muslc_versions = sorted(muslc_versions)
    osx_versions = sorted(osx_versions)
    setting_supported_versions = {}

    if filename.endswith(".whl"):
        parsed = parse_whl_name(filename)

        # Classify the python tag into the buckets used by the generated
        # config settings: `py` (py2.py3), `cp3x` (CPython) or `py3`.
        if parsed.python_tag == "py2.py3":
            py = "py"
        elif parsed.python_tag.startswith("cp"):
            py = "cp3x"
        else:
            py = "py3"

        if parsed.abi_tag.startswith("cp"):
            abi = "cp"
        else:
            abi = parsed.abi_tag

        if parsed.platform_tag == "any":
            prefixes = ["{}_{}_any".format(py, abi)]
            suffixes = target_platforms
        else:
            prefixes = ["{}_{}".format(py, abi)]
            suffixes = _whl_config_setting_suffixes(
                platform_tag = parsed.platform_tag,
                glibc_versions = glibc_versions,
                muslc_versions = muslc_versions,
                osx_versions = osx_versions,
                setting_supported_versions = setting_supported_versions,
            )
    else:
        prefixes = ["sdist"]
        suffixes = target_platforms

    # Prefix the settings with the python version; keep the unversioned
    # variant as well when this is the default python version.
    if python_default and python_version:
        prefixes += ["cp{}_{}".format(python_version, p) for p in prefixes]
    elif python_version:
        prefixes = ["cp{}_{}".format(python_version, p) for p in prefixes]
    elif python_default:
        pass
    else:
        fail("BUG: got no python_version and it is not default")

    versioned = {
        ":is_{}_{}".format(p, suffix): {
            version: ":is_{}_{}".format(p, setting)
            for version, setting in versions.items()
        }
        for p in prefixes
        for suffix, versions in setting_supported_versions.items()
    }

    if suffixes or versioned:
        return [":is_{}_{}".format(p, s) for p in prefixes for s in suffixes], versioned
    else:
        # No platform suffixes at all; setting_supported_versions is empty
        # here (otherwise `versioned` would be non-empty).
        return [":is_{}".format(p) for p in prefixes], setting_supported_versions
+
def _whl_config_setting_suffixes(
        platform_tag,
        glibc_versions,
        muslc_versions,
        osx_versions,
        setting_supported_versions):
    """Compute config-setting suffixes for a (possibly compound) platform tag.

    Unversioned suffixes are returned; version-specific settings are recorded
    into `setting_supported_versions`, keyed by the unversioned setting name.
    """
    suffixes = []
    for tag in platform_tag.split("."):
        for plat in whl_target_platforms(tag):
            prefix = plat.os
            suffix = plat.cpu
            if "manylinux" in tag:
                prefix = "manylinux"
                versions = glibc_versions
            elif "musllinux" in tag:
                prefix = "musllinux"
                versions = muslc_versions
            elif plat.os in ["linux", "windows"]:
                # (0, 0) is the "no version constraint" sentinel.
                versions = [(0, 0)]
            elif plat.os == "osx":
                versions = osx_versions
                if "universal2" in tag:
                    suffix += "_universal2"
            else:
                fail("Unsupported whl os: {}".format(plat.os))

            default_setting = "{}_{}".format(prefix, suffix)
            supported = {}
            for candidate in versions:
                if candidate == (0, 0):
                    suffixes.append(default_setting)
                elif candidate >= plat.version:
                    supported[candidate] = "{}_{}_{}_{}".format(
                        prefix,
                        candidate[0],
                        candidate[1],
                        suffix,
                    )
            if supported:
                setting_supported_versions[default_setting] = supported

    return suffixes
diff --git a/python/private/pypi/repack_whl.py b/python/private/pypi/repack_whl.py
new file mode 100644
index 0000000..9052ac3
--- /dev/null
+++ b/python/private/pypi/repack_whl.py
@@ -0,0 +1,185 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Regenerate a whl file after patching and cleanup the patched contents.
+
+This script will take contents of the current directory and create a new wheel
+out of it and will remove all files that were written to the wheel.
+"""
+
+from __future__ import annotations
+
+import argparse
+import difflib
+import logging
+import pathlib
+import sys
+import tempfile
+
+from tools.wheelmaker import _WhlFile
+
+# NOTE: Implement the following matching of what goes into the RECORD
+# https://peps.python.org/pep-0491/#the-dist-info-directory
+_EXCLUDES = [
+    "RECORD",
+    "INSTALLER",
+    "RECORD.jws",
+    "RECORD.p7s",
+    "REQUESTED",
+]
+
+_DISTINFO = "dist-info"
+
+
+def _unidiff_output(expected, actual, record):
+    """
+    Helper function. Returns a string containing the unified diff of two
+    multiline strings.
+    """
+
+    expected = expected.splitlines(1)
+    actual = actual.splitlines(1)
+
+    diff = difflib.unified_diff(
+        expected, actual, fromfile=f"a/{record}", tofile=f"b/{record}"
+    )
+
+    return "".join(diff)
+
+
def _files_to_pack(dir: pathlib.Path, want_record: str) -> list[pathlib.Path]:
    """Return the files to pack into the wheel, in a stable order.

    Files named in *want_record* come first (in RECORD order), followed by
    sorted extra on-disk files, which keeps regenerated RECORD patches small.
    """

    # Collect existing files referenced by the RECORD file.
    record_files = []
    record_distinfos = []
    for entry in want_record.splitlines():
        name, _, _ = entry.partition(",")
        candidate = dir / name

        # Entries that no longer exist on disk will not appear in the final
        # RECORD file, so they are dropped here.
        if not candidate.exists():
            continue

        if candidate.parent.name.endswith(_DISTINFO):
            if candidate.name not in _EXCLUDES:
                record_distinfos.append(candidate)
        else:
            record_files.append(candidate)

    # Collect files present on disk but absent from the RECORD file.
    extra_files = []
    extra_distinfos = []
    for candidate in dir.rglob("*"):
        if candidate.is_dir():
            continue

        if candidate.parent.name.endswith(_DISTINFO):
            # NOTE: we implement the following matching of what goes into
            # the RECORD:
            # https://peps.python.org/pep-0491/#the-dist-info-directory
            if candidate.name in _EXCLUDES:
                continue
            if candidate not in record_distinfos:
                extra_distinfos.append(candidate)
        elif candidate not in record_files:
            extra_files.append(candidate)

    # Sort only the extra files for reproducibility; RECORD order is kept.
    extra_files.sort()
    extra_distinfos.sort()

    # This order keeps the structure of the RECORD file stable and the
    # patchsets to it small in general.
    return record_files + extra_files + record_distinfos + extra_distinfos
+
+
def main(sys_argv):
    """Repackage the patched wheel contents into a new whl file.

    Moves everything in the current directory (except the original wheel)
    into a temporary directory, writes a new wheel from it, and emits a
    RECORD patch when the regenerated RECORD differs from the original.

    Args:
        sys_argv: the command line arguments, without the program name.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "whl_path",
        type=pathlib.Path,
        help="The original wheel file that we have patched.",
    )
    parser.add_argument(
        "--record-patch",
        type=pathlib.Path,
        help="The output path that we are going to write the RECORD file patch to.",
    )
    parser.add_argument(
        "output",
        type=pathlib.Path,
        help="The output path that we are going to write a new file to.",
    )
    args = parser.parse_args(sys_argv)

    cwd = pathlib.Path.cwd()
    logging.debug("=" * 80)
    logging.debug("Repackaging the wheel")
    logging.debug("=" * 80)

    with tempfile.TemporaryDirectory(dir=cwd) as tmpdir:
        patched_wheel_dir = cwd / tmpdir
        logging.debug(f"Created a tmpdir: {patched_wheel_dir}")

        excludes = [args.whl_path, patched_wheel_dir]

        # Move the patched contents out of the way so that the temporary
        # directory holds exactly what should go into the new wheel.
        logging.debug("Moving whl contents to the newly created tmpdir")
        for p in cwd.glob("*"):
            if p in excludes:
                logging.debug(f"Ignoring: {p}")
                continue

            rel_path = p.relative_to(cwd)
            dst = p.rename(patched_wheel_dir / rel_path)
            logging.debug(f"mv {p} -> {dst}")

        distinfo_dir = next(iter(patched_wheel_dir.glob("*dist-info")))
        logging.debug(f"Found dist-info dir: {distinfo_dir}")
        record_path = distinfo_dir / "RECORD"
        record_contents = record_path.read_text() if record_path.exists() else ""
        distribution_prefix = distinfo_dir.with_suffix("").name

        with _WhlFile(
            args.output, mode="w", distribution_prefix=distribution_prefix
        ) as out:
            for p in _files_to_pack(patched_wheel_dir, record_contents):
                rel_path = p.relative_to(patched_wheel_dir)
                out.add_file(str(rel_path), p)

            logging.debug("Writing RECORD file")
            got_record = out.add_recordfile().decode("utf-8", "surrogateescape")
            # Capture the RECORD path while the archive wrapper is still
            # open, instead of touching `out` after the `with` block exits.
            record_name = out.distinfo_path("RECORD")

    if got_record == record_contents:
        logging.info(f"Created a whl file: {args.output}")
        return

    record_diff = _unidiff_output(record_contents, got_record, record_name)
    if args.record_patch:
        args.record_patch.write_text(record_diff)
        logging.warning(
            f"Please apply patch to the RECORD file ({args.record_patch}):\n{record_diff}"
        )
    else:
        # Robustness fix: without --record-patch there is nowhere to write
        # the patch; surface the diff instead of crashing on `None`.
        logging.warning(f"The RECORD file has changed:\n{record_diff}")
+
+
if __name__ == "__main__":
    # Configure verbose (DEBUG) logging before delegating to main(); the
    # module/level prefix matches the log lines emitted above.
    logging.basicConfig(
        format="%(module)s: %(levelname)s: %(message)s", level=logging.DEBUG
    )

    sys.exit(main(sys.argv[1:]))
diff --git a/python/private/pypi/requirements.bzl.tmpl.bzlmod b/python/private/pypi/requirements.bzl.tmpl.bzlmod
new file mode 100644
index 0000000..ba227ae
--- /dev/null
+++ b/python/private/pypi/requirements.bzl.tmpl.bzlmod
@@ -0,0 +1,26 @@
+"""Starlark representation of locked requirements.
+
+@generated by rules_python pip.parse bzlmod extension.
+"""
+
+load("@rules_python//python:pip.bzl", "pip_utils")
+
+all_requirements = %%ALL_REQUIREMENTS%%
+
+all_whl_requirements_by_package = %%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%
+
+all_whl_requirements = all_whl_requirements_by_package.values()
+
+all_data_requirements = %%ALL_DATA_REQUIREMENTS%%
+
def requirement(name):
    """Return the label of the `pkg` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "pkg")

def whl_requirement(name):
    """Return the label of the `whl` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "whl")

def data_requirement(name):
    """Return the label of the `data` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "data")

def dist_info_requirement(name):
    """Return the label of the `dist_info` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "dist_info")
diff --git a/python/private/pypi/requirements.bzl.tmpl.workspace b/python/private/pypi/requirements.bzl.tmpl.workspace
new file mode 100644
index 0000000..2f4bcd6
--- /dev/null
+++ b/python/private/pypi/requirements.bzl.tmpl.workspace
@@ -0,0 +1,72 @@
+"""Starlark representation of locked requirements.
+
+@generated by rules_python pip_parse repository rule.
+"""
+
+%%IMPORTS%%
+
+all_requirements = %%ALL_REQUIREMENTS%%
+
+all_whl_requirements_by_package = %%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%
+
+all_whl_requirements = all_whl_requirements_by_package.values()
+
+all_data_requirements = %%ALL_DATA_REQUIREMENTS%%
+
+_packages = %%PACKAGES%%
+_config = %%CONFIG%%
+_annotations = %%ANNOTATIONS%%
+
def requirement(name):
    """Return the label of the `pkg` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "pkg")

def whl_requirement(name):
    """Return the label of the `whl` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "whl")

def data_requirement(name):
    """Return the label of the `data` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "data")

def dist_info_requirement(name):
    """Return the label of the `dist_info` target for the given distribution name."""
    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "dist_info")
+
def _get_annotation(requirement):
    """Return the annotation for a requirement line, or None when absent."""

    # This expects to parse `setuptools==58.2.0     --hash=sha256:2551203ae6955b9876741a26ab3e767bb3242dafe86a32a749ea0d78b6792f11`
    # down to `setuptools`.
    # NOTE(review): requirements pinned with other operators (e.g. `>=`) would
    # leave a trailing `>`/`<` in the name — presumably pip_parse always emits
    # `==` pins; confirm.
    name = requirement.split(" ")[0].split("=")[0].split("[")[0]
    return _annotations.get(name)
+
def install_deps(**whl_library_kwargs):
    """Repository rule macro. Install dependencies from `pip_parse`.

    Args:
       **whl_library_kwargs: Additional arguments which will flow to underlying
         `whl_library` calls. See pip_repository.bzl for details.
    """

    # Set up the requirement groups
    all_requirement_groups = %%ALL_REQUIREMENT_GROUPS%%

    # Invert the group mapping: requirement name -> its group name.
    requirement_group_mapping = {
        requirement: group_name
        for group_name, group_requirements in all_requirement_groups.items()
        for requirement in group_requirements
    }

    # %%GROUP_LIBRARY%%

    # Install wheels which may be participants in a group
    whl_config = dict(_config)
    whl_config.update(whl_library_kwargs)

    # `_packages` is substituted in as (repo name, requirement line) pairs.
    for name, requirement in _packages:
        # Strip the hub repo prefix to recover the bare package name used as
        # the key in the group mapping.
        group_name = requirement_group_mapping.get(name.replace("%%NAME%%_", ""))
        group_deps = all_requirement_groups.get(group_name, [])

        whl_library(
            name = name,
            requirement = requirement,
            group_name = group_name,
            group_deps = group_deps,
            annotation = _get_annotation(requirement),
            **whl_config
        )
diff --git a/python/private/pypi/simpleapi_download.bzl b/python/private/pypi/simpleapi_download.bzl
new file mode 100644
index 0000000..b258fef
--- /dev/null
+++ b/python/private/pypi/simpleapi_download.bzl
@@ -0,0 +1,204 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A file that houses private functions used in the `bzlmod` extension with the same name.
+"""
+
+load("@bazel_features//:features.bzl", "bazel_features")
+load("//python/private:auth.bzl", "get_auth")
+load("//python/private:envsubst.bzl", "envsubst")
+load("//python/private:normalize_name.bzl", "normalize_name")
+load(":parse_simpleapi_html.bzl", "parse_simpleapi_html")
+
def simpleapi_download(ctx, *, attr, cache, parallel_download = True):
    """Download Simple API HTML.

    Args:
        ctx: The module_ctx or repository_ctx.
        attr: Contains the parameters for the download. They are grouped into a
          struct for better clarity. It must have attributes:
           * index_url: str, the index.
           * index_url_overrides: dict[str, str], the index overrides for
             separate packages.
           * extra_index_urls: Extra index URLs that will be looked up after
             the main is looked up.
           * sources: list[str], the sources to download things for. Each value is
             the contents of requirements files.
           * envsubst: list[str], the envsubst vars for performing substitution in index url.
           * netrc: The netrc parameter for ctx.download, see http_file for docs.
           * auth_patterns: The auth_patterns parameter for ctx.download, see
               http_file for docs.
        cache: A dictionary that can be used as a cache between calls during a
            single evaluation of the extension. We use a dictionary as a cache
            so that we can reuse calls to the simple API when evaluating the
            extension. Using the canonical_id parameter of the module_ctx would
            deposit the simple API responses to the bazel cache and that is
            undesirable because additions to the PyPI index would not be
            reflected when re-evaluating the extension unless we do
            `bazel clean --expunge`.
        parallel_download: A boolean to enable usage of bazel 7.1 non-blocking downloads.

    Returns:
        dict of pkg name to the parsed HTML contents - a list of structs.
    """
    index_url_overrides = {
        normalize_name(p): i
        for p, i in (attr.index_url_overrides or {}).items()
    }

    # `block` is only supported from bazel 7.1 onwards; without it every
    # download below is blocking.
    download_kwargs = {}
    if bazel_features.external_deps.download_has_block_param:
        download_kwargs["block"] = not parallel_download

    # NOTE @aignas 2024-03-31: we are not merging results from multiple indexes
    # to replicate how `pip` would handle this case.
    async_downloads = {}
    contents = {}
    index_urls = [attr.index_url] + attr.extra_index_urls
    for pkg in attr.sources:
        pkg_normalized = normalize_name(pkg)

        success = False
        for index_url in index_urls:
            result = _read_simpleapi(
                ctx = ctx,
                # A per-package override replaces the index URL entirely;
                # otherwise the index currently being tried is used.
                url = "{}/{}/".format(
                    index_url_overrides.get(pkg_normalized, index_url).rstrip("/"),
                    pkg,
                ),
                attr = attr,
                cache = cache,
                **download_kwargs
            )
            if hasattr(result, "wait"):
                # We will process it in a separate loop:
                async_downloads.setdefault(pkg_normalized, []).append(
                    struct(
                        pkg_normalized = pkg_normalized,
                        wait = result.wait,
                    ),
                )
                continue

            if result.success:
                contents[pkg_normalized] = result.output
                success = True
                break

        # In the blocking case the outcome is known immediately; with
        # parallel downloads failures are only detected in the second loop
        # below, hence the `not async_downloads` guard.
        if not async_downloads and not success:
            fail("Failed to download metadata from urls: {}".format(
                ", ".join(index_urls),
            ))

    if not async_downloads:
        return contents

    # If we use `block` == False, then we need to have a second loop that is
    # collecting all of the results as they were being downloaded in parallel.
    for pkg, downloads in async_downloads.items():
        success = False
        for download in downloads:
            result = download.wait()

            # First successful index wins, mirroring the blocking code path.
            if result.success and download.pkg_normalized not in contents:
                contents[download.pkg_normalized] = result.output
                success = True

        if not success:
            fail("Failed to download metadata from urls: {}".format(
                ", ".join(index_urls),
            ))

    return contents
+
def _read_simpleapi(ctx, url, attr, cache, **download_kwargs):
    """Read SimpleAPI.

    Args:
        ctx: The module_ctx or repository_ctx.
        url: str, the url parameter that can be passed to ctx.download.
        attr: The attribute that contains necessary info for downloading. The
          following attributes must be present:
           * envsubst: The envsubst values for performing substitutions in the URL.
           * netrc: The netrc parameter for ctx.download, see http_file for docs.
           * auth_patterns: The auth_patterns parameter for ctx.download, see
               http_file for docs.
        cache: A dict for storing the results.
        **download_kwargs: Any extra params to ctx.download.
            Note that output and auth will be passed for you.

    Returns:
        A similar object to what `download` would return except that in
        result.output will be the parsed simple api contents.
    """
    # NOTE @aignas 2024-03-31: some of the simple APIs use relative URLs for
    # the whl location and we cannot handle multiple URLs at once by passing
    # them to ctx.download if we want to correctly handle the relative URLs.
    # TODO: Add a test that env subbed index urls do not leak into the lock file.

    real_url = envsubst(
        url,
        attr.envsubst,
        ctx.getenv if hasattr(ctx, "getenv") else ctx.os.environ.get,
    )

    # Cache on the fully substituted URL so that identical resolved indexes
    # share one download within a single extension evaluation.
    cache_key = real_url
    if cache_key in cache:
        return struct(success = True, output = cache[cache_key])

    # The output filename is derived from the URL with `~VAR~` placeholders
    # instead of real env values, so substituted values (e.g. credentials)
    # never end up in the filename.
    output_str = envsubst(
        url,
        attr.envsubst,
        # Use env names in the subst values - this will be unique over
        # the lifetime of the execution of this function and we also use
        # `~` as the separator to ensure that we don't get clashes.
        {e: "~{}~".format(e) for e in attr.envsubst}.get,
    )

    # Transform the URL into a valid filename
    for char in [".", ":", "/", "\\", "-"]:
        output_str = output_str.replace(char, "_")

    output = ctx.path(output_str.strip("_").lower() + ".html")

    # NOTE: this may have block = True or block = False in the download_kwargs
    download = ctx.download(
        url = [real_url],
        output = output,
        auth = get_auth(ctx, [real_url], ctx_attr = attr),
        allow_fail = True,
        **download_kwargs
    )

    if download_kwargs.get("block") == False:
        # Simulate the same API as ctx.download has
        return struct(
            wait = lambda: _read_index_result(ctx, download.wait(), output, url, cache, cache_key),
        )

    return _read_index_result(ctx, download, output, url, cache, cache_key)
+
def _read_index_result(ctx, result, output, url, cache, cache_key):
    """Parse a downloaded simple API page and cache the parsed contents."""
    if not result.success:
        return struct(success = False)

    html = ctx.read(output)
    parsed = parse_simpleapi_html(url = url, content = html)
    if not parsed:
        return struct(success = False)

    cache.setdefault(cache_key, parsed)
    return struct(success = True, output = parsed, cache_key = cache_key)
diff --git a/python/private/pypi/whl_library.bzl b/python/private/pypi/whl_library.bzl
new file mode 100644
index 0000000..cae0db3
--- /dev/null
+++ b/python/private/pypi/whl_library.bzl
@@ -0,0 +1,509 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Implementation of the `whl_library` repository rule."""
+
+load("//python:repositories.bzl", "is_standalone_interpreter")
+load("//python:versions.bzl", "WINDOWS_NAME")
+load("//python/pip_install:repositories.bzl", "all_requirements")
+load("//python/private:auth.bzl", "AUTH_ATTRS", "get_auth")
+load("//python/private:envsubst.bzl", "envsubst")
+load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
+load("//python/private:toolchains_repo.bzl", "get_host_os_arch")
+load(":attrs.bzl", "ATTRS", "use_isolated")
+load(":generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")
+load(":parse_whl_name.bzl", "parse_whl_name")
+load(":patch_whl.bzl", "patch_whl")
+load(":whl_target_platforms.bzl", "whl_target_platforms")
+
+_CPPFLAGS = "CPPFLAGS"
+_COMMAND_LINE_TOOLS_PATH_SLUG = "commandlinetools"
+_WHEEL_ENTRY_POINT_PREFIX = "rules_python_wheel_entry_point"
+
def _construct_pypath(rctx):
    """Helper function to construct a PYTHONPATH.

    Contains entries for code in this repo as well as packages downloaded from //python/pip_install:repositories.bzl.
    This allows us to run python code inside repository rule implementations.

    Args:
        rctx: Handle to the repository_context.

    Returns: String of the PYTHONPATH.
    """

    # Windows uses ';' as the path-list separator, everything else uses ':'.
    sep = ";" if "windows" in rctx.os.name.lower() else ":"
    return sep.join([
        str(rctx.path(entry).dirname)
        for entry in rctx.attr._python_path_entries
    ])
+
def _get_python_interpreter_attr(rctx):
    """A helper function for getting the `python_interpreter` attribute or it's default

    Args:
        rctx (repository_ctx): Handle to the rule repository context.

    Returns:
        str: The attribute value, or the platform default (`python.exe` on
            Windows, `python3` elsewhere) when the attribute is unset.
    """
    explicit = rctx.attr.python_interpreter
    if explicit:
        return explicit

    return "python.exe" if "win" in rctx.os.name else "python3"
+
def _resolve_python_interpreter(rctx):
    """Helper function to find the python interpreter from the common attributes

    Args:
        rctx: Handle to the rule repository context.

    Returns:
        `path` object, for the resolved path to the Python interpreter.
    """
    interpreter = _get_python_interpreter_attr(rctx)

    # An explicit interpreter target takes precedence over everything else.
    if rctx.attr.python_interpreter_target != None:
        resolved = rctx.path(rctx.attr.python_interpreter_target)

        (os, _) = get_host_os_arch(rctx)

        # On Windows, the symlink doesn't work because Windows attempts to find
        # Python DLLs where the symlink is, not where the symlink points.
        if os == WINDOWS_NAME:
            return resolved.realpath
        return resolved

    # A path-like value is used as-is.
    if "/" in interpreter:
        return rctx.path(interpreter)

    # It's a plain command, e.g. "python3", to look up in the environment.
    found = rctx.which(interpreter)
    if not found:
        fail("python interpreter `{}` not found in PATH".format(interpreter))
    return found
+
def _get_xcode_location_cflags(rctx):
    """Query the xcode sdk location to update cflags

    Figure out if this interpreter target comes from rules_python, and patch the xcode sdk location if so.
    Pip won't be able to compile c extensions from sdists with the pre built python distributions from indygreg
    otherwise. See https://github.com/indygreg/python-build-standalone/issues/103
    """

    # Only run on MacOS hosts
    if not rctx.os.name.lower().startswith("mac os"):
        return []

    location = repo_utils.execute_unchecked(
        rctx,
        op = "GetXcodeLocation",
        arguments = [repo_utils.which_checked(rctx, "xcode-select"), "--print-path"],
    )

    # Best-effort: no flags when xcode-select is unavailable/fails.
    if location.return_code != 0:
        return []

    root = location.stdout.strip()
    if _COMMAND_LINE_TOOLS_PATH_SLUG not in root.lower():
        # This is a full xcode installation somewhere like /Applications/Xcode13.0.app/Contents/Developer
        # so we need to change the path to to the macos specific tools which are in a different relative
        # path than xcode installed command line tools.
        root = "{}/Platforms/MacOSX.platform/Developer".format(root)

    return [
        "-isysroot {}/SDKs/MacOSX.sdk".format(root),
    ]
+
def _get_toolchain_unix_cflags(rctx, python_interpreter):
    """Gather cflags from a standalone toolchain for unix systems.

    Pip won't be able to compile c extensions from sdists with the pre built python distributions from indygreg
    otherwise. See https://github.com/indygreg/python-build-standalone/issues/103
    """

    # Only run on Unix systems
    if not rctx.os.name.lower().startswith(("mac os", "linux")):
        return []

    # Only update the location when using a standalone toolchain.
    if not is_standalone_interpreter(rctx, python_interpreter):
        return []

    # Ask the interpreter for its own "major.minor" so the include dir matches.
    version = repo_utils.execute_checked_stdout(
        rctx,
        op = "GetPythonVersionForUnixCflags",
        arguments = [
            python_interpreter,
            "-c",
            "import sys; print(f'{sys.version_info[0]}.{sys.version_info[1]}', end='')",
        ],
    )

    return ["-isystem {}/include/python{}".format(python_interpreter.dirname, version)]
+
def _parse_optional_attrs(rctx, args, extra_pip_args = None):
    """Helper function to parse common attributes of pip_repository and whl_library repository rules.

    This function also serializes the structured arguments as JSON
    so they can be passed on the command line to subprocesses.

    Args:
        rctx: Handle to the rule repository context.
        args: A list of parsed args for the rule.
        extra_pip_args: The pip args to pass. This may include args computed
            at fetch time (e.g. `--no-index --find-links .` for sdists) on
            top of `rctx.attr.extra_pip_args`.
    Returns: Augmented args list.
    """

    if use_isolated(rctx, rctx.attr):
        args.append("--isolated")

    # Bazel version 7.1.0 and later (and rolling releases from version 8.0.0-pre.20240128.3)
    # support rctx.getenv(name, default): When building incrementally, any change to the value of
    # the variable named by name will cause this repository to be re-fetched.
    if "getenv" in dir(rctx):
        getenv = rctx.getenv
    else:
        getenv = rctx.os.environ.get

    # Check for None so we use empty default types from our attrs.
    # Some args want to be list, and some want to be dict.
    if extra_pip_args != None:
        args += [
            "--extra_pip_args",
            json.encode(struct(arg = [
                # BUGFIX: iterate over the `extra_pip_args` parameter rather
                # than `rctx.attr.extra_pip_args` - the caller may have added
                # entries (e.g. `--no-index --find-links .` for sdists) that
                # would otherwise be silently dropped.
                envsubst(pip_arg, rctx.attr.envsubst, getenv)
                for pip_arg in extra_pip_args
            ])),
        ]

    if rctx.attr.download_only:
        args.append("--download_only")

    if rctx.attr.pip_data_exclude != None:
        args += [
            "--pip_data_exclude",
            json.encode(struct(arg = rctx.attr.pip_data_exclude)),
        ]

    if rctx.attr.enable_implicit_namespace_pkgs:
        args.append("--enable_implicit_namespace_pkgs")

    if rctx.attr.environment != None:
        args += [
            "--environment",
            json.encode(struct(arg = rctx.attr.environment)),
        ]

    return args
+
def _create_repository_execution_environment(rctx, python_interpreter):
    """Create a environment dictionary for processes we spawn with rctx.execute.

    Args:
        rctx (repository_ctx): The repository context.
        python_interpreter (path): The resolved python interpreter.
    Returns:
        Dictionary of environment variable suitable to pass to rctx.execute.
    """

    # Gather any available CPPFLAGS values
    cppflags = _get_xcode_location_cflags(rctx) + _get_toolchain_unix_cflags(rctx, python_interpreter)

    return {
        "PYTHONPATH": _construct_pypath(rctx),
        _CPPFLAGS: " ".join(cppflags),
    }
+
def _whl_library_impl(rctx):
    """Obtain a single wheel (local file, URL download, or pip resolution),
    optionally patch it, extract it, and generate its BUILD.bazel file."""
    python_interpreter = _resolve_python_interpreter(rctx)

    # Base command line for the wheel_installer helper tool.
    args = [
        python_interpreter,
        "-m",
        "python.pip_install.tools.wheel_installer.wheel_installer",
        "--requirement",
        rctx.attr.requirement,
    ]
    extra_pip_args = []
    extra_pip_args.extend(rctx.attr.extra_pip_args)

    # Manually construct the PYTHONPATH since we cannot use the toolchain here
    environment = _create_repository_execution_environment(rctx, python_interpreter)

    whl_path = None
    if rctx.attr.whl_file:
        # Case 1: the wheel is supplied directly as a label.
        whl_path = rctx.path(rctx.attr.whl_file)

        # Simulate the behaviour where the whl is present in the current directory.
        rctx.symlink(whl_path, whl_path.basename)
        whl_path = rctx.path(whl_path.basename)
    elif rctx.attr.urls:
        # Case 2: fetch the distribution with the bazel downloader.
        filename = rctx.attr.filename
        urls = rctx.attr.urls
        if not filename:
            # Fall back to the last path segment of the first URL.
            _, _, filename = urls[0].rpartition("/")

        if not (filename.endswith(".whl") or filename.endswith("tar.gz") or filename.endswith(".zip")):
            if rctx.attr.filename:
                msg = "got '{}'".format(filename)
            else:
                msg = "detected '{}' from url:\n{}".format(filename, urls[0])
            fail("Only '.whl', '.tar.gz' or '.zip' files are supported, {}".format(msg))

        result = rctx.download(
            url = urls,
            output = filename,
            sha256 = rctx.attr.sha256,
            auth = get_auth(rctx, urls),
        )

        if not result.success:
            fail("could not download the '{}' from {}:\n{}".format(filename, urls, result))

        if filename.endswith(".whl"):
            # NOTE(review): this uses `rctx.attr.filename`, not the possibly
            # URL-derived `filename` above; presumably callers always set the
            # `filename` attr for .whl urls - confirm, otherwise this would
            # resolve an empty path.
            whl_path = rctx.path(rctx.attr.filename)
        else:
            # It is an sdist and we need to tell PyPI to use a file in this directory
            # and not use any indexes.
            extra_pip_args.extend(["--no-index", "--find-links", "."])

    args = _parse_optional_attrs(rctx, args, extra_pip_args)

    if not whl_path:
        # Case 3: no prebuilt wheel - let pip resolve (and possibly build) it.
        repo_utils.execute_checked(
            rctx,
            op = "whl_library.ResolveRequirement({}, {})".format(rctx.attr.name, rctx.attr.requirement),
            arguments = args,
            environment = environment,
            quiet = rctx.attr.quiet,
            timeout = rctx.attr.timeout,
        )

        # The helper tool writes the resolved wheel location to whl_file.json.
        whl_path = rctx.path(json.decode(rctx.read("whl_file.json"))["whl_file"])
        if not rctx.delete("whl_file.json"):
            fail("failed to delete the whl_file.json file")

    if rctx.attr.whl_patches:
        # Collect only the patches whose declared whl list matches this wheel.
        patches = {}
        for patch_file, json_args in rctx.attr.whl_patches.items():
            patch_dst = struct(**json.decode(json_args))
            if whl_path.basename in patch_dst.whls:
                patches[patch_file] = patch_dst.patch_strip

        whl_path = patch_whl(
            rctx,
            op = "whl_library.PatchWhl({}, {})".format(rctx.attr.name, rctx.attr.requirement),
            python_interpreter = python_interpreter,
            whl_path = whl_path,
            patches = patches,
            quiet = rctx.attr.quiet,
            timeout = rctx.attr.timeout,
        )

    target_platforms = rctx.attr.experimental_target_platforms
    if target_platforms:
        parsed_whl = parse_whl_name(whl_path.basename)
        if parsed_whl.platform_tag != "any":
            # NOTE @aignas 2023-12-04: if the wheel is a platform specific
            # wheel, we only include deps for that target platform
            target_platforms = [
                p.target_platform
                for p in whl_target_platforms(
                    platform_tag = parsed_whl.platform_tag,
                    abi_tag = parsed_whl.abi_tag,
                )
            ]

    # Extract the wheel and emit metadata.json describing deps/entry points.
    repo_utils.execute_checked(
        rctx,
        op = "whl_library.ExtractWheel({}, {})".format(rctx.attr.name, whl_path),
        arguments = args + [
            "--whl-file",
            whl_path,
        ] + ["--platform={}".format(p) for p in target_platforms],
        environment = environment,
        quiet = rctx.attr.quiet,
        timeout = rctx.attr.timeout,
    )

    metadata = json.decode(rctx.read("metadata.json"))
    rctx.delete("metadata.json")

    # NOTE @aignas 2024-06-22: this has to live on until we stop supporting
    # passing `twine` as a `:pkg` library via the `WORKSPACE` builds.
    #
    # See ../../packaging.bzl line 190
    entry_points = {}
    for item in metadata["entry_points"]:
        name = item["name"]
        module = item["module"]
        attribute = item["attribute"]

        # There is an extreme edge-case with entry_points that end with `.py`
        # See: https://github.com/bazelbuild/bazel/blob/09c621e4cf5b968f4c6cdf905ab142d5961f9ddc/src/test/java/com/google/devtools/build/lib/rules/python/PyBinaryConfiguredTargetTest.java#L174
        entry_point_without_py = name[:-3] + "_py" if name.endswith(".py") else name
        entry_point_target_name = (
            _WHEEL_ENTRY_POINT_PREFIX + "_" + entry_point_without_py
        )
        entry_point_script_name = entry_point_target_name + ".py"

        # Write a small shim script per entry point.
        rctx.file(
            entry_point_script_name,
            _generate_entry_point_contents(module, attribute),
        )
        entry_points[entry_point_without_py] = entry_point_script_name

    build_file_contents = generate_whl_library_build_bazel(
        dep_template = rctx.attr.dep_template or "@{}{{name}}//:{{target}}".format(rctx.attr.repo_prefix),
        whl_name = whl_path.basename,
        dependencies = metadata["deps"],
        dependencies_by_platform = metadata["deps_by_platform"],
        group_name = rctx.attr.group_name,
        group_deps = rctx.attr.group_deps,
        data_exclude = rctx.attr.pip_data_exclude,
        tags = [
            "pypi_name=" + metadata["name"],
            "pypi_version=" + metadata["version"],
        ],
        entry_points = entry_points,
        annotation = None if not rctx.attr.annotation else struct(**json.decode(rctx.read(rctx.attr.annotation))),
    )
    rctx.file("BUILD.bazel", build_file_contents)

    return
+
def _generate_entry_point_contents(
        module,
        attribute,
        shebang = "#!/usr/bin/env python3"):
    """Generate the contents of an entry point script.

    Args:
        module (str): The name of the module to use.
        attribute (str): The name of the attribute to call.
        shebang (str, optional): The shebang to use for the entry point python
            file.

    Returns:
        str: A string of python code.
    """

    # Assemble line-by-line; the result always ends with a newline.
    script_lines = [
        shebang,
        "import sys",
        "from {} import {}".format(module, attribute),
        'if __name__ == "__main__":',
        "    sys.exit({}())".format(attribute),
    ]
    return "\n".join(script_lines) + "\n"
+
# NOTE @aignas 2024-03-21: The usage of dict({}, **common) ensures that all args to `dict` are unique
# Attributes of the `whl_library` repository rule; merged with the shared
# `ATTRS` below and with `AUTH_ATTRS` afterwards.
whl_library_attrs = dict({
    "annotation": attr.label(
        doc = (
            "Optional json encoded file containing annotation to apply to the extracted wheel. " +
            "See `package_annotation`"
        ),
        allow_files = True,
    ),
    "dep_template": attr.string(
        doc = """
The dep template to use for referencing the dependencies. It should have `{name}`
and `{target}` tokens that will be replaced with the normalized distribution name
and the target that we need respectively.
""",
    ),
    "filename": attr.string(
        doc = "Download the whl file to this filename. Only used when the `urls` is passed. If not specified, will be auto-detected from the `urls`.",
    ),
    "group_deps": attr.string_list(
        doc = "List of dependencies to skip in order to break the cycles within a dependency group.",
        default = [],
    ),
    "group_name": attr.string(
        doc = "Name of the group, if any.",
    ),
    "repo": attr.string(
        mandatory = True,
        doc = "Pointer to parent repo name. Used to make these rules rerun if the parent repo changes.",
    ),
    "repo_prefix": attr.string(
        doc = """
Prefix for the generated packages will be of the form `@<prefix><sanitized-package-name>//...`

DEPRECATED. Only left for people who vendor requirements.bzl.
""",
    ),
    "requirement": attr.string(
        mandatory = True,
        doc = "Python requirement string describing the package to make available, if 'urls' or 'whl_file' is given, then this only needs to include foo[any_extras] as a bare minimum.",
    ),
    "sha256": attr.string(
        doc = "The sha256 of the downloaded whl. Only used when the `urls` is passed.",
    ),
    "urls": attr.string_list(
        doc = """\
The list of urls of the whl to be downloaded using bazel downloader. Using this
attr makes `extra_pip_args` and `download_only` ignored.""",
    ),
    "whl_file": attr.label(
        doc = "The whl file that should be used instead of downloading or building the whl.",
    ),
    "whl_patches": attr.label_keyed_string_dict(
        doc = """a label-keyed-string dict that has
            json.encode(struct([whl_file], patch_strip]) as values. This
            is to maintain flexibility and correct bzlmod extension interface
            until we have a better way to define whl_library and move whl
            patching to a separate place. INTERNAL USE ONLY.""",
    ),
    "_python_path_entries": attr.label_list(
        # Get the root directory of these rules and keep them as a default attribute
        # in order to avoid unnecessary repository fetching restarts.
        #
        # This is very similar to what was done in https://github.com/bazelbuild/rules_go/pull/3478
        default = [
            Label("//:BUILD.bazel"),
        ] + [
            # Includes all the external dependencies from repositories.bzl
            Label("@" + repo + "//:BUILD.bazel")
            for repo in all_requirements
        ],
    ),
}, **ATTRS)

# The credential/auth attributes are shared with other repository rules, so
# they are merged in separately rather than declared inline above.
whl_library_attrs.update(AUTH_ATTRS)
+
whl_library = repository_rule(
    attrs = whl_library_attrs,
    doc = """
Download and extracts a single wheel based into a bazel repo based on the requirement string passed in.
Instantiated from pip_repository and inherits config options from there.""",
    implementation = _whl_library_impl,
    # Changes to these environment variables invalidate the repository and
    # force a re-fetch.
    environ = [
        "RULES_PYTHON_PIP_ISOLATED",
        REPO_DEBUG_ENV_VAR,
    ],
)
diff --git a/python/private/pypi/whl_repo_name.bzl b/python/private/pypi/whl_repo_name.bzl
new file mode 100644
index 0000000..295f5a4
--- /dev/null
+++ b/python/private/pypi/whl_repo_name.bzl
@@ -0,0 +1,52 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A function to convert a dist name to a valid bazel repo name.
+"""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load(":parse_whl_name.bzl", "parse_whl_name")
+
def whl_repo_name(prefix, filename, sha256):
    """Return a valid whl_library repo name given a distribution filename.

    Args:
        prefix: str, the prefix of the whl_library.
        filename: str, the filename of the distribution.
        sha256: str, the sha256 of the distribution.

    Returns:
        a string that can be used in `whl_library`.
    """
    if filename.endswith(".whl"):
        parsed = parse_whl_name(filename)

        # Only the first element of any dotted (compressed) tag is used.
        segments = [
            prefix,
            normalize_name(parsed.distribution),
            parsed.python_tag.partition(".")[0],
            parsed.abi_tag.partition(".")[0],
            parsed.platform_tag.partition(".")[0],
        ]
    else:
        # Then the filename is basically foo-3.2.1.<ext>
        segments = [
            prefix,
            normalize_name(filename.rpartition("-")[0]),
            "sdist",
        ]

    # A short hash suffix keeps names unique per artifact.
    segments.append(sha256[:8])

    return "_".join(segments)
diff --git a/python/private/pypi/whl_target_platforms.bzl b/python/private/pypi/whl_target_platforms.bzl
new file mode 100644
index 0000000..bee7957
--- /dev/null
+++ b/python/private/pypi/whl_target_platforms.bzl
@@ -0,0 +1,218 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A starlark implementation of the wheel platform tag parsing to get the target platform.
+"""
+
+load(":parse_whl_name.bzl", "parse_whl_name")
+
# The order of the dictionaries is to keep definitions with their aliases next to each
# other

# Maps a CPU suffix as found in a wheel platform tag to the canonical
# rules_python CPU name.
_CPU_ALIASES = {
    "x86_32": "x86_32",
    "i386": "x86_32",
    "i686": "x86_32",
    "x86": "x86_32",
    "x86_64": "x86_64",
    "amd64": "x86_64",
    "aarch64": "aarch64",
    "arm64": "aarch64",
    "ppc": "ppc",
    "ppc64": "ppc",
    "ppc64le": "ppc",
    "s390x": "s390x",
    "arm": "arm",
    "armv6l": "arm",
    "armv7l": "arm",
}  # buildifier: disable=unsorted-dict-items

# Maps the leading OS component of a wheel platform tag to the canonical
# rules_python OS name.
_OS_PREFIXES = {
    "linux": "linux",
    "manylinux": "linux",
    "musllinux": "linux",
    "macos": "osx",
    "win": "windows",
}  # buildifier: disable=unsorted-dict-items
+
def select_whls(*, whls, want_python_version = "3.0", want_abis = [], want_platforms = [], logger = None):
    """Select a subset of wheels suitable for target platforms from a list.

    Args:
        whls(list[struct]): A list of candidates which have a `filename`
            attribute containing the `whl` filename.
        want_python_version(str): An optional parameter to filter whls by python version. Defaults to '3.0'.
        want_abis(list[str]): A list of ABIs that are supported.
        want_platforms(str): The platforms
        logger: A logger for printing diagnostic messages.

    Returns:
        A filtered list of items from the `whls` arg where `filename` matches
        the selected criteria. If no match is found, an empty list is returned.
    """
    if not whls:
        return []

    # Only the minor version component is compared; -1 disables the filter.
    version_limit = -1
    if want_python_version:
        version_limit = int(want_python_version.split(".")[1])

    # Keyed by (abi_tag, platform_tag); each value is a dict from a preference
    # tuple to the whls carrying it, so the best whl per key is picked at the end.
    candidates = {}
    for whl in whls:
        parsed = parse_whl_name(whl.filename)

        # NOTE(review): the logger is handed no-arg callables here, presumably
        # for lazy message formatting - confirm the logger implementation
        # accepts callables (not just strings) and that `lambda` is enabled in
        # this Starlark dialect.
        if logger:
            logger.trace(lambda: "Deciding whether to use '{}'".format(whl.filename))

        # Collect the 2-char implementation prefixes ("cp", "py", ...) and the
        # minimum python 3 minor version that the whl claims to support.
        supported_implementations = {}
        whl_version_min = 0
        for tag in parsed.python_tag.split("."):
            supported_implementations[tag[:2]] = None

            if tag.startswith("cp3") or tag.startswith("py3"):
                # len("..3") == 3 strips the "cp3"/"py3" prefix, leaving the
                # minor version digits; a bare "cp3"/"py3" tag yields 0.
                version = int(tag[len("..3"):] or 0)
            else:
                # In this case it should be eithor "cp2" or "py2" and we will default
                # to `whl_version_min` = 0
                continue

            if whl_version_min == 0 or version < whl_version_min:
                whl_version_min = version

        if not ("cp" in supported_implementations or "py" in supported_implementations):
            if logger:
                logger.trace(lambda: "Discarding the whl because the whl does not support CPython, whl supported implementations are: {}".format(supported_implementations))
            continue

        if want_abis and parsed.abi_tag not in want_abis:
            # Filter out incompatible ABIs
            if logger:
                logger.trace(lambda: "Discarding the whl because the whl abi did not match")
            continue

        if version_limit != -1 and whl_version_min > version_limit:
            if logger:
                logger.trace(lambda: "Discarding the whl because the whl supported python version is too high")
            continue

        # A whl is platform-compatible when it is pure ("any") or when any of
        # its parsed target platforms is among the wanted ones.
        compatible = False
        if parsed.platform_tag == "any":
            compatible = True
        else:
            for p in whl_target_platforms(parsed.platform_tag):
                if p.target_platform in want_platforms:
                    compatible = True
                    break

        if not compatible:
            if logger:
                logger.trace(lambda: "Discarding the whl because the whl does not support the desired platforms: {}".format(want_platforms))
            continue

        for implementation in supported_implementations:
            candidates.setdefault(
                (
                    parsed.abi_tag,
                    parsed.platform_tag,
                ),
                {},
            ).setdefault(
                (
                    # prefer cp implementation
                    implementation == "cp",
                    # prefer higher versions
                    whl_version_min,
                    # prefer abi3 over none
                    parsed.abi_tag != "none",
                    # prefer cpx abi over abi3
                    parsed.abi_tag != "abi3",
                ),
                [],
            ).append(whl)

    # For each (abi, platform) bucket return the whl stored under the highest
    # preference tuple; on ties within a tuple the last appended whl wins.
    return [
        candidates[key][sorted(v)[-1]][-1]
        for key, v in candidates.items()
    ]
+
def whl_target_platforms(platform_tag, abi_tag = ""):
    """Parse the wheel abi and platform tags and return (os, cpu) tuples.

    Args:
        platform_tag (str): The platform_tag part of the wheel name. See
            ./parse_whl_name.bzl for more details.
        abi_tag (str): The abi tag that should be used for parsing.

    Returns:
        A list of structs, with attributes:
        * os: str, one of the _OS_PREFIXES values
        * cpu: str, one of the _CPU_PREFIXES values
        * abi: str, the ABI that the interpreter should have if it is passed.
        * target_platform: str, the target_platform that can be given to the
          wheel_installer for parsing whl METADATA.
    """
    cpus = _cpu_from_tag(platform_tag)

    # "none" and "abi3" carry no interpreter-specific ABI information.
    abi = abi_tag if abi_tag not in ["", "none", "abi3"] else None

    # TODO @aignas 2024-05-29: this code is present in many places, I think
    # Extract the "<major>_<minor>" platform version embedded in tags such as
    # `manylinux_2_17_x86_64`; tags without one (e.g. `linux_x86_64`) get (0, 0).
    _, _, suffix = platform_tag.partition("_")
    major, _, remainder = suffix.partition("_")
    minor, _, remainder = remainder.partition("_")
    if not (remainder and major.isdigit() and minor.isdigit()):
        major, minor = "0", "0"

    for prefix, os in _OS_PREFIXES.items():
        if platform_tag.startswith(prefix):
            return [
                struct(
                    os = os,
                    cpu = cpu,
                    abi = abi,
                    version = (int(major), int(minor)),
                    target_platform = "_".join([abi, os, cpu] if abi else [os, cpu]),
                )
                for cpu in cpus
            ]

    print("WARNING: ignoring unknown platform_tag os: {}".format(platform_tag))  # buildifier: disable=print
    return []
+
def _cpu_from_tag(tag):
    """Map a platform tag to the list of canonical CPU names it matches."""

    # Generic path: every alias suffix that matches contributes its CPU.
    matches = [
        cpu
        for suffix, cpu in _CPU_ALIASES.items()
        if tag.endswith(suffix)
    ]
    if matches:
        return matches

    # Special cases that do not follow the `<os>_<cpu>` suffix convention.
    if tag == "win32":
        return ["x86_32"]
    if tag == "win_ia64":
        return []
    if tag.startswith("macosx"):
        if tag.endswith(("universal2", "universal")):
            return ["x86_64", "aarch64"]
        if tag.endswith("intel"):
            return ["x86_32"]

    return []