sort import statements and apply "black" formatting (#583)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79f025d..1f6698f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml
@@ -6,8 +6,17 @@ rev: 4.0.1.1 hooks: - id: buildifier - args: &args + args: &args # Keep this argument in sync with .bazelci/presubmit.yaml - --warnings=all - id: buildifier-lint args: *args + - repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + name: isort (python) + - repo: https://github.com/psf/black + rev: 21.12b0 + hooks: + - id: black
diff --git a/examples/build_file_generation/__init__.py b/examples/build_file_generation/__init__.py index ce47b77..11b15b1 100644 --- a/examples/build_file_generation/__init__.py +++ b/examples/build_file_generation/__init__.py
@@ -1 +1 @@ -print("hello") \ No newline at end of file +print("hello")
diff --git a/examples/pip_install/main.py b/examples/pip_install/main.py index fdb3c65..b65ad0e 100644 --- a/examples/pip_install/main.py +++ b/examples/pip_install/main.py
@@ -1,7 +1,9 @@ import boto3 + def the_dir(): return dir(boto3) + if __name__ == "__main__": print(the_dir())
diff --git a/examples/pip_install/pip_install_test.py b/examples/pip_install/pip_install_test.py index 1e53c12..e865dea 100644 --- a/examples/pip_install/pip_install_test.py +++ b/examples/pip_install/pip_install_test.py
@@ -1,9 +1,9 @@ #!/usr/bin/env python3 -from pathlib import Path import os import subprocess import unittest +from pathlib import Path class PipInstallTest(unittest.TestCase): @@ -16,13 +16,23 @@ entry_point = Path(env) self.assertTrue(entry_point.exists()) - proc = subprocess.run([entry_point, "--version"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + proc = subprocess.run( + [entry_point, "--version"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) self.assertEqual(proc.stdout.decode("utf-8").strip(), "yamllint 1.26.3") # yamllint entry_point is of the form `def run(argv=None):` with self.assertRaises(subprocess.CalledProcessError) as context: - subprocess.run([entry_point, "--option-does-not-exist"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - self.assertIn('returned non-zero exit status 2', str(context.exception)) + subprocess.run( + [entry_point, "--option-does-not-exist"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + self.assertIn("returned non-zero exit status 2", str(context.exception)) def test_entry_point_int_return(self): env = os.environ.get("SPHINX_BUILD_ENTRY_POINT") @@ -31,14 +41,24 @@ entry_point = Path(env) self.assertTrue(entry_point.exists()) - proc = subprocess.run([entry_point, "--version"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + proc = subprocess.run( + [entry_point, "--version"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) # sphinx-build uses args[0] for its name, only assert the version here - self.assertTrue(proc.stdout.decode("utf-8").strip().endswith('4.2.0')) + self.assertTrue(proc.stdout.decode("utf-8").strip().endswith("4.2.0")) # sphinx-build entry_point is of the form `def main(argv: List[str] = sys.argv[1:]) -> int:` with self.assertRaises(subprocess.CalledProcessError) as context: - subprocess.run([entry_point, "--option-does-not-exist"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - 
self.assertIn('returned non-zero exit status 2', str(context.exception)) + subprocess.run( + [entry_point, "--option-does-not-exist"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + self.assertIn("returned non-zero exit status 2", str(context.exception)) def test_data(self): env = os.environ.get("WHEEL_DATA_CONTENTS")
diff --git a/examples/pip_install/test.py b/examples/pip_install/test.py index 0b3b333..0859a28 100644 --- a/examples/pip_install/test.py +++ b/examples/pip_install/test.py
@@ -1,9 +1,12 @@ import unittest + import main + class ExampleTest(unittest.TestCase): def test_main(self): self.assertIn("set_stream_logger", main.the_dir()) -if __name__ == '__main__': - unittest.main() + +if __name__ == "__main__": + unittest.main()
diff --git a/examples/pip_parse/pip_parse_test.py b/examples/pip_parse/pip_parse_test.py index 361d59f..8d8846a 100644 --- a/examples/pip_parse/pip_parse_test.py +++ b/examples/pip_parse/pip_parse_test.py
@@ -1,9 +1,9 @@ #!/usr/bin/env python3 -from pathlib import Path import os import subprocess import unittest +from pathlib import Path class PipInstallTest(unittest.TestCase): @@ -16,13 +16,23 @@ entry_point = Path(env) self.assertTrue(entry_point.exists()) - proc = subprocess.run([entry_point, "--version"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + proc = subprocess.run( + [entry_point, "--version"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) self.assertEqual(proc.stdout.decode("utf-8").strip(), "yamllint 1.26.3") # yamllint entry_point is of the form `def run(argv=None):` with self.assertRaises(subprocess.CalledProcessError) as context: - subprocess.run([entry_point, "--option-does-not-exist"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - self.assertIn('returned non-zero exit status 2', str(context.exception)) + subprocess.run( + [entry_point, "--option-does-not-exist"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + self.assertIn("returned non-zero exit status 2", str(context.exception)) def test_entry_point_int_return(self): env = os.environ.get("SPHINX_BUILD_ENTRY_POINT") @@ -31,14 +41,24 @@ entry_point = Path(env) self.assertTrue(entry_point.exists()) - proc = subprocess.run([entry_point, "--version"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + proc = subprocess.run( + [entry_point, "--version"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) # sphinx-build uses args[0] for its name, only assert the version here - self.assertTrue(proc.stdout.decode("utf-8").strip().endswith('4.2.0')) + self.assertTrue(proc.stdout.decode("utf-8").strip().endswith("4.2.0")) # sphinx-build entry_point is of the form `def main(argv: List[str] = sys.argv[1:]) -> int:` with self.assertRaises(subprocess.CalledProcessError) as context: - subprocess.run([entry_point, "--option-does-not-exist"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - 
self.assertIn('returned non-zero exit status 2', str(context.exception)) + subprocess.run( + [entry_point, "--option-does-not-exist"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + self.assertIn("returned non-zero exit status 2", str(context.exception)) def test_data(self): env = os.environ.get("WHEEL_DATA_CONTENTS")
diff --git a/examples/pip_parse/test.py b/examples/pip_parse/test.py index 3415a32..e1f97f1 100644 --- a/examples/pip_parse/test.py +++ b/examples/pip_parse/test.py
@@ -1,4 +1,5 @@ import unittest + import main @@ -7,5 +8,5 @@ self.assertEqual("2.25.1", main.version()) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main()
diff --git a/examples/py_import/py_import_test.py b/examples/py_import/py_import_test.py index 1d7212d..1f5aa71 100644 --- a/examples/py_import/py_import_test.py +++ b/examples/py_import/py_import_test.py
@@ -18,24 +18,23 @@ class HelloWorldTest(unittest.TestCase): + def test_helloworld(self): + hw = helloworld.HelloWorld() + hw.SayHello() - def test_helloworld(self): - hw = helloworld.HelloWorld() - hw.SayHello() + def test_helloworld_async(self): + hw = helloworld.HelloWorld() + hw.SayHelloAsync() + hw.Stop() - def test_helloworld_async(self): - hw = helloworld.HelloWorld() - hw.SayHelloAsync() - hw.Stop() - - def test_helloworld_multiple(self): - hw = helloworld.HelloWorld() - hw.SayHelloAsync() - hw.SayHelloAsync() - hw.SayHelloAsync() - hw.SayHelloAsync() - hw.Stop() + def test_helloworld_multiple(self): + hw = helloworld.HelloWorld() + hw.SayHelloAsync() + hw.SayHelloAsync() + hw.SayHelloAsync() + hw.SayHelloAsync() + hw.Stop() -if __name__ == '__main__': - unittest.main() +if __name__ == "__main__": + unittest.main()
diff --git a/examples/relative_requirements/relative_package/setup.py b/examples/relative_requirements/relative_package/setup.py index 3fd85c1..052b519 100644 --- a/examples/relative_requirements/relative_package/setup.py +++ b/examples/relative_requirements/relative_package/setup.py
@@ -1,7 +1,7 @@ from setuptools import setup setup( - name='relative_package_name', - version='1.0.0', - packages=['relative_package_name'], + name="relative_package_name", + version="1.0.0", + packages=["relative_package_name"], )
diff --git a/examples/wheel/lib/module_with_data.py b/examples/wheel/lib/module_with_data.py index 7b28643..6b661eb 100644 --- a/examples/wheel/lib/module_with_data.py +++ b/examples/wheel/lib/module_with_data.py
@@ -12,5 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. + def function(): return "foo"
diff --git a/examples/wheel/lib/simple_module.py b/examples/wheel/lib/simple_module.py index fb26a51..b69ae2b 100644 --- a/examples/wheel/lib/simple_module.py +++ b/examples/wheel/lib/simple_module.py
@@ -12,5 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. + def function(): return "bar"
diff --git a/examples/wheel/main.py b/examples/wheel/main.py index 3068ff0..7c4d323 100644 --- a/examples/wheel/main.py +++ b/examples/wheel/main.py
@@ -26,5 +26,5 @@ print(simple_module.function()) -if __name__ == '__main__': +if __name__ == "__main__": main()
diff --git a/examples/wheel/wheel_test.py b/examples/wheel/wheel_test.py index e1d7b18..c28accd 100644 --- a/examples/wheel/wheel_test.py +++ b/examples/wheel/wheel_test.py
@@ -19,77 +19,101 @@ class WheelTest(unittest.TestCase): def test_py_library_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'example_minimal_library-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "example_minimal_library-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['examples/wheel/lib/module_with_data.py', - 'examples/wheel/lib/simple_module.py', - 'example_minimal_library-0.0.1.dist-info/WHEEL', - 'example_minimal_library-0.0.1.dist-info/METADATA', - 'example_minimal_library-0.0.1.dist-info/RECORD']) + [ + "examples/wheel/lib/module_with_data.py", + "examples/wheel/lib/simple_module.py", + "example_minimal_library-0.0.1.dist-info/WHEEL", + "example_minimal_library-0.0.1.dist-info/METADATA", + "example_minimal_library-0.0.1.dist-info/RECORD", + ], + ) def test_py_package_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'example_minimal_package-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "example_minimal_package-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['examples/wheel/lib/data.txt', - 'examples/wheel/lib/module_with_data.py', - 'examples/wheel/lib/simple_module.py', - 'examples/wheel/main.py', - 'example_minimal_package-0.0.1.dist-info/WHEEL', - 'example_minimal_package-0.0.1.dist-info/METADATA', - 'example_minimal_package-0.0.1.dist-info/RECORD']) + [ + "examples/wheel/lib/data.txt", + "examples/wheel/lib/module_with_data.py", + "examples/wheel/lib/simple_module.py", + "examples/wheel/main.py", + "example_minimal_package-0.0.1.dist-info/WHEEL", + "example_minimal_package-0.0.1.dist-info/METADATA", + "example_minimal_package-0.0.1.dist-info/RECORD", + ], + ) def 
test_customized_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'example_customized-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "example_customized-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['examples/wheel/lib/data.txt', - 'examples/wheel/lib/module_with_data.py', - 'examples/wheel/lib/simple_module.py', - 'examples/wheel/main.py', - 'example_customized-0.0.1.dist-info/WHEEL', - 'example_customized-0.0.1.dist-info/METADATA', - 'example_customized-0.0.1.dist-info/entry_points.txt', - 'example_customized-0.0.1.dist-info/RECORD']) - record_contents = zf.read( - 'example_customized-0.0.1.dist-info/RECORD') - wheel_contents = zf.read( - 'example_customized-0.0.1.dist-info/WHEEL') - metadata_contents = zf.read( - 'example_customized-0.0.1.dist-info/METADATA') + [ + "examples/wheel/lib/data.txt", + "examples/wheel/lib/module_with_data.py", + "examples/wheel/lib/simple_module.py", + "examples/wheel/main.py", + "example_customized-0.0.1.dist-info/WHEEL", + "example_customized-0.0.1.dist-info/METADATA", + "example_customized-0.0.1.dist-info/entry_points.txt", + "example_customized-0.0.1.dist-info/RECORD", + ], + ) + record_contents = zf.read("example_customized-0.0.1.dist-info/RECORD") + wheel_contents = zf.read("example_customized-0.0.1.dist-info/WHEEL") + metadata_contents = zf.read("example_customized-0.0.1.dist-info/METADATA") entry_point_contents = zf.read( - 'example_customized-0.0.1.dist-info/entry_points.txt') + "example_customized-0.0.1.dist-info/entry_points.txt" + ) # The entries are guaranteed to be sorted. 
- self.assertEquals(record_contents, b"""\ + self.assertEquals( + record_contents, + b"""\ example_customized-0.0.1.dist-info/METADATA,sha256=TeeEmokHE2NWjkaMcVJuSAq4_AXUoIad2-SLuquRmbg,372 example_customized-0.0.1.dist-info/RECORD,, example_customized-0.0.1.dist-info/WHEEL,sha256=sobxWSyDDkdg_rinUth-jxhXHqoNqlmNMJY3aTZn2Us,91 example_customized-0.0.1.dist-info/entry_points.txt,sha256=pqzpbQ8MMorrJ3Jp0ntmpZcuvfByyqzMXXi2UujuXD0,137 examples/wheel/lib/data.txt,sha256=9vJKEdfLu8bZRArKLroPZJh1XKkK3qFMXiM79MBL2Sg,12 -examples/wheel/lib/module_with_data.py,sha256=K_IGAq_CHcZX0HUyINpD1hqSKIEdCn58d9E9nhWF2EA,636 -examples/wheel/lib/simple_module.py,sha256=72-91Dm6NB_jw-7wYQt7shzdwvk5RB0LujIah8g7kr8,636 -examples/wheel/main.py,sha256=xnha0jPnVBJt3LUQRbLf7rFA5njczSdd3gm3kSyQJZw,909 -""") - self.assertEquals(wheel_contents, b"""\ +examples/wheel/lib/module_with_data.py,sha256=8s0Khhcqz3yVsBKv2IB5u4l4TMKh7-c_V6p65WVHPms,637 +examples/wheel/lib/simple_module.py,sha256=z2hwciab_XPNIBNH8B1Q5fYgnJvQTeYf0ZQJpY8yLLY,637 +examples/wheel/main.py,sha256=sgg5iWN_9inYBjm6_Zw27hYdmo-l24fA-2rfphT-IlY,909 +""", + ) + self.assertEquals( + wheel_contents, + b"""\ Wheel-Version: 1.0 Generator: bazel-wheelmaker 1.0 Root-Is-Purelib: true Tag: py3-none-any -""") - self.assertEquals(metadata_contents, b"""\ +""", + ) + self.assertEquals( + metadata_contents, + b"""\ Metadata-Version: 2.1 Name: example_customized Version: 0.0.1 @@ -102,112 +126,151 @@ Requires-Dist: pytest This is a sample description of a wheel. 
-""") - self.assertEquals(entry_point_contents, b"""\ +""", + ) + self.assertEquals( + entry_point_contents, + b"""\ [console_scripts] another = foo.bar:baz customized_wheel = examples.wheel.main:main [group2] first = first.main:f -second = second.main:s""") +second = second.main:s""", + ) def test_filename_escaping(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'file_name_escaping-0.0.1_r7-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "file_name_escaping-0.0.1_r7-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['examples/wheel/lib/data.txt', - 'examples/wheel/lib/module_with_data.py', - 'examples/wheel/lib/simple_module.py', - 'examples/wheel/main.py', - # PEP calls for replacing only in the archive filename. - # Alas setuptools also escapes in the dist-info directory - # name, so let's be compatible. - 'file_name_escaping-0.0.1_r7.dist-info/WHEEL', - 'file_name_escaping-0.0.1_r7.dist-info/METADATA', - 'file_name_escaping-0.0.1_r7.dist-info/RECORD']) + [ + "examples/wheel/lib/data.txt", + "examples/wheel/lib/module_with_data.py", + "examples/wheel/lib/simple_module.py", + "examples/wheel/main.py", + # PEP calls for replacing only in the archive filename. + # Alas setuptools also escapes in the dist-info directory + # name, so let's be compatible. 
+ "file_name_escaping-0.0.1_r7.dist-info/WHEEL", + "file_name_escaping-0.0.1_r7.dist-info/METADATA", + "file_name_escaping-0.0.1_r7.dist-info/RECORD", + ], + ) metadata_contents = zf.read( - 'file_name_escaping-0.0.1_r7.dist-info/METADATA') - self.assertEquals(metadata_contents, b"""\ + "file_name_escaping-0.0.1_r7.dist-info/METADATA" + ) + self.assertEquals( + metadata_contents, + b"""\ Metadata-Version: 2.1 Name: file~~name-escaping Version: 0.0.1-r7 UNKNOWN -""") +""", + ) def test_custom_package_root_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'example_custom_package_root-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "example_custom_package_root-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['wheel/lib/data.txt', - 'wheel/lib/module_with_data.py', - 'wheel/lib/simple_module.py', - 'wheel/main.py', - 'example_custom_package_root-0.0.1.dist-info/WHEEL', - 'example_custom_package_root-0.0.1.dist-info/METADATA', - 'example_custom_package_root-0.0.1.dist-info/RECORD']) + [ + "wheel/lib/data.txt", + "wheel/lib/module_with_data.py", + "wheel/lib/simple_module.py", + "wheel/main.py", + "example_custom_package_root-0.0.1.dist-info/WHEEL", + "example_custom_package_root-0.0.1.dist-info/METADATA", + "example_custom_package_root-0.0.1.dist-info/RECORD", + ], + ) def test_custom_package_root_multi_prefix_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'example_custom_package_root_multi_prefix-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "example_custom_package_root_multi_prefix-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['data.txt', - 'module_with_data.py', - 
'simple_module.py', - 'main.py', - 'example_custom_package_root_multi_prefix-0.0.1.dist-info/WHEEL', - 'example_custom_package_root_multi_prefix-0.0.1.dist-info/METADATA', - 'example_custom_package_root_multi_prefix-0.0.1.dist-info/RECORD']) + [ + "data.txt", + "module_with_data.py", + "simple_module.py", + "main.py", + "example_custom_package_root_multi_prefix-0.0.1.dist-info/WHEEL", + "example_custom_package_root_multi_prefix-0.0.1.dist-info/METADATA", + "example_custom_package_root_multi_prefix-0.0.1.dist-info/RECORD", + ], + ) def test_custom_package_root_multi_prefix_reverse_order_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'example_custom_package_root_multi_prefix_reverse_order-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "example_custom_package_root_multi_prefix_reverse_order-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['lib/data.txt', - 'lib/module_with_data.py', - 'lib/simple_module.py', - 'main.py', - 'example_custom_package_root_multi_prefix_reverse_order-0.0.1.dist-info/WHEEL', - 'example_custom_package_root_multi_prefix_reverse_order-0.0.1.dist-info/METADATA', - 'example_custom_package_root_multi_prefix_reverse_order-0.0.1.dist-info/RECORD']) + [ + "lib/data.txt", + "lib/module_with_data.py", + "lib/simple_module.py", + "main.py", + "example_custom_package_root_multi_prefix_reverse_order-0.0.1.dist-info/WHEEL", + "example_custom_package_root_multi_prefix_reverse_order-0.0.1.dist-info/METADATA", + "example_custom_package_root_multi_prefix_reverse_order-0.0.1.dist-info/RECORD", + ], + ) def test_python_requires_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'example_python_requires_in_a_package-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", 
+ "examples", + "wheel", + "example_python_requires_in_a_package-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: metadata_contents = zf.read( - 'example_python_requires_in_a_package-0.0.1.dist-info/METADATA') + "example_python_requires_in_a_package-0.0.1.dist-info/METADATA" + ) # The entries are guaranteed to be sorted. - self.assertEquals(metadata_contents, b"""\ + self.assertEquals( + metadata_contents, + b"""\ Metadata-Version: 2.1 Name: example_python_requires_in_a_package Version: 0.0.1 Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* UNKNOWN -""") +""", + ) def test_python_abi3_binary_wheel(self): filename = os.path.join( @@ -247,20 +310,26 @@ ) def test_genrule_creates_directory_and_is_included_in_wheel(self): - filename = os.path.join(os.environ['TEST_SRCDIR'], - 'rules_python', - 'examples', 'wheel', - 'use_genrule_with_dir_in_outs-0.0.1-py3-none-any.whl') + filename = os.path.join( + os.environ["TEST_SRCDIR"], + "rules_python", + "examples", + "wheel", + "use_genrule_with_dir_in_outs-0.0.1-py3-none-any.whl", + ) with zipfile.ZipFile(filename) as zf: self.assertEquals( zf.namelist(), - ['examples/wheel/main.py', - 'examples/wheel/someDir/foo.py', - 'use_genrule_with_dir_in_outs-0.0.1.dist-info/WHEEL', - 'use_genrule_with_dir_in_outs-0.0.1.dist-info/METADATA', - 'use_genrule_with_dir_in_outs-0.0.1.dist-info/RECORD']) + [ + "examples/wheel/main.py", + "examples/wheel/someDir/foo.py", + "use_genrule_with_dir_in_outs-0.0.1.dist-info/WHEEL", + "use_genrule_with_dir_in_outs-0.0.1.dist-info/METADATA", + "use_genrule_with_dir_in_outs-0.0.1.dist-info/RECORD", + ], + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main()
diff --git a/gazelle/modules_mapping/builder.py b/gazelle/modules_mapping/builder.py index 352bfcb..3b471c0 100644 --- a/gazelle/modules_mapping/builder.py +++ b/gazelle/modules_mapping/builder.py
@@ -67,4 +67,4 @@ help="The number of concurrent build jobs to be executed.", ) args = parser.parse_args() - exit(main(args.jobs, args.wheels)) \ No newline at end of file + exit(main(args.jobs, args.wheels))
diff --git a/gazelle/modules_mapping/generator.py b/gazelle/modules_mapping/generator.py index 44cfcf6..6ee654c 100644 --- a/gazelle/modules_mapping/generator.py +++ b/gazelle/modules_mapping/generator.py
@@ -19,10 +19,9 @@ def dig_wheel(self, wheel): mapping = {} wheel_paths = glob.glob(wheel["path"]) - assert ( - len(wheel_paths) != 0 - ), "wheel not found for {}: searched for {}".format( - wheel["name"], wheel["path"], + assert len(wheel_paths) != 0, "wheel not found for {}: searched for {}".format( + wheel["name"], + wheel["path"], ) wheel_path = wheel_paths[0] assert ( @@ -46,7 +45,7 @@ if ext == ".so": # Also remove extra metadata that is embeded as part of # the file name as an extra extension. - ext = ''.join(pathlib.Path(path).suffixes) + ext = "".join(pathlib.Path(path).suffixes) module = path[: -len(ext)].replace("/", ".") mapping[module] = wheel["name"] return mapping @@ -77,4 +76,4 @@ if __name__ == "__main__": wheels = sys.argv[1:] generator = Generator(sys.stdout, sys.stderr) - exit(generator.run(wheels)) \ No newline at end of file + exit(generator.run(wheels))
diff --git a/gazelle/parse.py b/gazelle/parse.py index bbc9e97..6fe2dc4 100644 --- a/gazelle/parse.py +++ b/gazelle/parse.py
@@ -60,4 +60,4 @@ if __name__ == "__main__": - exit(main(sys.stdin, sys.stdout)) \ No newline at end of file + exit(main(sys.stdin, sys.stdout))
diff --git a/gazelle/std_modules.py b/gazelle/std_modules.py index 59e132d..ccd1dcd 100644 --- a/gazelle/std_modules.py +++ b/gazelle/std_modules.py
@@ -9,11 +9,12 @@ # Don't return any paths, all userland site-packages should be ignored. def __override_getusersitepackages__(): - return '' + return "" site.getusersitepackages = __override_getusersitepackages__ + def is_std_modules(module): try: __import__(module, globals(), locals(), [], 0)
diff --git a/gazelle/testdata/first_party_file_and_directory_modules/__main__.py b/gazelle/testdata/first_party_file_and_directory_modules/__main__.py index 6aca4f0..acf5f10 100644 --- a/gazelle/testdata/first_party_file_and_directory_modules/__main__.py +++ b/gazelle/testdata/first_party_file_and_directory_modules/__main__.py
@@ -4,8 +4,8 @@ from one.two import two from package1.subpackage1.module1 import find_me -assert not hasattr(foo, 'foo') -assert baz() == 'baz from foo/bar.py' -assert another_baz() == 'baz from baz.py' -assert two() == 'two' -assert find_me() == 'found' +assert not hasattr(foo, "foo") +assert baz() == "baz from foo/bar.py" +assert another_baz() == "baz from baz.py" +assert two() == "two" +assert find_me() == "found"
diff --git a/gazelle/testdata/first_party_file_and_directory_modules/baz.py b/gazelle/testdata/first_party_file_and_directory_modules/baz.py index cc29925..b161d6a 100644 --- a/gazelle/testdata/first_party_file_and_directory_modules/baz.py +++ b/gazelle/testdata/first_party_file_and_directory_modules/baz.py
@@ -1,2 +1,2 @@ def baz(): - return 'baz from baz.py' + return "baz from baz.py"
diff --git a/gazelle/testdata/first_party_file_and_directory_modules/foo.py b/gazelle/testdata/first_party_file_and_directory_modules/foo.py index 81d3ef1..af3cbda 100644 --- a/gazelle/testdata/first_party_file_and_directory_modules/foo.py +++ b/gazelle/testdata/first_party_file_and_directory_modules/foo.py
@@ -1,2 +1,2 @@ def foo(): - print('foo') + print("foo")
diff --git a/gazelle/testdata/first_party_file_and_directory_modules/foo/bar.py b/gazelle/testdata/first_party_file_and_directory_modules/foo/bar.py index 4b6419f..d6524cc 100644 --- a/gazelle/testdata/first_party_file_and_directory_modules/foo/bar.py +++ b/gazelle/testdata/first_party_file_and_directory_modules/foo/bar.py
@@ -4,4 +4,4 @@ def baz(): - return 'baz from foo/bar.py' + return "baz from foo/bar.py"
diff --git a/gazelle/testdata/first_party_file_and_directory_modules/one/two.py b/gazelle/testdata/first_party_file_and_directory_modules/one/two.py index ce53b87..0020c44 100644 --- a/gazelle/testdata/first_party_file_and_directory_modules/one/two.py +++ b/gazelle/testdata/first_party_file_and_directory_modules/one/two.py
@@ -1,2 +1,2 @@ def two(): - return 'two' + return "two"
diff --git a/gazelle/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py b/gazelle/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py index 668c700..0ff1c42 100644 --- a/gazelle/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py +++ b/gazelle/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py
@@ -1,2 +1,2 @@ def find_me(): - return 'found' + return "found"
diff --git a/gazelle/testdata/generated_test_entrypoint/foo.py b/gazelle/testdata/generated_test_entrypoint/foo.py index a266b7c..cf68624 100644 --- a/gazelle/testdata/generated_test_entrypoint/foo.py +++ b/gazelle/testdata/generated_test_entrypoint/foo.py
@@ -1,2 +1,2 @@ def foo(): - return 'foo' + return "foo"
diff --git a/gazelle/testdata/simple_test/foo.py b/gazelle/testdata/simple_test/foo.py index a266b7c..cf68624 100644 --- a/gazelle/testdata/simple_test/foo.py +++ b/gazelle/testdata/simple_test/foo.py
@@ -1,2 +1,2 @@ def foo(): - return 'foo' + return "foo"
diff --git a/python/pip_install/extract_wheels/__init__.py b/python/pip_install/extract_wheels/__init__.py index bf5a18c..e8097e1 100644 --- a/python/pip_install/extract_wheels/__init__.py +++ b/python/pip_install/extract_wheels/__init__.py
@@ -7,13 +7,13 @@ """ import argparse import glob +import json import os import pathlib import subprocess import sys -import json -from python.pip_install.extract_wheels.lib import bazel, requirements, arguments +from python.pip_install.extract_wheels.lib import arguments, bazel, requirements def configure_reproducible_wheels() -> None: @@ -68,18 +68,23 @@ # relative requirements to be correctly resolved. The --wheel-dir is therefore required to be repointed back to the # current calling working directory (the repo root in .../external/name), where the wheel files should be written to pip_args = ( - [sys.executable, "-m", "pip"] + - (["--isolated"] if args.isolated else []) + - ["wheel", "-r", args.requirements] + - ["--wheel-dir", os.getcwd()] + - deserialized_args["extra_pip_args"] + [sys.executable, "-m", "pip"] + + (["--isolated"] if args.isolated else []) + + ["wheel", "-r", args.requirements] + + ["--wheel-dir", os.getcwd()] + + deserialized_args["extra_pip_args"] ) env = os.environ.copy() env.update(deserialized_args["environment"]) # Assumes any errors are logged by pip so do nothing. This command will fail if pip fails - subprocess.run(pip_args, check=True, env=env, cwd=str(pathlib.Path(args.requirements).parent.resolve())) + subprocess.run( + pip_args, + check=True, + env=env, + cwd=str(pathlib.Path(args.requirements).parent.resolve()), + ) extras = requirements.parse_extras(args.requirements)
diff --git a/python/pip_install/extract_wheels/lib/arguments.py b/python/pip_install/extract_wheels/lib/arguments.py index 9c8f49a..d7d3452 100644 --- a/python/pip_install/extract_wheels/lib/arguments.py +++ b/python/pip_install/extract_wheels/lib/arguments.py
@@ -10,10 +10,14 @@ help="The external repo name to install dependencies. In the format '@{REPO_NAME}'", ) parser.add_argument( - "--isolated", action="store_true", help="Whether or not to include the `--isolated` pip flag.", + "--isolated", + action="store_true", + help="Whether or not to include the `--isolated` pip flag.", ) parser.add_argument( - "--extra_pip_args", action="store", help="Extra arguments to pass down to pip.", + "--extra_pip_args", + action="store", + help="Extra arguments to pass down to pip.", ) parser.add_argument( "--pip_data_exclude",
diff --git a/python/pip_install/extract_wheels/lib/arguments_test.py b/python/pip_install/extract_wheels/lib/arguments_test.py index 89ab291..6a714be 100644 --- a/python/pip_install/extract_wheels/lib/arguments_test.py +++ b/python/pip_install/extract_wheels/lib/arguments_test.py
@@ -13,15 +13,17 @@ repo_prefix = "pypi_" index_url = "--index_url=pypi.org/simple" extra_pip_args = [index_url] - args_dict = vars(parser.parse_args( - args=[ - "--repo", - repo_name, - f"--extra_pip_args={json.dumps({'arg': extra_pip_args})}", - "--repo-prefix", - repo_prefix, - ] - )) + args_dict = vars( + parser.parse_args( + args=[ + "--repo", + repo_name, + f"--extra_pip_args={json.dumps({'arg': extra_pip_args})}", + "--repo-prefix", + repo_prefix, + ] + ) + ) args_dict = arguments.deserialize_structured_args(args_dict) self.assertIn("repo", args_dict) self.assertIn("extra_pip_args", args_dict)
diff --git a/python/pip_install/extract_wheels/lib/bazel.py b/python/pip_install/extract_wheels/lib/bazel.py index 3411756..8e9519f 100644 --- a/python/pip_install/extract_wheels/lib/bazel.py +++ b/python/pip_install/extract_wheels/lib/bazel.py
@@ -1,13 +1,12 @@ """Utility functions to manipulate Bazel files""" -import os -import textwrap import json -from typing import Iterable, List, Dict, Set, Optional +import os import shutil +import textwrap from pathlib import Path +from typing import Dict, Iterable, List, Optional, Set -from python.pip_install.extract_wheels.lib import namespace_pkgs, wheel, purelib - +from python.pip_install.extract_wheels.lib import namespace_pkgs, purelib, wheel WHEEL_FILE_LABEL = "whl" PY_LIBRARY_LABEL = "pkg" @@ -16,7 +15,9 @@ WHEEL_ENTRY_POINT_PREFIX = "rules_python_wheel_entry_point" -def generate_entry_point_contents(entry_point: str, shebang: str = "#!/usr/bin/env python3") -> str: +def generate_entry_point_contents( + entry_point: str, shebang: str = "#!/usr/bin/env python3" +) -> str: """Generate the contents of an entry point script. Args: @@ -29,7 +30,8 @@ str: A string of python code. """ module, method = entry_point.split(":", 1) - return textwrap.dedent("""\ + return textwrap.dedent( + """\ {shebang} import sys from {module} import {method} @@ -37,10 +39,9 @@ rc = {method}() sys.exit({method}()) """.format( - shebang=shebang, - module=module, - method=method - )) + shebang=shebang, module=module, method=method + ) + ) def generate_entry_point_rule(script: str, pkg: str) -> str: @@ -60,7 +61,8 @@ str: A `py_binary` instantiation. 
""" name = os.path.splitext(script)[0] - return textwrap.dedent("""\ + return textwrap.dedent( + """\ py_binary( name = "{name}", srcs = ["{src}"], @@ -70,10 +72,9 @@ deps = ["{pkg}"], ) """.format( - name=name, - src=str(script).replace("\\", "/"), - pkg=pkg - )) + name=name, src=str(script).replace("\\", "/"), pkg=pkg + ) + ) def generate_build_file_contents( @@ -112,8 +113,10 @@ "WORKSPACE", ] + pip_data_exclude - return "\n".join([textwrap.dedent( - """\ + return "\n".join( + [ + textwrap.dedent( + """\ load("@rules_python//python:defs.bzl", "py_library", "py_binary") package(default_visibility = ["//visibility:public"]) @@ -145,16 +148,19 @@ tags = [{tags}], ) """.format( - name=name, - dependencies=",".join(dependencies), - data_exclude=json.dumps(data_exclude), - whl_file_label=WHEEL_FILE_LABEL, - whl_file_deps=",".join(whl_file_deps), - tags=",".join(["\"%s\"" % t for t in tags]), - data_label=DATA_LABEL, - dist_info_label=DIST_INFO_LABEL, - entry_point_prefix=WHEEL_ENTRY_POINT_PREFIX, - ))] + additional_targets + name=name, + dependencies=",".join(dependencies), + data_exclude=json.dumps(data_exclude), + whl_file_label=WHEEL_FILE_LABEL, + whl_file_deps=",".join(whl_file_deps), + tags=",".join(['"%s"' % t for t in tags]), + data_label=DATA_LABEL, + dist_info_label=DIST_INFO_LABEL, + entry_point_prefix=WHEEL_ENTRY_POINT_PREFIX, + ) + ) + ] + + additional_targets ) @@ -251,7 +257,8 @@ """ namespace_pkg_dirs = namespace_pkgs.implicit_namespace_packages( - wheel_dir, ignored_dirnames=["%s/bin" % wheel_dir], + wheel_dir, + ignored_dirnames=["%s/bin" % wheel_dir], ) for ns_pkg_dir in namespace_pkg_dirs: @@ -271,11 +278,15 @@ def sanitised_repo_library_label(whl_name: str, repo_prefix: str) -> str: - return '"{}:{}"'.format(_whl_name_to_repo_root(whl_name, repo_prefix), PY_LIBRARY_LABEL) + return '"{}:{}"'.format( + _whl_name_to_repo_root(whl_name, repo_prefix), PY_LIBRARY_LABEL + ) def sanitised_repo_file_label(whl_name: str, repo_prefix: str) -> str: - return 
'"{}:{}"'.format(_whl_name_to_repo_root(whl_name, repo_prefix), WHEEL_FILE_LABEL) + return '"{}:{}"'.format( + _whl_name_to_repo_root(whl_name, repo_prefix), WHEEL_FILE_LABEL + ) def extract_wheel( @@ -335,17 +346,23 @@ sanitised_file_label(d, prefix=repo_prefix) for d in whl_deps ] - library_name = PY_LIBRARY_LABEL if incremental else sanitise_name(whl.name, repo_prefix) + library_name = ( + PY_LIBRARY_LABEL if incremental else sanitise_name(whl.name, repo_prefix) + ) directory_path = Path(directory) entry_points = [] for name, entry_point in sorted(whl.entry_points().items()): entry_point_script = f"{WHEEL_ENTRY_POINT_PREFIX}_{name}.py" - (directory_path / entry_point_script).write_text(generate_entry_point_contents(entry_point)) - entry_points.append(generate_entry_point_rule( - entry_point_script, - library_name, - )) + (directory_path / entry_point_script).write_text( + generate_entry_point_contents(entry_point) + ) + entry_points.append( + generate_entry_point_rule( + entry_point_script, + library_name, + ) + ) with open(os.path.join(directory, "BUILD.bazel"), "w") as build_file: contents = generate_build_file_contents(
diff --git a/python/pip_install/extract_wheels/lib/bazel_test.py b/python/pip_install/extract_wheels/lib/bazel_test.py index 5c22047..c6c11dc 100644 --- a/python/pip_install/extract_wheels/lib/bazel_test.py +++ b/python/pip_install/extract_wheels/lib/bazel_test.py
@@ -16,7 +16,8 @@ def test_generate_entry_point_contents_with_shebang(self): got = generate_entry_point_contents( - "sphinx.cmd.build:main", shebang="#!/usr/bin/python") + "sphinx.cmd.build:main", shebang="#!/usr/bin/python" + ) want = """#!/usr/bin/python import sys from sphinx.cmd.build import main
diff --git a/python/pip_install/extract_wheels/lib/namespace_pkgs.py b/python/pip_install/extract_wheels/lib/namespace_pkgs.py index b3177b3..5ddd4e1 100644 --- a/python/pip_install/extract_wheels/lib/namespace_pkgs.py +++ b/python/pip_install/extract_wheels/lib/namespace_pkgs.py
@@ -1,8 +1,8 @@ """Utility functions to discover python package types""" import os -from pathlib import Path # supported in >= 3.4 import textwrap -from typing import Set, List, Optional +from pathlib import Path # supported in >= 3.4 +from typing import List, Optional, Set def implicit_namespace_packages( @@ -25,19 +25,27 @@ directory_path = Path(directory) ignored_dirname_paths: List[Path] = [Path(p) for p in ignored_dirnames or ()] # Traverse bottom-up because a directory can be a namespace pkg because its child contains module files. - for dirpath, dirnames, filenames in map(lambda t: (Path(t[0]), *t[1:]), os.walk(directory_path, topdown=False)): + for dirpath, dirnames, filenames in map( + lambda t: (Path(t[0]), *t[1:]), os.walk(directory_path, topdown=False) + ): if "__init__.py" in filenames: standard_pkg_dirs.add(dirpath) continue elif ignored_dirname_paths: is_ignored_dir = dirpath in ignored_dirname_paths - child_of_ignored_dir = any(d in dirpath.parents for d in ignored_dirname_paths) + child_of_ignored_dir = any( + d in dirpath.parents for d in ignored_dirname_paths + ) if is_ignored_dir or child_of_ignored_dir: continue dir_includes_py_modules = _includes_python_modules(filenames) - parent_of_namespace_pkg = any(Path(dirpath, d) in namespace_pkg_dirs for d in dirnames) - parent_of_standard_pkg = any(Path(dirpath, d) in standard_pkg_dirs for d in dirnames) + parent_of_namespace_pkg = any( + Path(dirpath, d) in namespace_pkg_dirs for d in dirnames + ) + parent_of_standard_pkg = any( + Path(dirpath, d) in standard_pkg_dirs for d in dirnames + ) parent_of_pkg = parent_of_namespace_pkg or parent_of_standard_pkg if ( (dir_includes_py_modules or parent_of_pkg) @@ -94,10 +102,6 @@ ".py", # Source modules ".pyc", # Compiled bytecode modules ".so", # Unix extension modules - ".pyd" # https://docs.python.org/3/faq/windows.html#is-a-pyd-file-the-same-as-a-dll + ".pyd", # https://docs.python.org/3/faq/windows.html#is-a-pyd-file-the-same-as-a-dll } - return any( - 
Path(f).suffix in module_suffixes - for f - in files - ) + return any(Path(f).suffix in module_suffixes for f in files)
diff --git a/python/pip_install/extract_wheels/lib/namespace_pkgs_test.py b/python/pip_install/extract_wheels/lib/namespace_pkgs_test.py index dca7026..815fe62 100644 --- a/python/pip_install/extract_wheels/lib/namespace_pkgs_test.py +++ b/python/pip_install/extract_wheels/lib/namespace_pkgs_test.py
@@ -2,8 +2,8 @@ import pathlib import shutil import tempfile -from typing import Optional, Set import unittest +from typing import Optional, Set from python.pip_install.extract_wheels.lib import namespace_pkgs @@ -33,7 +33,6 @@ class TestImplicitNamespacePackages(unittest.TestCase): - def assertPathsEqual(self, actual: Set[pathlib.Path], expected: Set[str]) -> None: self.assertEqual(actual, {pathlib.Path(p) for p in expected})
diff --git a/python/pip_install/extract_wheels/lib/purelib.py b/python/pip_install/extract_wheels/lib/purelib.py index 99e2c80..40eb25d 100644 --- a/python/pip_install/extract_wheels/lib/purelib.py +++ b/python/pip_install/extract_wheels/lib/purelib.py
@@ -48,5 +48,6 @@ # See: https://github.com/dillon-giacoppo/rules_python_external/issues/8 if not pathlib.Path(root_dir, grandchild.name).exists(): shutil.move( - src=str(grandchild), dst=root_dir, + src=str(grandchild), + dst=root_dir, )
diff --git a/python/pip_install/extract_wheels/lib/requirements.py b/python/pip_install/extract_wheels/lib/requirements.py index e246379..cfab339 100644 --- a/python/pip_install/extract_wheels/lib/requirements.py +++ b/python/pip_install/extract_wheels/lib/requirements.py
@@ -1,5 +1,5 @@ import re -from typing import Dict, Set, Tuple, Optional +from typing import Dict, Optional, Set, Tuple def parse_extras(requirements_path: str) -> Dict[str, Set[str]]:
diff --git a/python/pip_install/extract_wheels/lib/requirements_bzl_test.py b/python/pip_install/extract_wheels/lib/requirements_bzl_test.py index 3424f3e..d753f6f 100644 --- a/python/pip_install/extract_wheels/lib/requirements_bzl_test.py +++ b/python/pip_install/extract_wheels/lib/requirements_bzl_test.py
@@ -6,10 +6,12 @@ class TestGenerateRequirementsFileContents(unittest.TestCase): def test_all_wheel_requirements(self) -> None: contents = bazel.generate_requirements_file_contents( - repo_name='test', + repo_name="test", targets=['"@test//pypi__pkg1"', '"@test//pypi__pkg2"'], ) - expected = 'all_whl_requirements = ["@test//pypi__pkg1:whl","@test//pypi__pkg2:whl"]' + expected = ( + 'all_whl_requirements = ["@test//pypi__pkg1:whl","@test//pypi__pkg2:whl"]' + ) self.assertIn(expected, contents)
diff --git a/python/pip_install/extract_wheels/lib/requirements_test.py b/python/pip_install/extract_wheels/lib/requirements_test.py index ba7ee13..0ee4255 100644 --- a/python/pip_install/extract_wheels/lib/requirements_test.py +++ b/python/pip_install/extract_wheels/lib/requirements_test.py
@@ -17,8 +17,14 @@ "name[quux, strange];python_version<'2.7' and platform_version=='2'", ("name", frozenset(["quux", "strange"])), ), - ("name; (os_name=='a' or os_name=='b') and os_name=='c'", (None, None),), - ("name@http://foo.com", (None, None),), + ( + "name; (os_name=='a' or os_name=='b') and os_name=='c'", + (None, None), + ), + ( + "name@http://foo.com", + (None, None), + ), ] for case, expected in cases:
diff --git a/python/pip_install/extract_wheels/lib/wheel.py b/python/pip_install/extract_wheels/lib/wheel.py index a60efc6..fadf8af 100644 --- a/python/pip_install/extract_wheels/lib/wheel.py +++ b/python/pip_install/extract_wheels/lib/wheel.py
@@ -55,7 +55,9 @@ # Calculate the location of the entry_points.txt file metadata = self.metadata name = "{}-{}".format(metadata.name.replace("-", "_"), metadata.version) - entry_points_path = os.path.join("{}.dist-info".format(name), "entry_points.txt") + entry_points_path = os.path.join( + "{}.dist-info".format(name), "entry_points.txt" + ) # If this file does not exist in the wheel, there are no entry points if entry_points_path not in whl.namelist(): @@ -111,7 +113,7 @@ def get_dist_info(wheel_dir: str) -> str: - """"Returns the relative path to the dist-info directory if it exists. + """ "Returns the relative path to the dist-info directory if it exists. Args: wheel_dir: The root of the extracted wheel directory.
diff --git a/python/pip_install/extract_wheels/lib/whl_filegroup_test.py b/python/pip_install/extract_wheels/lib/whl_filegroup_test.py index 39a0d4c..5bf5f7a 100644 --- a/python/pip_install/extract_wheels/lib/whl_filegroup_test.py +++ b/python/pip_install/extract_wheels/lib/whl_filegroup_test.py
@@ -1,7 +1,6 @@ import os import shutil import tempfile -from typing import Optional import unittest from python.pip_install.extract_wheels.lib import bazel @@ -12,9 +11,7 @@ self.wheel_name = "example_minimal_package-0.0.1-py3-none-any.whl" self.wheel_dir = tempfile.mkdtemp() self.wheel_path = os.path.join(self.wheel_dir, self.wheel_name) - shutil.copy( - os.path.join("examples", "wheel", self.wheel_name), self.wheel_dir - ) + shutil.copy(os.path.join("examples", "wheel", self.wheel_name), self.wheel_dir) self.original_dir = os.getcwd() os.chdir(self.wheel_dir) @@ -33,15 +30,17 @@ pip_data_exclude=[], enable_implicit_namespace_pkgs=False, incremental=incremental, - repo_prefix=repo_prefix + repo_prefix=repo_prefix, ) # Take off the leading // from the returned label. # Assert that the raw wheel ends up in the package. - generated_bazel_dir = generated_bazel_dir[2:] if not incremental else self.wheel_dir + generated_bazel_dir = ( + generated_bazel_dir[2:] if not incremental else self.wheel_dir + ) self.assertIn(self.wheel_name, os.listdir(generated_bazel_dir)) with open("{}/BUILD.bazel".format(generated_bazel_dir)) as build_file: build_file_content = build_file.read() - self.assertIn('filegroup', build_file_content) + self.assertIn("filegroup", build_file_content) def test_nonincremental(self) -> None: self._run(repo_prefix="prefix_")
diff --git a/python/pip_install/parse_requirements_to_bzl/__init__.py b/python/pip_install/parse_requirements_to_bzl/__init__.py index e22d4a3..1e1261d 100644 --- a/python/pip_install/parse_requirements_to_bzl/__init__.py +++ b/python/pip_install/parse_requirements_to_bzl/__init__.py
@@ -1,18 +1,27 @@ import argparse import json -import textwrap -import sys import shlex +import sys +import textwrap from typing import List, Tuple -from python.pip_install.extract_wheels.lib import bazel, arguments -from pip._internal.req import parse_requirements, constructors -from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.req_file import get_file_content, preprocess, handle_line, get_line_parser, RequirementsFileParser from pip._internal.network.session import PipSession +from pip._internal.req import constructors, parse_requirements +from pip._internal.req.req_file import ( + RequirementsFileParser, + get_file_content, + get_line_parser, + handle_line, + preprocess, +) +from pip._internal.req.req_install import InstallRequirement + +from python.pip_install.extract_wheels.lib import arguments, bazel -def parse_install_requirements(requirements_lock: str, extra_pip_args: List[str]) -> List[Tuple[InstallRequirement, str]]: +def parse_install_requirements( + requirements_lock: str, extra_pip_args: List[str] +) -> List[Tuple[InstallRequirement, str]]: ps = PipSession() # This is roughly taken from pip._internal.req.req_file.parse_requirements # (https://github.com/pypa/pip/blob/21.0.1/src/pip/_internal/req/req_file.py#L127) in order to keep @@ -22,13 +31,12 @@ parser = RequirementsFileParser(ps, line_parser) install_req_and_lines: List[Tuple[InstallRequirement, str]] = [] _, content = get_file_content(requirements_lock, ps) - for parsed_line, (_, line) in zip(parser.parse(requirements_lock, constraint=False), preprocess(content)): + for parsed_line, (_, line) in zip( + parser.parse(requirements_lock, constraint=False), preprocess(content) + ): if parsed_line.is_requirement: install_req_and_lines.append( - ( - constructors.install_req_from_line(parsed_line.requirement), - line - ) + (constructors.install_req_from_line(parsed_line.requirement), line) ) else: @@ -36,7 +44,9 @@ return install_req_and_lines -def 
repo_names_and_requirements(install_reqs: List[Tuple[InstallRequirement, str]], repo_prefix: str) -> List[Tuple[str, str]]: +def repo_names_and_requirements( + install_reqs: List[Tuple[InstallRequirement, str]], repo_prefix: str +) -> List[Tuple[str, str]]: return [ ( bazel.sanitise_name(ir.name, prefix=repo_prefix), @@ -61,19 +71,26 @@ # Pop this off because it wont be used as a config argument to the whl_library rule. requirements_lock = args.pop("requirements_lock") - install_req_and_lines = parse_install_requirements(requirements_lock, args["extra_pip_args"]) + install_req_and_lines = parse_install_requirements( + requirements_lock, args["extra_pip_args"] + ) repo_names_and_reqs = repo_names_and_requirements( install_req_and_lines, args["repo_prefix"] ) - all_requirements = ", ".join([ - bazel.sanitised_repo_library_label(ir.name, repo_prefix=args["repo_prefix"]) - for ir, _ in install_req_and_lines - ]) - all_whl_requirements = ", ".join([ - bazel.sanitised_repo_file_label(ir.name, repo_prefix=args["repo_prefix"]) - for ir, _ in install_req_and_lines - ]) - return textwrap.dedent("""\ + all_requirements = ", ".join( + [ + bazel.sanitised_repo_library_label(ir.name, repo_prefix=args["repo_prefix"]) + for ir, _ in install_req_and_lines + ] + ) + all_whl_requirements = ", ".join( + [ + bazel.sanitised_repo_file_label(ir.name, repo_prefix=args["repo_prefix"]) + for ir, _ in install_req_and_lines + ] + ) + return textwrap.dedent( + """\ load("@rules_python//python/pip_install:pip_repository.bzl", "whl_library") all_requirements = [{all_requirements}] @@ -121,11 +138,13 @@ data_label=bazel.DATA_LABEL, dist_info_label=bazel.DIST_INFO_LABEL, entry_point_prefix=bazel.WHEEL_ENTRY_POINT_PREFIX, - ) ) + ) + def coerce_to_bool(option): - return str(option).lower() == 'true' + return str(option).lower() == "true" + def main() -> None: parser = argparse.ArgumentParser( @@ -166,6 +185,4 @@ args = parser.parse_args() with open("requirements.bzl", "w") as requirement_file: - 
requirement_file.write( - generate_parsed_requirements_contents(args) - ) + requirement_file.write(generate_parsed_requirements_contents(args))
diff --git a/python/pip_install/parse_requirements_to_bzl/extract_single_wheel/__init__.py b/python/pip_install/parse_requirements_to_bzl/extract_single_wheel/__init__.py index 3937116..2c03ff3 100644 --- a/python/pip_install/parse_requirements_to_bzl/extract_single_wheel/__init__.py +++ b/python/pip_install/parse_requirements_to_bzl/extract_single_wheel/__init__.py
@@ -1,14 +1,13 @@ -import os import argparse -import sys +import errno import glob +import os import subprocess -import json - +import sys from tempfile import NamedTemporaryFile -from python.pip_install.extract_wheels.lib import bazel, requirements, arguments from python.pip_install.extract_wheels import configure_reproducible_wheels +from python.pip_install.extract_wheels.lib import arguments, bazel, requirements def main() -> None: @@ -29,13 +28,13 @@ configure_reproducible_wheels() pip_args = ( - [sys.executable, "-m", "pip"] + - (["--isolated"] if args.isolated else []) + - ["wheel", "--no-deps"] + - deserialized_args["extra_pip_args"] + [sys.executable, "-m", "pip"] + + (["--isolated"] if args.isolated else []) + + ["wheel", "--no-deps"] + + deserialized_args["extra_pip_args"] ) - requirement_file = NamedTemporaryFile(mode='wb', delete=False) + requirement_file = NamedTemporaryFile(mode="wb", delete=False) try: requirement_file.write(args.requirement.encode("utf-8")) requirement_file.flush()
diff --git a/python/pip_install/parse_requirements_to_bzl/parse_requirements_to_bzl_test.py b/python/pip_install/parse_requirements_to_bzl/parse_requirements_to_bzl_test.py index ad603f4..a5c76d3 100644 --- a/python/pip_install/parse_requirements_to_bzl/parse_requirements_to_bzl_test.py +++ b/python/pip_install/parse_requirements_to_bzl/parse_requirements_to_bzl_test.py
@@ -1,22 +1,21 @@ -import unittest import argparse import json +import unittest from tempfile import NamedTemporaryFile -from python.pip_install.parse_requirements_to_bzl import generate_parsed_requirements_contents -from python.pip_install.extract_wheels.lib.bazel import ( - sanitised_repo_library_label, - sanitised_repo_file_label +from python.pip_install.parse_requirements_to_bzl import ( + generate_parsed_requirements_contents, ) class TestParseRequirementsToBzl(unittest.TestCase): - def test_generated_requirements_bzl(self) -> None: with NamedTemporaryFile() as requirements_lock: comments_and_flags = "#comment\n--require-hashes True\n" requirement_string = "foo==0.0.0 --hash=sha256:hashofFoowhl" - requirements_lock.write(bytes(comments_and_flags + requirement_string, encoding="utf-8")) + requirements_lock.write( + bytes(comments_and_flags + requirement_string, encoding="utf-8") + ) requirements_lock.flush() args = argparse.Namespace() args.requirements_lock = requirements_lock.name @@ -24,23 +23,37 @@ extra_pip_args = ["--index-url=pypi.org/simple"] pip_data_exclude = ["**.foo"] args.extra_pip_args = json.dumps({"arg": extra_pip_args}) - args.pip_data_exclude= json.dumps({"arg": pip_data_exclude}) + args.pip_data_exclude = json.dumps({"arg": pip_data_exclude}) args.python_interpreter = "/custom/python3" args.python_interpreter_target = "@custom_python//:exec" - args.environment= json.dumps({"arg": {}}) + args.environment = json.dumps({"arg": {}}) contents = generate_parsed_requirements_contents(args) library_target = "@pip_parsed_deps_pypi__foo//:pkg" whl_target = "@pip_parsed_deps_pypi__foo//:whl" - all_requirements = 'all_requirements = ["{library_target}"]'.format(library_target=library_target) - all_whl_requirements = 'all_whl_requirements = ["{whl_target}"]'.format(whl_target=whl_target) + all_requirements = 'all_requirements = ["{library_target}"]'.format( + library_target=library_target + ) + all_whl_requirements = 'all_whl_requirements = 
["{whl_target}"]'.format( + whl_target=whl_target + ) self.assertIn(all_requirements, contents, contents) self.assertIn(all_whl_requirements, contents, contents) self.assertIn(requirement_string, contents, contents) all_flags = extra_pip_args + ["--require-hashes", "True"] - self.assertIn("'extra_pip_args': {}".format(repr(all_flags)), contents, contents) - self.assertIn("'pip_data_exclude': {}".format(repr(pip_data_exclude)), contents, contents) + self.assertIn( + "'extra_pip_args': {}".format(repr(all_flags)), contents, contents + ) + self.assertIn( + "'pip_data_exclude': {}".format(repr(pip_data_exclude)), + contents, + contents, + ) self.assertIn("'python_interpreter': '/custom/python3'", contents, contents) - self.assertIn("'python_interpreter_target': '@custom_python//:exec'", contents, contents) + self.assertIn( + "'python_interpreter_target': '@custom_python//:exec'", + contents, + contents, + ) # Assert it gets set to an empty dict by default. self.assertIn("'environment': {}", contents, contents)
diff --git a/python/pip_install/pip_compile.py b/python/pip_install/pip_compile.py index ce40285..1e789e9 100644 --- a/python/pip_install/pip_compile.py +++ b/python/pip_install/pip_compile.py
@@ -49,7 +49,7 @@ # # Changing to the WORKSPACE root avoids 'file not found' errors when the `.update` target is run # from different directories within the WORKSPACE. - os.chdir(os.environ['BUILD_WORKSPACE_DIRECTORY']) + os.chdir(os.environ["BUILD_WORKSPACE_DIRECTORY"]) else: err_msg = ( "Expected to find BUILD_WORKSPACE_DIRECTORY (running under `bazel run`) or " @@ -61,11 +61,14 @@ ) sys.exit(1) -update_target_pkg = "/".join(requirements_in.split('/')[:-1]) +update_target_pkg = "/".join(requirements_in.split("/")[:-1]) # $(rootpath) in the workspace root gives ./requirements.in if update_target_pkg == ".": update_target_pkg = "" -update_command = os.getenv("CUSTOM_COMPILE_COMMAND") or "bazel run //%s:%s" % (update_target_pkg, update_target_name) +update_command = os.getenv("CUSTOM_COMPILE_COMMAND") or "bazel run //%s:%s" % ( + update_target_pkg, + update_target_name, +) os.environ["CUSTOM_COMPILE_COMMAND"] = update_command os.environ["PIP_CONFIG_FILE"] = os.getenv("PIP_CONFIG_FILE") or os.devnull @@ -100,18 +103,15 @@ if golden != out: import difflib - print(''.join(difflib.unified_diff(golden, out)), file=sys.stderr) + print("".join(difflib.unified_diff(golden, out)), file=sys.stderr) print( - "Lock file out of date. Run '" - + update_command - + "' to update.", + "Lock file out of date. Run '" + update_command + "' to update.", file=sys.stderr, ) sys.exit(1) sys.exit(0) else: print( - f"pip-compile unexpectedly exited with code {e.code}.", - file=sys.stderr + f"pip-compile unexpectedly exited with code {e.code}.", file=sys.stderr ) sys.exit(1)
diff --git a/python/runfiles/runfiles.py b/python/runfiles/runfiles.py index e8e867d..f11613d 100644 --- a/python/runfiles/runfiles.py +++ b/python/runfiles/runfiles.py
@@ -70,224 +70,231 @@ def CreateManifestBased(manifest_path): - return _Runfiles(_ManifestBased(manifest_path)) + return _Runfiles(_ManifestBased(manifest_path)) def CreateDirectoryBased(runfiles_dir_path): - return _Runfiles(_DirectoryBased(runfiles_dir_path)) + return _Runfiles(_DirectoryBased(runfiles_dir_path)) def Create(env=None): - """Returns a new `Runfiles` instance. + """Returns a new `Runfiles` instance. - The returned object is either: - - manifest-based, meaning it looks up runfile paths from a manifest file, or - - directory-based, meaning it looks up runfile paths under a given directory - path + The returned object is either: + - manifest-based, meaning it looks up runfile paths from a manifest file, or + - directory-based, meaning it looks up runfile paths under a given directory + path - If `env` contains "RUNFILES_MANIFEST_FILE" with non-empty value, this method - returns a manifest-based implementation. The object eagerly reads and caches - the whole manifest file upon instantiation; this may be relevant for - performance consideration. + If `env` contains "RUNFILES_MANIFEST_FILE" with non-empty value, this method + returns a manifest-based implementation. The object eagerly reads and caches + the whole manifest file upon instantiation; this may be relevant for + performance consideration. - Otherwise, if `env` contains "RUNFILES_DIR" with non-empty value (checked in - this priority order), this method returns a directory-based implementation. + Otherwise, if `env` contains "RUNFILES_DIR" with non-empty value (checked in + this priority order), this method returns a directory-based implementation. - If neither cases apply, this method returns null. + If neither cases apply, this method returns null. - Args: - env: {string: string}; optional; the map of environment variables. If None, - this function uses the environment variable map of this process. - Raises: - IOError: if some IO error occurs. 
- """ - env_map = os.environ if env is None else env - manifest = env_map.get("RUNFILES_MANIFEST_FILE") - if manifest: - return CreateManifestBased(manifest) + Args: + env: {string: string}; optional; the map of environment variables. If None, + this function uses the environment variable map of this process. + Raises: + IOError: if some IO error occurs. + """ + env_map = os.environ if env is None else env + manifest = env_map.get("RUNFILES_MANIFEST_FILE") + if manifest: + return CreateManifestBased(manifest) - directory = env_map.get("RUNFILES_DIR") - if directory: - return CreateDirectoryBased(directory) + directory = env_map.get("RUNFILES_DIR") + if directory: + return CreateDirectoryBased(directory) - return None + return None class _Runfiles(object): - """Returns the runtime location of runfiles. - - Runfiles are data-dependencies of Bazel-built binaries and tests. - """ - - def __init__(self, strategy): - self._strategy = strategy - - def Rlocation(self, path): - """Returns the runtime path of a runfile. + """Returns the runtime location of runfiles. Runfiles are data-dependencies of Bazel-built binaries and tests. - - The returned path may not be valid. The caller should check the path's - validity and that the path exists. - - The function may return None. In that case the caller can be sure that the - rule does not know about this data-dependency. - - Args: - path: string; runfiles-root-relative path of the runfile - Returns: - the path to the runfile, which the caller should check for existence, or - None if the method doesn't know about this runfile - Raises: - TypeError: if `path` is not a string - ValueError: if `path` is None or empty, or it's absolute or not normalized """ - if not path: - raise ValueError() - if not isinstance(path, str): - raise TypeError() - if (path.startswith("../") or "/.." 
in path or path.startswith("./") or - "/./" in path or path.endswith("/.") or "//" in path): - raise ValueError("path is not normalized: \"%s\"" % path) - if path[0] == "\\": - raise ValueError("path is absolute without a drive letter: \"%s\"" % path) - if os.path.isabs(path): - return path - return self._strategy.RlocationChecked(path) - def EnvVars(self): - """Returns environment variables for subprocesses. + def __init__(self, strategy): + self._strategy = strategy - The caller should set the returned key-value pairs in the environment of - subprocesses in case those subprocesses are also Bazel-built binaries that - need to use runfiles. + def Rlocation(self, path): + """Returns the runtime path of a runfile. - Returns: - {string: string}; a dict; keys are environment variable names, values are - the values for these environment variables - """ - return self._strategy.EnvVars() + Runfiles are data-dependencies of Bazel-built binaries and tests. + + The returned path may not be valid. The caller should check the path's + validity and that the path exists. + + The function may return None. In that case the caller can be sure that the + rule does not know about this data-dependency. + + Args: + path: string; runfiles-root-relative path of the runfile + Returns: + the path to the runfile, which the caller should check for existence, or + None if the method doesn't know about this runfile + Raises: + TypeError: if `path` is not a string + ValueError: if `path` is None or empty, or it's absolute or not normalized + """ + if not path: + raise ValueError() + if not isinstance(path, str): + raise TypeError() + if ( + path.startswith("../") + or "/.." 
in path + or path.startswith("./") + or "/./" in path + or path.endswith("/.") + or "//" in path + ): + raise ValueError('path is not normalized: "%s"' % path) + if path[0] == "\\": + raise ValueError('path is absolute without a drive letter: "%s"' % path) + if os.path.isabs(path): + return path + return self._strategy.RlocationChecked(path) + + def EnvVars(self): + """Returns environment variables for subprocesses. + + The caller should set the returned key-value pairs in the environment of + subprocesses in case those subprocesses are also Bazel-built binaries that + need to use runfiles. + + Returns: + {string: string}; a dict; keys are environment variable names, values are + the values for these environment variables + """ + return self._strategy.EnvVars() class _ManifestBased(object): - """`Runfiles` strategy that parses a runfiles-manifest to look up runfiles.""" + """`Runfiles` strategy that parses a runfiles-manifest to look up runfiles.""" - def __init__(self, path): - if not path: - raise ValueError() - if not isinstance(path, str): - raise TypeError() - self._path = path - self._runfiles = _ManifestBased._LoadRunfiles(path) + def __init__(self, path): + if not path: + raise ValueError() + if not isinstance(path, str): + raise TypeError() + self._path = path + self._runfiles = _ManifestBased._LoadRunfiles(path) - def RlocationChecked(self, path): - return self._runfiles.get(path) + def RlocationChecked(self, path): + return self._runfiles.get(path) - @staticmethod - def _LoadRunfiles(path): - """Loads the runfiles manifest.""" - result = {} - with open(path, "r") as f: - for line in f: - line = line.strip() - if line: - tokens = line.split(" ", 1) - if len(tokens) == 1: - result[line] = line - else: - result[tokens[0]] = tokens[1] - return result + @staticmethod + def _LoadRunfiles(path): + """Loads the runfiles manifest.""" + result = {} + with open(path, "r") as f: + for line in f: + line = line.strip() + if line: + tokens = line.split(" ", 1) + if 
len(tokens) == 1: + result[line] = line + else: + result[tokens[0]] = tokens[1] + return result - def _GetRunfilesDir(self): - if self._path.endswith("/MANIFEST") or self._path.endswith("\\MANIFEST"): - return self._path[:-len("/MANIFEST")] - elif self._path.endswith(".runfiles_manifest"): - return self._path[:-len("_manifest")] - else: - return "" + def _GetRunfilesDir(self): + if self._path.endswith("/MANIFEST") or self._path.endswith("\\MANIFEST"): + return self._path[: -len("/MANIFEST")] + elif self._path.endswith(".runfiles_manifest"): + return self._path[: -len("_manifest")] + else: + return "" - def EnvVars(self): - directory = self._GetRunfilesDir() - return { - "RUNFILES_MANIFEST_FILE": self._path, - "RUNFILES_DIR": directory, - # TODO(laszlocsomor): remove JAVA_RUNFILES once the Java launcher can - # pick up RUNFILES_DIR. - "JAVA_RUNFILES": directory, - } + def EnvVars(self): + directory = self._GetRunfilesDir() + return { + "RUNFILES_MANIFEST_FILE": self._path, + "RUNFILES_DIR": directory, + # TODO(laszlocsomor): remove JAVA_RUNFILES once the Java launcher can + # pick up RUNFILES_DIR. + "JAVA_RUNFILES": directory, + } class _DirectoryBased(object): - """`Runfiles` strategy that appends runfiles paths to the runfiles root.""" + """`Runfiles` strategy that appends runfiles paths to the runfiles root.""" - def __init__(self, path): - if not path: - raise ValueError() - if not isinstance(path, str): - raise TypeError() - self._runfiles_root = path + def __init__(self, path): + if not path: + raise ValueError() + if not isinstance(path, str): + raise TypeError() + self._runfiles_root = path - def RlocationChecked(self, path): - # Use posixpath instead of os.path, because Bazel only creates a runfiles - # tree on Unix platforms, so `Create()` will only create a directory-based - # runfiles strategy on those platforms. 
- return posixpath.join(self._runfiles_root, path) + def RlocationChecked(self, path): + # Use posixpath instead of os.path, because Bazel only creates a runfiles + # tree on Unix platforms, so `Create()` will only create a directory-based + # runfiles strategy on those platforms. + return posixpath.join(self._runfiles_root, path) - def EnvVars(self): - return { - "RUNFILES_DIR": self._runfiles_root, - # TODO(laszlocsomor): remove JAVA_RUNFILES once the Java launcher can - # pick up RUNFILES_DIR. - "JAVA_RUNFILES": self._runfiles_root, - } + def EnvVars(self): + return { + "RUNFILES_DIR": self._runfiles_root, + # TODO(laszlocsomor): remove JAVA_RUNFILES once the Java launcher can + # pick up RUNFILES_DIR. + "JAVA_RUNFILES": self._runfiles_root, + } -def _PathsFrom(argv0, runfiles_mf, runfiles_dir, is_runfiles_manifest, - is_runfiles_directory): - """Discover runfiles manifest and runfiles directory paths. +def _PathsFrom( + argv0, runfiles_mf, runfiles_dir, is_runfiles_manifest, is_runfiles_directory +): + """Discover runfiles manifest and runfiles directory paths. 
- Args: - argv0: string; the value of sys.argv[0] - runfiles_mf: string; the value of the RUNFILES_MANIFEST_FILE environment - variable - runfiles_dir: string; the value of the RUNFILES_DIR environment variable - is_runfiles_manifest: lambda(string):bool; returns true if the argument is - the path of a runfiles manifest file - is_runfiles_directory: lambda(string):bool; returns true if the argument is - the path of a runfiles directory + Args: + argv0: string; the value of sys.argv[0] + runfiles_mf: string; the value of the RUNFILES_MANIFEST_FILE environment + variable + runfiles_dir: string; the value of the RUNFILES_DIR environment variable + is_runfiles_manifest: lambda(string):bool; returns true if the argument is + the path of a runfiles manifest file + is_runfiles_directory: lambda(string):bool; returns true if the argument is + the path of a runfiles directory - Returns: - (string, string) pair, first element is the path to the runfiles manifest, - second element is the path to the runfiles directory. If the first element - is non-empty, then is_runfiles_manifest returns true for it. Same goes for - the second element and is_runfiles_directory respectively. If both elements - are empty, then this function could not find a manifest or directory for - which is_runfiles_manifest or is_runfiles_directory returns true. - """ - mf_alid = is_runfiles_manifest(runfiles_mf) - dir_valid = is_runfiles_directory(runfiles_dir) - - if not mf_alid and not dir_valid: - runfiles_mf = argv0 + ".runfiles/MANIFEST" - runfiles_dir = argv0 + ".runfiles" + Returns: + (string, string) pair, first element is the path to the runfiles manifest, + second element is the path to the runfiles directory. If the first element + is non-empty, then is_runfiles_manifest returns true for it. Same goes for + the second element and is_runfiles_directory respectively. 
If both elements + are empty, then this function could not find a manifest or directory for + which is_runfiles_manifest or is_runfiles_directory returns true. + """ mf_alid = is_runfiles_manifest(runfiles_mf) dir_valid = is_runfiles_directory(runfiles_dir) + + if not mf_alid and not dir_valid: + runfiles_mf = argv0 + ".runfiles/MANIFEST" + runfiles_dir = argv0 + ".runfiles" + mf_alid = is_runfiles_manifest(runfiles_mf) + dir_valid = is_runfiles_directory(runfiles_dir) + if not mf_alid: + runfiles_mf = argv0 + ".runfiles_manifest" + mf_alid = is_runfiles_manifest(runfiles_mf) + + if not mf_alid and not dir_valid: + return ("", "") + if not mf_alid: - runfiles_mf = argv0 + ".runfiles_manifest" - mf_alid = is_runfiles_manifest(runfiles_mf) + runfiles_mf = runfiles_dir + "/MANIFEST" + mf_alid = is_runfiles_manifest(runfiles_mf) + if not mf_alid: + runfiles_mf = runfiles_dir + "_manifest" + mf_alid = is_runfiles_manifest(runfiles_mf) - if not mf_alid and not dir_valid: - return ("", "") + if not dir_valid: + runfiles_dir = runfiles_mf[:-9] # "_manifest" or "/MANIFEST" + dir_valid = is_runfiles_directory(runfiles_dir) - if not mf_alid: - runfiles_mf = runfiles_dir + "/MANIFEST" - mf_alid = is_runfiles_manifest(runfiles_mf) - if not mf_alid: - runfiles_mf = runfiles_dir + "_manifest" - mf_alid = is_runfiles_manifest(runfiles_mf) - - if not dir_valid: - runfiles_dir = runfiles_mf[:-9] # "_manifest" or "/MANIFEST" - dir_valid = is_runfiles_directory(runfiles_dir) - - return (runfiles_mf if mf_alid else "", runfiles_dir if dir_valid else "") + return (runfiles_mf if mf_alid else "", runfiles_dir if dir_valid else "")
diff --git a/tools/bazel_integration_test/test_runner.py b/tools/bazel_integration_test/test_runner.py index 4e83d66..df7e528 100644 --- a/tools/bazel_integration_test/test_runner.py +++ b/tools/bazel_integration_test/test_runner.py
@@ -1,16 +1,18 @@ -from pathlib import Path import json import os import platform import re import shutil -from subprocess import Popen import sys import tempfile +from pathlib import Path +from subprocess import Popen from rules_python.python.runfiles import runfiles + r = runfiles.Create() + def modify_WORKSPACE(wksp, distro_path): """Update the WORKSPACE file in the example to point to our locally-built tar.gz This allows users to clone rules_python, cd into the example/dir, and run the example directly, @@ -20,58 +22,64 @@ wksp: filesystem absolute path of the bazel WORKSPACE file under test distro_path: runfiles path of the distro .tar.gz """ - with open(wksp, 'r') as wksp_file: + with open(wksp, "r") as wksp_file: content = wksp_file.read() # Replace the url for rules_python with our locally built one content = re.sub( r'url = "https://github.com/bazelbuild/rules_python/[^"]+"', 'url = "file://%s"' % r.Rlocation(distro_path), - content) + content, + ) # comment out sha256 and strip_prefix if present - content = re.sub(r'sha256 = "', '#\1', content) - content = re.sub(r'strip_prefix = "', '#\1', content) - with open(wksp, 'w') as wksp_file: + content = re.sub(r'sha256 = "', "#\1", content) + content = re.sub(r'strip_prefix = "', "#\1", content) + with open(wksp, "w") as wksp_file: wksp_file.write(content) + def main(conf_file): with open(conf_file) as j: config = json.load(j) - isWindows = platform.system() == 'Windows' - bazelBinary = r.Rlocation(os.path.join(config['bazelBinaryWorkspace'], 'bazel.exe' if isWindows else 'bazel')) - - workspacePath = config['workspaceRoot'] - # Canonicalize bazel external/some_repo/foo - if workspacePath.startswith('external/'): - workspacePath = '..' 
+ workspacePath[len('external'):] + isWindows = platform.system() == "Windows" + bazelBinary = r.Rlocation( + os.path.join( + config["bazelBinaryWorkspace"], "bazel.exe" if isWindows else "bazel" + ) + ) - with tempfile.TemporaryDirectory(dir = os.environ['TEST_TMPDIR']) as tmpdir: + workspacePath = config["workspaceRoot"] + # Canonicalize bazel external/some_repo/foo + if workspacePath.startswith("external/"): + workspacePath = ".." + workspacePath[len("external") :] + + with tempfile.TemporaryDirectory(dir=os.environ["TEST_TMPDIR"]) as tmpdir: workdir = os.path.join(tmpdir, "wksp") print("copying workspace under test %s to %s" % (workspacePath, workdir)) shutil.copytree(workspacePath, workdir) - modify_WORKSPACE(os.path.join(workdir, 'WORKSPACE'), config['distro']) + modify_WORKSPACE(os.path.join(workdir, "WORKSPACE"), config["distro"]) - for command in config['bazelCommands']: - bazel_args = command.split(' ') + for command in config["bazelCommands"]: + bazel_args = command.split(" ") try: - doubleHyphenPos = bazel_args.index('--') + doubleHyphenPos = bazel_args.index("--") print("patch that in ", doubleHyphenPos) except ValueError: pass - - # Bazel's wrapper script needs this or you get + # Bazel's wrapper script needs this or you get # 2020/07/13 21:58:11 could not get the user's cache directory: $HOME is not defined - os.environ['HOME'] = str(Path.home()) + os.environ["HOME"] = str(Path.home()) bazel_args.insert(0, bazelBinary) - bazel_process = Popen(bazel_args, cwd = workdir) + bazel_process = Popen(bazel_args, cwd=workdir) bazel_process.wait() if bazel_process.returncode != 0: # Test failure in Bazel is exit 3 # https://github.com/bazelbuild/bazel/blob/486206012a664ecb20bdb196a681efc9a9825049/src/main/java/com/google/devtools/build/lib/util/ExitCode.java#L44 sys.exit(3) -if __name__ == '__main__': - main(sys.argv[1]) + +if __name__ == "__main__": + main(sys.argv[1])
diff --git a/tools/wheelmaker.py b/tools/wheelmaker.py index 9718e47..4b87a59 100644 --- a/tools/wheelmaker.py +++ b/tools/wheelmaker.py
@@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pathlib import Path import argparse import base64 import collections @@ -21,6 +20,7 @@ import re import sys import zipfile +from pathlib import Path def commonpath(path1, path2): @@ -38,8 +38,17 @@ class WheelMaker(object): - def __init__(self, name, version, build_tag, python_tag, abi, platform, - outfile=None, strip_path_prefixes=None): + def __init__( + self, + name, + version, + build_tag, + python_tag, + abi, + platform, + outfile=None, + strip_path_prefixes=None, + ): self._name = name self._version = version self._build_tag = build_tag @@ -47,17 +56,23 @@ self._abi = abi self._platform = platform self._outfile = outfile - self._strip_path_prefixes = strip_path_prefixes if strip_path_prefixes is not None else [] + self._strip_path_prefixes = ( + strip_path_prefixes if strip_path_prefixes is not None else [] + ) - self._distinfo_dir = (escape_filename_segment(self._name) + '-' + - escape_filename_segment(self._version) + - '.dist-info/') + self._distinfo_dir = ( + escape_filename_segment(self._name) + + "-" + + escape_filename_segment(self._version) + + ".dist-info/" + ) self._zipfile = None self._record = [] def __enter__(self): - self._zipfile = zipfile.ZipFile(self.filename(), mode="w", - compression=zipfile.ZIP_DEFLATED) + self._zipfile = zipfile.ZipFile( + self.filename(), mode="w", compression=zipfile.ZIP_DEFLATED + ) return self def __exit__(self, type, value, traceback): @@ -69,7 +84,7 @@ if self._build_tag: components.append(self._build_tag) components += [self._python_tag, self._abi, self._platform] - return '-'.join(components) + '.whl' + return "-".join(components) + ".whl" def filename(self) -> str: if self._outfile: @@ -77,7 +92,7 @@ return self.wheelname() def disttags(self): - return ['-'.join([self._python_tag, self._abi, self._platform])] + return ["-".join([self._python_tag, self._abi, self._platform])] def 
distinfo_path(self, basename): return self._distinfo_dir + basename @@ -86,36 +101,37 @@ # https://www.python.org/dev/peps/pep-0376/#record # "base64.urlsafe_b64encode(digest) with trailing = removed" digest = base64.urlsafe_b64encode(hash.digest()) - digest = b'sha256=' + digest.rstrip(b'=') + digest = b"sha256=" + digest.rstrip(b"=") return digest def add_string(self, filename, contents): """Add given 'contents' as filename to the distribution.""" if sys.version_info[0] > 2 and isinstance(contents, str): - contents = contents.encode('utf-8', 'surrogateescape') + contents = contents.encode("utf-8", "surrogateescape") self._zipfile.writestr(filename, contents) hash = hashlib.sha256() hash.update(contents) - self._add_to_record(filename, self._serialize_digest(hash), - len(contents)) + self._add_to_record(filename, self._serialize_digest(hash), len(contents)) def add_file(self, package_filename, real_filename): """Add given file to the distribution.""" def arcname_from(name): # Always use unix path separators. 
- normalized_arcname = name.replace(os.path.sep, '/') + normalized_arcname = name.replace(os.path.sep, "/") for prefix in self._strip_path_prefixes: if normalized_arcname.startswith(prefix): - return normalized_arcname[len(prefix):] + return normalized_arcname[len(prefix) :] return normalized_arcname if os.path.isdir(real_filename): directory_contents = os.listdir(real_filename) for file_ in directory_contents: - self.add_file("{}/{}".format(package_filename, file_), - "{}/{}".format(real_filename, file_)) + self.add_file( + "{}/{}".format(package_filename, file_), + "{}/{}".format(real_filename, file_), + ) return arcname = arcname_from(package_filename) @@ -124,7 +140,7 @@ # Find the hash and length hash = hashlib.sha256() size = 0 - with open(real_filename, 'rb') as f: + with open(real_filename, "rb") as f: while True: block = f.read(2 ** 20) if not block: @@ -140,13 +156,22 @@ Wheel-Version: 1.0 Generator: bazel-wheelmaker 1.0 Root-Is-Purelib: {} -""".format("true" if self._platform == "any" else "false") +""".format( + "true" if self._platform == "any" else "false" + ) for tag in self.disttags(): wheel_contents += "Tag: %s\n" % tag - self.add_string(self.distinfo_path('WHEEL'), wheel_contents) + self.add_string(self.distinfo_path("WHEEL"), wheel_contents) - def add_metadata(self, extra_headers, description, classifiers, python_requires, - requires, extra_requires): + def add_metadata( + self, + extra_headers, + description, + classifiers, + python_requires, + requires, + extra_requires, + ): """Write METADATA file to the distribution.""" # https://www.python.org/dev/peps/pep-0566/ # https://packaging.python.org/specifications/core-metadata/ @@ -167,29 +192,30 @@ metadata.append("Provides-Extra: %s" % option) for requirement in option_requires: metadata.append( - "Requires-Dist: %s; extra == '%s'" % (requirement, option)) + "Requires-Dist: %s; extra == '%s'" % (requirement, option) + ) - metadata = '\n'.join(metadata) + '\n\n' + metadata = "\n".join(metadata) + 
"\n\n" # setuptools seems to insert UNKNOWN as description when none is # provided. metadata += description if description else "UNKNOWN" metadata += "\n" - self.add_string(self.distinfo_path('METADATA'), metadata) + self.add_string(self.distinfo_path("METADATA"), metadata) def add_recordfile(self): """Write RECORD file to the distribution.""" - record_path = self.distinfo_path('RECORD') - entries = self._record + [(record_path, b'', b'')] + record_path = self.distinfo_path("RECORD") + entries = self._record + [(record_path, b"", b"")] entries.sort() - contents = b'' + contents = b"" for filename, digest, size in entries: if sys.version_info[0] > 2 and isinstance(filename, str): - filename = filename.encode('utf-8', 'surrogateescape') - contents += b'%s,%s,%s\n' % (filename, digest, size) + filename = filename.encode("utf-8", "surrogateescape") + contents += b"%s,%s,%s\n" % (filename, digest, size) self.add_string(record_path, contents) def _add_to_record(self, filename, hash, size): - size = str(size).encode('ascii') + size = str(size).encode("ascii") self._record.append((filename, hash, size)) @@ -204,7 +230,9 @@ return files -def resolve_version_stamp(version: str, volatile_status_stamp: Path, stable_status_stamp: Path) -> str: +def resolve_version_stamp( + version: str, volatile_status_stamp: Path, stable_status_stamp: Path +) -> str: """Resolve workspace status stamps format strings found in the version string Args: @@ -215,11 +243,14 @@ Returns: str: A resolved version string """ - lines = volatile_status_stamp.read_text().splitlines() + stable_status_stamp.read_text().splitlines() + lines = ( + volatile_status_stamp.read_text().splitlines() + + stable_status_stamp.read_text().splitlines() + ) for line in lines: if not line: continue - key, value = line.split(' ', maxsplit=1) + key, value = line.split(" ", maxsplit=1) stamp = "{" + key + "}" version = version.replace(stamp, value) @@ -227,82 +258,114 @@ def parse_args() -> argparse.Namespace: - parser = 
argparse.ArgumentParser(description='Builds a python wheel') - metadata_group = parser.add_argument_group( - "Wheel name, version and platform") - metadata_group.add_argument('--name', required=True, - type=str, - help="Name of the distribution") - metadata_group.add_argument('--version', required=True, - type=str, - help="Version of the distribution") - metadata_group.add_argument('--build_tag', type=str, default='', - help="Optional build tag for the distribution") - metadata_group.add_argument('--python_tag', type=str, default='py3', - help="Python version, e.g. 'py2' or 'py3'") - metadata_group.add_argument('--abi', type=str, default='none') - metadata_group.add_argument('--platform', type=str, default='any', - help="Target platform. ") + parser = argparse.ArgumentParser(description="Builds a python wheel") + metadata_group = parser.add_argument_group("Wheel name, version and platform") + metadata_group.add_argument( + "--name", required=True, type=str, help="Name of the distribution" + ) + metadata_group.add_argument( + "--version", required=True, type=str, help="Version of the distribution" + ) + metadata_group.add_argument( + "--build_tag", + type=str, + default="", + help="Optional build tag for the distribution", + ) + metadata_group.add_argument( + "--python_tag", + type=str, + default="py3", + help="Python version, e.g. 'py2' or 'py3'", + ) + metadata_group.add_argument("--abi", type=str, default="none") + metadata_group.add_argument( + "--platform", type=str, default="any", help="Target platform. 
" + ) output_group = parser.add_argument_group("Output file location") - output_group.add_argument('--out', type=str, default=None, - help="Override name of ouptut file") - output_group.add_argument('--name_file', type=Path, - help="A file where the canonical name of the " - "wheel will be written") + output_group.add_argument( + "--out", type=str, default=None, help="Override name of ouptut file" + ) + output_group.add_argument( + "--name_file", + type=Path, + help="A file where the canonical name of the " "wheel will be written", + ) - output_group.add_argument('--strip_path_prefix', - type=str, - action="append", - default=[], - help="Path prefix to be stripped from input package files' path. " - "Can be supplied multiple times. " - "Evaluated in order." - ) + output_group.add_argument( + "--strip_path_prefix", + type=str, + action="append", + default=[], + help="Path prefix to be stripped from input package files' path. " + "Can be supplied multiple times. " + "Evaluated in order.", + ) wheel_group = parser.add_argument_group("Wheel metadata") wheel_group.add_argument( - '--header', action='append', + "--header", + action="append", help="Additional headers to be embedded in the package metadata. " - "Can be supplied multiple times.") - wheel_group.add_argument('--classifier', action='append', - help="Classifiers to embed in package metadata. " - "Can be supplied multiple times") - wheel_group.add_argument('--python_requires', - help="Version of python that the wheel will work with") - wheel_group.add_argument('--description_file', - help="Path to the file with package description") - wheel_group.add_argument('--entry_points_file', - help="Path to a correctly-formatted entry_points.txt file") + "Can be supplied multiple times.", + ) + wheel_group.add_argument( + "--classifier", + action="append", + help="Classifiers to embed in package metadata. 
" + "Can be supplied multiple times", + ) + wheel_group.add_argument( + "--python_requires", help="Version of python that the wheel will work with" + ) + wheel_group.add_argument( + "--description_file", help="Path to the file with package description" + ) + wheel_group.add_argument( + "--entry_points_file", + help="Path to a correctly-formatted entry_points.txt file", + ) contents_group = parser.add_argument_group("Wheel contents") contents_group.add_argument( - '--input_file', action='append', + "--input_file", + action="append", help="'package_path;real_path' pairs listing " - "files to be included in the wheel. " - "Can be supplied multiple times.") + "files to be included in the wheel. " + "Can be supplied multiple times.", + ) contents_group.add_argument( - '--input_file_list', action='append', - help='A file that has all the input files defined as a list to avoid the long command' + "--input_file_list", + action="append", + help="A file that has all the input files defined as a list to avoid the long command", ) requirements_group = parser.add_argument_group("Package requirements") requirements_group.add_argument( - '--requires', type=str, action='append', - help="List of package requirements. Can be supplied multiple times.") + "--requires", + type=str, + action="append", + help="List of package requirements. Can be supplied multiple times.", + ) requirements_group.add_argument( - '--extra_requires', type=str, action='append', + "--extra_requires", + type=str, + action="append", help="List of optional requirements in a 'requirement;option name'. 
" - "Can be supplied multiple times.") + "Can be supplied multiple times.", + ) build_group = parser.add_argument_group("Building requirements") build_group.add_argument( - '--volatile_status_file', type=Path, - help="Pass in the stamp info file for stamping" + "--volatile_status_file", + type=Path, + help="Pass in the stamp info file for stamping", ) build_group.add_argument( - '--stable_status_file', type=Path, - help="Pass in the stamp info file for stamping" + "--stable_status_file", + type=Path, + help="Pass in the stamp info file for stamping", ) return parser.parse_args(sys.argv[1:]) @@ -312,7 +375,7 @@ arguments = parse_args() if arguments.input_file: - input_files = [i.split(';') for i in arguments.input_file] + input_files = [i.split(";") for i in arguments.input_file] else: input_files = [] @@ -321,7 +384,7 @@ with open(input_file) as _file: input_file_list = _file.read().splitlines() for _input_file in input_file_list: - input_files.append(_input_file.split(';')) + input_files.append(_input_file.split(";")) all_files = get_files_to_package(input_files) # Sort the files for reproducible order in the archive. 
@@ -330,21 +393,24 @@ strip_prefixes = [p for p in arguments.strip_path_prefix] if arguments.volatile_status_file and arguments.stable_status_file: - version = resolve_version_stamp(arguments.version, - arguments.volatile_status_file, - arguments.stable_status_file) + version = resolve_version_stamp( + arguments.version, + arguments.volatile_status_file, + arguments.stable_status_file, + ) else: version = arguments.version - with WheelMaker(name=arguments.name, - version=version, - build_tag=arguments.build_tag, - python_tag=arguments.python_tag, - abi=arguments.abi, - platform=arguments.platform, - outfile=arguments.out, - strip_path_prefixes=strip_prefixes - ) as maker: + with WheelMaker( + name=arguments.name, + version=version, + build_tag=arguments.build_tag, + python_tag=arguments.python_tag, + abi=arguments.abi, + platform=arguments.platform, + outfile=arguments.out, + strip_path_prefixes=strip_prefixes, + ) as maker: for package_filename, real_filename in all_files: maker.add_file(package_filename, real_filename) maker.add_wheelfile() @@ -352,34 +418,37 @@ description = None if arguments.description_file: if sys.version_info[0] == 2: - with open(arguments.description_file, - 'rt') as description_file: + with open(arguments.description_file, "rt") as description_file: description = description_file.read() else: - with open(arguments.description_file, 'rt', - encoding='utf-8') as description_file: + with open( + arguments.description_file, "rt", encoding="utf-8" + ) as description_file: description = description_file.read() extra_requires = collections.defaultdict(list) if arguments.extra_requires: for extra in arguments.extra_requires: - req, option = extra.rsplit(';', 1) + req, option = extra.rsplit(";", 1) extra_requires[option].append(req) classifiers = arguments.classifier or [] python_requires = arguments.python_requires or "" requires = arguments.requires or [] extra_headers = arguments.header or [] - maker.add_metadata(extra_headers=extra_headers, - 
description=description, - classifiers=classifiers, - python_requires=python_requires, - requires=requires, - extra_requires=extra_requires) + maker.add_metadata( + extra_headers=extra_headers, + description=description, + classifiers=classifiers, + python_requires=python_requires, + requires=requires, + extra_requires=extra_requires, + ) if arguments.entry_points_file: - maker.add_file(maker.distinfo_path( - "entry_points.txt"), arguments.entry_points_file) + maker.add_file( + maker.distinfo_path("entry_points.txt"), arguments.entry_points_file + ) maker.add_recordfile() @@ -390,5 +459,5 @@ arguments.name_file.write_text(maker.wheelname()) -if __name__ == '__main__': +if __name__ == "__main__": main()