# Copyright 2023 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Bazel-related functions."""
from __future__ import annotations

import dataclasses
import re
import shlex
from typing import TYPE_CHECKING

from PB.recipe_modules.pigweed.bazel.options import Options
from recipe_engine import recipe_api

if TYPE_CHECKING:  # pragma: no cover
    from recipe_engine import config_types
    from RECIPE_MODULES.pigweed.checkout import api as checkout_api


@dataclasses.dataclass
class BazelRunner:
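    """Runs Bazel (via bazelisk) against a checkout, per the given Options."""
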
    api: recipe_api.RecipeApi
    checkout: checkout_api.CheckoutContext
    options: Options
    _bazel: config_types.Path | None = None
    continue_after_build_error: bool = False
    download_all_artifacts: bool = False

    def _ensure_bazelisk(self) -> config_types.Path:
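        """Installs bazelisk from CIPD and returns the path to the binary."""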
        ensure_file = self.api.cipd.EnsureFile()
        ensure_file.add_package(
            'fuchsia/third_party/bazelisk/${platform}',
            self.options.bazelisk_version or 'latest',
        )
        root = self.api.path.mkdtemp()
        self.api.cipd.ensure(root, ensure_file, name='ensure bazelisk')
        return root / 'bazelisk'

    def ensure(self) -> config_types.Path:
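        """Returns the cached bazelisk path, installing it on first use."""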
        if self._bazel:
            return self._bazel

        self._bazel = self._ensure_bazelisk()
        self.api.step('bazel version', [self._bazel, 'version'])
        return self._bazel

    def _override_args(self) -> list[str]:
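        """Returns override flags mapping each overridden repo to its local path."""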
        if self.api.path.exists(self.checkout.root / 'MODULE.bazel'):
            # We're in a bzlmod-managed workspace.
            flag = '--override_module'  # pragma: no cover
        else:
            # We're in a traditional workspace.
            flag = '--override_repository'

        return [
            f'{flag}={repo}={path}'
            for repo, path in self.checkout.bazel_overrides.items()
        ]

    def run(self, **kwargs) -> None:
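        """Runs the configured Bazel invocations and presubmit programs."""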
        config_name = self.options.config_path or 'pigweed.json'
        config_path = self.checkout.root / config_name
        self.api.path.mock_add_file(config_path)

        config = {}
        if self.api.path.isfile(config_path):
            config = self.api.file.read_json(
                f'read {config_name}',
                config_path,
                test_data={
                    'pw': {
                        'bazel_presubmit': {
                            'remote_cache': True,
                            'upload_local_results': True,
                            'programs': {
                                'default': [
                                    ['build', '//...'],
                                    ['test', '//...'],
                                ],
                            },
                        },
                    },
                },
            )

        config = config.get('pw', config).get('bazel_presubmit', config)

        base_args: list[str] = []

        # Don't limit the amount Bazel will write to stdout/stderr.
        base_args.append('--experimental_ui_max_stdouterr_bytes=-1')

        # Unless specifically requested, don't download the remote build
        # outputs to the local machine, since we will not use them.
        if self.download_all_artifacts:
            base_args.append('--remote_download_outputs=all')
        else:
            base_args.append('--remote_download_outputs=minimal')

        if config.get('remote'):
            # TODO: b/368128573 - Support remote execution on MacOS.
            if self.api.platform.is_linux:
                base_args.append('--config=remote')
            else:
                self.api.step.empty(
                    'ignoring remote because not running on Linux'
                )
        elif config.get('remote_cache'):
            # --config=remote already implies --config=remote_cache.
            base_args.append('--config=remote_cache')
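
        # Pick the RBE/BES instance based on the builder's project and
        # whether this build is a tryjob.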
        if self.api.buildbucket.build.builder.project == 'pigweed':
            instance_name = 'pigweed-rbe-open'
        else:
            instance_name = 'pigweed-rbe-private'
        if self.api.buildbucket_util.is_tryjob:
            instance_name += '-pre'
        base_args.append(f'--bes_instance_name={instance_name}')

        if instance_name == 'pigweed-rbe-open':
            # Ted messed up and gave the pigweed-rbe-open RBE instance a
            # different name (default-instance instead of default_instance).
            # Sadly this is annoying to fix because instances cannot be
            # renamed, and you can't have more than one instance in a GCP
            # region.
            #
            # TODO: b/312215590 - Fix this.
            base_args.append(
                '--remote_instance_name=projects/pigweed-rbe-open/instances/default-instance'
            )
        else:
            base_args.append(
                f'--remote_instance_name=projects/{instance_name}/instances/default_instance'
            )

        if config.get('upload_local_results'):
            if not config.get('remote_cache'):
                self.api.step.empty(
                    'ignoring upload_local_results since remote_cache is False'
                )
            else:
                base_args.append('--remote_upload_local_results=true')

        base_args.extend(self._override_args())

        if self.continue_after_build_error:
            base_args.append('--keep_going')

        with (
            self.api.context(cwd=self.checkout.root),
            self.api.defer.context() as defer,
        ):
            for invocation in self.options.invocations:
                assert invocation.args
                name: str = ' '.join(['bazel'] + list(invocation.args))
                defer(
                    self.api.step,
                    name,
                    [self.ensure(), *invocation.args, *base_args],
                    **kwargs,
                )
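
            # Programs are named lists of bazel invocations, defined in the
            # checkout's config file (pigweed.json by default) under
            # pw.bazel_presubmit.programs.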
            programs = config.get('programs', {})
            for program in self.options.program or ('default',):
                with self.api.step.nest(program):
                    if program not in programs:
                        raise self.api.step.InfraFailure(  # pragma: no cover
                            f'{program} not in {programs.keys()}'
                        )
                    if not programs[program]:
                        raise self.api.step.InfraFailure(  # pragma: no cover
                            f'{program} is empty'
                        )
                    for args in programs[program]:
                        json_path = self.api.path.mkdtemp() / 'metadata.json'
                        cmd = [
                            self.api.bazel.resource('wrapper.py'),
                            '--json',
                            self.api.json.output(leak_to=json_path),
                            '--',
                            self.ensure(),
                            *args,
                            *base_args,
                        ]

                        with self.api.step.nest(shlex.join(args)):
                            future = self.api.futures.spawn(
                                defer,
                                self.api.step,
                                'bazel',
                                cmd,
                                **kwargs,
                            )

                            # Ensure the bazel step shows up before the
                            # resultstore link step.
                            self.api.time.sleep(1)
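
                            # Poll for the metadata file so the resultstore
                            # link can be surfaced while bazel is still
                            # running.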
                            def read_json() -> bool:
                                if not self.api.path.isfile(json_path):
                                    return False

                                data = self.api.file.read_json(
                                    f'read {i}',
                                    json_path,
                                    test_data=dict(
                                        resultstore='https://result.store/',
                                    ),
                                )
                                if 'resultstore' not in data:
                                    return False  # pragma: no cover
                                pres.links['resultstore'] = data['resultstore']
                                pres.step_summary_text = ''
                                return True

                            found_resultstore_link = False
                            with self.api.step.nest(
                                'resultstore link'
                            ) as pres:
                                pres.step_summary_text = 'link not found'
                                for i in range(1, 5):
                                    self.api.time.sleep(i)
                                    if i > 1:
                                        self.api.path.mock_add_file(json_path)
                                    if read_json():
                                        found_resultstore_link = True
                                        break
                                    if future.done:
                                        break  # pragma: no cover

                                _ = future.result()

                                if not found_resultstore_link:
                                    read_json()  # pragma: no cover


class BazelApi(recipe_api.RecipeApi):
    """Bazel utilities."""

    BazelRunner = BazelRunner

    def new_runner(
        self,
        checkout: checkout_api.CheckoutContext,
        options: Options | None,
        *,
        continue_after_build_error: bool = False,
        download_all_artifacts: bool = True,
    ) -> BazelRunner:
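        """Returns a BazelRunner bound to this module's API and the checkout."""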
        return BazelRunner(
            self.m,
            checkout=checkout,
            options=options,
            continue_after_build_error=continue_after_build_error,
            download_all_artifacts=download_all_artifacts,
        )