blob: de7534c32acf8b60493978b08594786f42fce5bc [file] [log] [blame]
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Wrapper for 'pw presubmit' in the project source tree."""
import collections
import attr
from recipe_engine import recipe_api
from RECIPE_MODULES.fuchsia.utils import nice_duration
# Metadata files a step may export, mapped to the key/value pairs used as
# file.read_json test_data in _process_metadata(). Each entry <name> maps to
# an export file '<name>.json' in the step's export directory.
METADATA = {
    'binary_sizes': (('target', 12345), ('target.budget', 12346)),
    'test_runtimes': (('target', 200), ('target.max', 250)),
}
@attr.s
class Step:
    """One presubmit step plus its on-disk output locations.

    NOTE: attrs generates __init__ positionally from the declaration order
    below, so the field order is part of this class's interface.
    """

    _api = attr.ib()  # Recipe API handle (the owning module's self.m).
    name = attr.ib()  # Presubmit step name.
    dir = attr.ib()  # Output directory for this step.
    _export_dir_name = attr.ib(default=None)  # Export subdir name, if any.
    metadata = attr.ib(default=attr.Factory(dict))  # Parsed metadata JSON.

    @property
    def export_dir(self):
        """Path to this step's export directory, or None if not configured."""
        if not self._export_dir_name:
            return None  # pragma: no cover
        return self.dir.join(self._export_dir_name)
@attr.s
class PresubmitContext:
    """Options plus the ordered set of Step objects for one presubmit run.

    NOTE: attrs generates __init__ positionally from the declaration order
    below, so the field order is part of this class's interface.
    """

    _api = attr.ib()  # Recipe API handle (the owning module's self.m).
    options = attr.ib()  # Presubmit options (step/program names, flags).
    root = attr.ib()  # Presubmit output root directory.
    checkout_root = attr.ib()  # Root of the project checkout.
    # Steps keyed by name; OrderedDict preserves registration order.
    _step_objects = attr.ib(default=attr.Factory(collections.OrderedDict))

    def add_step(self, name, step):
        """Register a Step, replacing any previous step of the same name."""
        self._step_objects[name] = step

    @property
    def steps(self):
        """Registered Step objects, in insertion order (a dict view)."""
        return self._step_objects.values()
class PwPresubmitApi(recipe_api.RecipeApi):
    """Calls to 'pw presubmit' in the project source tree."""

    def _step(self, ctx, name):
        """Create a Step whose output directory is ctx.root/<name>."""
        return Step(
            self.m,
            name,
            ctx.root.join(name),
            export_dir_name=ctx.options.export_dir_name,
        )

    def init(self, checkout_root, options=None, root=None):
        """Build a PresubmitContext and resolve the list of steps to run.

        Args:
          checkout_root: Path to the project checkout.
          options: Presubmit options; falls back to self._options when None.
          root: Presubmit output root; defaults to checkout_root/'p'.

        Returns:
          A populated PresubmitContext.

        Raises:
          StepFailure: If the options name neither a step nor a program.
        """
        # BUGFIX: resolve the None fallback before touching options. The
        # previous code dereferenced options.command_name first, which raised
        # AttributeError whenever options was None, and the object it mutated
        # was not necessarily the one stored in the context.
        options = options or self._options
        options.command_name = options.command_name or 'python -m pw_cli'

        ctx = PresubmitContext(
            api=self.m,
            options=options,
            checkout_root=checkout_root,
            root=root or checkout_root.join('p'),
        )

        if not ctx.options.step and not ctx.options.program:
            raise self.m.step.StepFailure('no step or program properties')

        # Explicitly named steps are registered directly.
        for step_name in ctx.options.step:
            ctx.add_step(step_name, self._step(ctx, step_name))

        # Programs are expanded into their constituent steps by asking
        # 'pw presubmit --only-list-steps' for each one.
        if ctx.options.program:
            with self.m.step.nest('get steps from programs'):
                for program in ctx.options.program:
                    # To get step_test_data line to pass pylint.
                    raw_io_stream_output = (
                        self.m.raw_io.test_api.stream_output_text
                    )
                    program_steps = (
                        self._run(
                            ctx,
                            ['--program', program, '--only-list-steps'],
                            name=program,
                            stdout=self.m.raw_io.output_text(),
                            step_test_data=lambda: raw_io_stream_output(
                                '{0}_0\n{0}_1\n'.format(program),
                            ),
                        )
                        .stdout.strip()
                        .splitlines()
                    )
                    for step_name in program_steps:
                        ctx.add_step(step_name, self._step(ctx, step_name))

        return ctx

    def _step_timeout(self):
        """Compute a per-step timeout in seconds.

        Leaves a buffer before the overall build times out so logfiles can
        still be read and uploaded; never returns less than 30 seconds.
        """
        # Amount of time elapsed in the run.
        elapsed_time = (
            self.m.time.time() - self.m.buildbucket.build.start_time.seconds
        )
        # Amount of time before build times out.
        time_remaining = (
            self.m.buildbucket.build.execution_timeout.seconds - elapsed_time
        )
        # Give a buffer before build times out and kill this step then. This
        # should give enough time to read any logfiles and maybe upload to
        # logdog/GCS before the build times out.
        step_timeout = time_remaining - 60
        # If the timeout would be negative or very small set it to 30
        # seconds. We likely won't have enough information to debug these
        # steps, but in case they're fast there's no reason to kill them much
        # before the build is terminated.
        return max(step_timeout, 30)

    def _run(self, ctx, args, name='run', **kwargs):
        """Run 'pw presubmit' with the given extra args as a build step.

        Wraps the command for ResultDB when that module is enabled, and
        applies the timeout computed by _step_timeout().
        """
        cmd = ctx.options.command_name.split()
        cmd += [
            '--directory',
            ctx.checkout_root,
            '--loglevel',
            'debug',
            'presubmit',
            '--output-directory',
            ctx.root,
        ]
        if ctx.options.continue_after_build_error:
            cmd.append('--continue-after-build-error')
        cmd.extend(args)

        timeout = self._step_timeout()
        # Empty nested step whose name surfaces the computed timeout in the
        # build UI.
        with self.m.step.nest('timeout {}'.format(nice_duration(timeout))):
            pass

        if self.m.resultdb.enabled:
            return self.m.step(
                name,
                self.m.resultdb.wrap(
                    cmd,
                    base_variant={
                        'builder': self.m.buildbucket.builder_name,
                        'step': name,
                    },
                    include=True,
                ),
                timeout=timeout,
                **kwargs,
            )
        return self.m.step(name, cmd, timeout=timeout, **kwargs)

    def _process_metadata(self, step):
        """Read known '<name>.json' metadata files from the export dir."""
        if not step.export_dir:
            return  # pragma: no cover
        for name, test_data in METADATA.items():
            step.metadata.setdefault(name, {})
            json_path = step.export_dir.join(f'{name}.json')
            # NOTE(review): mock_add_file is a path test-API hook; presumably
            # a no-op outside of recipe tests -- confirm this is intended in
            # production runs.
            self.m.path.mock_add_file(json_path)
            if self.m.path.isfile(json_path):
                with self.m.step.nest(self.m.path.basename(json_path)):
                    step.metadata[name] = self.m.file.read_json(
                        'read', json_path, test_data=dict(test_data)
                    )

    def run(self, ctx, step, env=None, log_dir=None):
        """Run one presubmit step, then save its logs and metadata.

        Args:
          ctx: PresubmitContext from init().
          step: Step object to run.
          env: Optional environment object providing override_gn_args.
          log_dir: Optional directory to save logs into (a per-step
            subdirectory is created under it); defaults to the step's
            export directory.
        """
        with self.m.step.nest(step.name):
            args = []
            if ctx.options.only_on_changed_files:
                args.extend(('--base', 'HEAD~1'))
            elif not ctx.options.do_not_use_full_argument:
                args.append('--full')
            args.extend(('--step', step.name))

            if env and env.override_gn_args:
                for key, value in env.override_gn_args.items():
                    args.append('--override-gn-arg')
                    if isinstance(value, str):
                        # Quote string values so GN sees a string literal.
                        args.append(f'{key}="{value}"')
                    else:
                        args.append(f'{key}={value!r}')

            # defer_results so logs and metadata are still collected below
            # even when the presubmit step itself fails.
            with self.m.step.defer_results():
                self._run(ctx, args, name=step.name)

                if log_dir:
                    # BUGFIX: the previous code assigned this to an unused
                    # 'step_log_dir' local; the else branch shows 'log_dir'
                    # itself was meant to be rebound to a per-step subdir.
                    log_dir = log_dir.join(step.name)
                else:
                    log_dir = step.export_dir

                if step.export_dir:
                    self.m.file.ensure_directory(
                        'mkdir {}'.format(ctx.options.export_dir_name),
                        step.export_dir,
                    )
                if log_dir and log_dir != step.export_dir:
                    self.m.file.ensure_directory('create log dir', log_dir)

                self.m.build.save_logs(step.dir, log_dir)

                self._process_metadata(step)

    def build_id(self, ctx):
        """Return the project's build id, or None if unknown.

        Runs '<command_name> build-id' and returns its trimmed stdout.
        Returns None when the command fails or prints '0' (which the tool
        uses to mean "no build id").
        """
        command = ctx.options.command_name.split()
        command.extend(['--directory', ctx.checkout_root, 'build-id'])
        step_data = self.m.step(
            'get build id',
            command,
            stdout=self.m.raw_io.output_text(),
            step_test_data=lambda: self.m.raw_io.test_api.stream_output_text(
                '123-1234567890'
            ),
            ok_ret='any',
        )
        namespace = None
        if step_data.exc_result.retcode == 0:
            namespace = step_data.stdout.strip()
            if namespace == '0':
                namespace = None
        return namespace