# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Calls to build code."""
import attr
from recipe_engine import recipe_api
@attr.s
class Context:
_api = attr.ib()
checkout_root = attr.ib()
root = attr.ib()
options = attr.ib()


class BuildApi(recipe_api.RecipeApi):
    """Calls to build code."""

    CAS_DIGEST_PROPERTY_NAME = 'cas_build_digest'

    def create(self, checkout_root, options, root=None):
        """Create a build Context; the build root defaults to out/."""
        if not root:
            root = checkout_root / 'out'
        return Context(self.m, checkout_root, root, options)

    def __call__(self, ctx):
        """Run a complete build: install packages, 'gn gen', then Ninja."""
        self.install_packages(ctx)
        self.gn_gen(ctx)
        self.ninja(ctx)
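
    # A minimal usage sketch from a recipe (hypothetical caller code;
    # `options` is assumed to carry the packages, gn_args, and
    # ninja_targets fields referenced below):
    #
    #   ctx = api.build.create(checkout_root, options)
    #   api.build(ctx)  # install packages, 'gn gen', then ninja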

    def install_packages(self, ctx):
        """Install the packages requested by the build options."""
        if not ctx.options.packages:
            return
        with self.m.step.nest('install packages'):
            cmd = ['python', '-m', 'pw_cli', 'package', 'install']
            for package in ctx.options.packages:
                self.m.step(package, cmd + [package])
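
    # For example, packages=['arm', 'pico_sdk'] (illustrative names)
    # produces one step per package:
    #   python -m pw_cli package install arm
    #   python -m pw_cli package install pico_sdk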

    def gn_gen(self, ctx):
        """Generate the GN build directory for the checkout."""
        cmd = ['gn', 'gen']
        for gn_arg in ctx.options.gn_args:
            cmd.append(f'--args={gn_arg}')
        # Infrequently needed, but harmless to always pass.
        cmd.append('--export-compile-commands')
        cmd.append(ctx.root)
        with self.m.context(cwd=ctx.checkout_root):
            self.m.step('gn gen', cmd)
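
    # For example, gn_args=['pw_DEBUG=true'] (an illustrative arg) with
    # root=<checkout>/out composes roughly:
    #   gn gen --args=pw_DEBUG=true --export-compile-commands <checkout>/out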

    def get_gn_args(self, ctx, test_data=None):
        """Return the full set of GN args as a dict keyed by arg name."""
        context_kwargs = {'cwd': ctx.checkout_root} if ctx.checkout_root else {}
        with self.m.context(**context_kwargs):
            cmd = ['gn', 'args', ctx.root, '--list', '--json']
            args = self.m.step(
                'all gn args',
                cmd,
                stdout=self.m.json.output(),
                step_test_data=lambda: self.m.json.test_api.output_stream(
                    test_data or []
                ),
            ).stdout
        return {x['name']: x for x in args or ()}
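
    # The result maps each arg name to its 'gn args --list --json' entry;
    # the shape below is illustrative:
    #   {'pw_ARG': {'name': 'pw_ARG', 'current': {...}, 'default': {...}}}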

    def ninja(self, ctx):
        """Build the configured Ninja targets in the build directory."""
        cmd = ['ninja', '-C', ctx.root]
        cmd.extend(ctx.options.ninja_targets)
        with self.m.default_timeout():
            self.m.step('ninja', cmd)

    def archive_to_cas(self, ctx):
        """Upload the build directory to CAS, returning its digest."""
        # TODO(b/234879756) Only archive necessary files.
        with self.m.step.nest('archive to cas') as pres:
            digest = self.m.cas.archive('archive', ctx.root, ctx.root)
            pres.properties[self.CAS_DIGEST_PROPERTY_NAME] = digest
        return digest

    def download_from_cas(self, ctx, digest):
        """Download an archived build directory from CAS into ctx.root."""
        return self.m.cas.download('download from cas', digest, ctx.root)
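
    # Sketch of a build/test split using these two calls (hypothetical
    # caller code; the digest travels between tasks via the
    # cas_build_digest output property):
    #   digest = api.build.archive_to_cas(ctx)    # on the building task
    #   api.build.download_from_cas(ctx, digest)  # on the testing task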

    def log_longest_build_steps(self, ninja_log):
        """Parse the build log and log the longest-running build steps."""
        steps = []
        for line in ninja_log.splitlines():
            try:
                start_ms, end_ms, _, name, _ = line.split()
                duration = (int(end_ms) - int(start_ms)) / 1000.0
                steps.append((duration, name))
            except (ValueError, TypeError):
                # This processing is best-effort and should never be the
                # cause of a build failure. If there's something wrong with
                # this logfile, silently ignore the error; in that case it's
                # very likely something else also went wrong, and that
                # should be the error presented to the user.
                pass

        steps.sort(reverse=True)
        if steps:
            with self.m.step.nest('longest build steps'):
                for dur, name in steps[:10]:
                    with self.m.step.nest(name) as pres:
                        pres.step_summary_text = f'{dur:.1f}s'
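
    # .ninja_log data lines are whitespace-separated:
    #   start_ms end_ms restat_mtime target_name command_hash
    # For example, '3000 8000 0 long 0' parses to a 5.0-second step named
    # 'long'; malformed lines are skipped by the try/except above.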

    def save_logs(self, build_dir=None, export_dir=None, pres=None):
        """Save common build logs from the build directory.

        Read common build logs so they appear in LogDog, and if export_dir
        is set, copy these logs there. If there's a Ninja log, call
        log_longest_build_steps() on it.
        """
        globs = [
            '*.gn',
            '*.log',
            '*.json',
            '*.compdb',
            '*.graph',
            '*_log',
        ]

        # Mark these files as present in the mocked filesystem so test
        # runs exercise the read logic below.
        self.m.path.mock_add_file(build_dir / '.ninja_log')
        self.m.path.mock_add_file(build_dir / 'failure-summary.log')
        self.m.path.mock_add_file(build_dir / 'links.json')

        found_files = []
        with self.m.step.nest('logs'):
            with self.m.step.nest('glob'):
                for glob in globs:
                    test_data = []
                    if glob == '*.log':
                        test_data = [
                            '.ninja_log',
                            'failure-summary.log',
                            'links.json',
                        ]
                    found_files.extend(
                        self.m.file.glob_paths(
                            glob,
                            build_dir,
                            glob,
                            include_hidden=True,
                            test_data=test_data,
                        )
                    )

            # Read these files and discard them so the contents end up in
            # LogDog.
            ninja_log = None
            failure_summary_log = None
            for path in sorted(found_files):
                if not self.m.path.isfile(path):
                    continue  # pragma: no cover

                name = self.m.path.basename(path)

                test_data = ''
                if name == '.ninja_log':
                    test_data = (
                        '2000 5000 0 medium 0\n'
                        '3000 8000 0 long 0\n'
                        'malformed line\n'
                        '4000 5000 0 short 0\n'
                        '5000 x 0 malformed-end-time 0\n'
                    )
                elif name == 'failure-summary.log':
                    test_data = '[5/10] foo.c\nerror: ???\n'
                elif name == 'links.json':
                    test_data = [
                        {'description': 'description', 'url': 'https://url'},
                    ]

                # Deferring results isn't needed here, but since some
                # callers defer results and others don't, wrapping the
                # reads means we always have to call .get_result().
                with self.m.step.defer_results():
                    if name.endswith('.json'):
                        contents = self.m.file.read_json(
                            name, path, test_data=test_data
                        )
                    else:
                        contents = self.m.file.read_text(
                            name, path, test_data=test_data
                        )

                    # 'ninja.log' only shows up in projects that haven't
                    # rolled pwrev/114792.
                    # TODO(mohrr) Remove the ninja.log reference.
                    if name in ('.ninja_log', 'ninja.log'):
                        ninja_log = contents.get_result()
                    elif name in (
                        'failure-summary.log',
                        'ninja-failure-summary.log',
                    ):
                        failure_summary_log = contents.get_result()
                    elif name == 'links.json':
                        if pres:
                            for entry in contents.get_result():
                                pres.links[entry['description']] = entry['url']

            if failure_summary_log:
                with self.m.step.nest('failure summary') as fail_pres:
                    fail_pres.step_summary_text = (
                        self.m.buildbucket_util.summary_message(
                            failure_summary_log,
                            '(truncated, see "full contents" for details)',
                        )
                    )
                    fail_pres.status = 'FAILURE'
                    fail_pres.logs['full contents'] = failure_summary_log

            if ninja_log:
                self.log_longest_build_steps(ninja_log)

            if export_dir and found_files:
                log_dir = export_dir / 'build_logs'
                self.m.file.ensure_directory('mkdir build_logs', log_dir)
                with self.m.step.nest('copy'):
                    for path in found_files:
                        name = self.m.path.basename(path)
                        self.m.file.copy(name, path, log_dir / name)
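
    # Sketch of a typical call site (hypothetical; ctx comes from create()
    # above and pres is an open step presentation):
    #
    #   api.build.save_logs(ctx.root, export_dir=export_dir, pres=pres)
    #
    # Logs found in ctx.root are echoed to LogDog, a red 'failure summary'
    # step is shown if failure-summary.log exists, and copies land in
    # <export_dir>/build_logs/.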