# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Calls to build code."""
from recipe_engine import recipe_api
class BuildApi(recipe_api.RecipeApi):
    """Calls to build code."""

    # Name of the output property under which archive_to_cas() publishes the
    # CAS digest of the build directory (see archive_to_cas below).
    CAS_DIGEST_PROPERTY_NAME = 'cas_build_digest'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Build output directory (a recipe Path). Populated in initialize()
        # rather than here because path APIs are not usable at construction
        # time in the recipe engine lifecycle.
        self.dir = None

    def initialize(self):
        """Set the build directory to <start_dir>/build."""
        self.dir = self.m.path['start_dir'].join('build')

    def gn_gen(self, checkout_dir, options):
        """Run `gn gen` to generate ninja files in self.dir.

        Args:
          checkout_dir: Path to the checkout; used as cwd for the `gn`
            invocation.
          options: Options object providing a `gn_args` iterable of GN
            argument strings; each is passed as its own `--args=` flag.
        """
        cmd = ['gn', 'gen']
        for gn_arg in options.gn_args:
            cmd.append('--args={}'.format(gn_arg))
        # Infrequently needed but harmless to always add this.
        cmd.append('--export-compile-commands')
        cmd.append(self.dir)
        with self.m.context(cwd=checkout_dir):
            self.m.step('gn gen', cmd)

    def get_gn_args(self, checkout_root=None, test_data=None):
        """Return all GN args for the build as a dict keyed by arg name.

        Runs `gn args <build_dir> --list --json` and indexes each JSON
        record by its 'name' field.

        Args:
          checkout_root: Optional path used as cwd for the `gn` step.
          test_data: JSON-serializable mock stdout for simulation tests.

        Returns:
          Dict mapping each GN arg name to its full JSON record.
        """
        context_kwargs = {'cwd': checkout_root} if checkout_root else {}
        with self.m.context(**context_kwargs):
            cmd = ['gn', 'args', self.dir, '--list', '--json']
            args = self.m.step(
                'all gn args',
                cmd,
                stdout=self.m.json.output(),
                step_test_data=lambda: self.m.json.test_api.output_stream(
                    test_data or []
                ),
            ).stdout
            # stdout may be None (no parsed output); treat that as empty so
            # callers always get a dict back.
            return {x['name']: x for x in args or ()}

    def ninja(self, options):
        """Run ninja in the build directory on options.ninja_targets."""
        cmd = ['ninja', '-C', self.dir]
        cmd.extend(options.ninja_targets)
        # Builds can run long; wrap in the module's default timeout.
        with self.m.default_timeout():
            self.m.step('ninja', cmd)

    def __call__(self, checkout_dir, options):
        """Convenience entry point: `gn gen` followed by `ninja`."""
        self.gn_gen(checkout_dir, options)
        self.ninja(options)

    def archive_to_cas(self):
        """Upload the build directory to CAS.

        The resulting digest is exported as the output property named by
        CAS_DIGEST_PROPERTY_NAME so a later build can retrieve it with
        download_from_cas().
        """
        # TODO(b/234879756) Only archive necessary files.
        with self.m.step.nest('archive to cas') as pres:
            digest = self.m.cas.archive('archive', self.dir, self.dir)
            pres.properties[self.CAS_DIGEST_PROPERTY_NAME] = digest

    def download_from_cas(self, digest):
        """Download an archived build directory from CAS into self.dir."""
        return self.m.cas.download('download from cas', digest, self.dir)

    def log_longest_build_steps(self, ninja_log):
        """Parse the build log and log the longest-running build steps."""
        steps = []
        for line in ninja_log.splitlines():
            try:
                # Each well-formed line appears to be five fields:
                #   <start_ms> <end_ms> <mtime> <target> <cmd_hash>
                # (matches the mock data used in save_logs). Header or
                # malformed lines fail the unpack or int() and are skipped.
                start_ms, end_ms, _, name, _ = line.split()
                # Timestamps are in milliseconds; convert to seconds.
                duration = (int(end_ms) - int(start_ms)) / 1000.0
                steps.append((duration, name))
            except (ValueError, TypeError):
                # This processing is best-effort and should never be the cause
                # of a build failure. In case there's something wrong with this
                # logfile silently ignore the error--in that case it's very
                # likely something else also went wrong and that should be the
                # error presented to the user.
                pass
        # Longest first; ties broken by target name (tuple ordering).
        steps.sort(reverse=True)
        if steps:
            with self.m.step.nest('longest build steps'):
                # Surface only the top ten, one nested step per target.
                for dur, name in steps[0:10]:
                    with self.m.step.nest(name) as pres:
                        pres.step_summary_text = '{:.1f}s'.format(dur)

    def save_logs(self, build_dir=None, export_dir=None, pres=None):
        """Save common build logs from the build directory.

        Read common build logs so they appear in logdog and if export_dir is
        set copy these logs there. If there's a ninja log call
        log_longest_build_steps() on it.

        Args:
          build_dir: Directory to scan for logs; defaults to self.dir.
          export_dir: If set, found logs are copied to
            <export_dir>/build_logs.
          pres: Optional step presentation; entries from links.json are
            attached to it as links.
        """
        if build_dir is None:
            build_dir = self.dir

        # Filename patterns treated as "build logs".
        globs = [
            '*.gn',
            '*.log',
            '*.json',
            '*.compdb',
            '*.graph',
            '*_log',
        ]

        # NOTE(review): these mock_add_file calls look simulation-only (they
        # make the mocked filesystem report these files as present in recipe
        # tests) — confirm they are no-ops in production runs.
        self.m.path.mock_add_file(build_dir.join('.ninja_log'))
        self.m.path.mock_add_file(build_dir.join('failure-summary.log'))
        self.m.path.mock_add_file(build_dir.join('links.json'))

        found_files = []
        with self.m.step.nest('logs'):
            with self.m.step.nest('glob'):
                for glob in globs:
                    # Mock glob results only for the '*.log' pattern so the
                    # simulated run exercises the well-known files below.
                    test_data = []
                    if glob == '*.log':
                        test_data = [
                            '.ninja_log',
                            'failure-summary.log',
                            'links.json',
                        ]
                    found_files.extend(
                        self.m.file.glob_paths(
                            glob,
                            build_dir,
                            glob,
                            include_hidden=True,
                            test_data=test_data,
                        )
                    )

            # Read these files and discard them so contents will be in logdog.
            ninja_log = None
            failure_summary_log = None
            for path in sorted(found_files):
                if not self.m.path.isfile(path):
                    continue  # pragma: no cover

                name = self.m.path.basename(path)

                # Simulation-only mock contents for the well-known files.
                test_data = ''
                if name == '.ninja_log':
                    test_data = (
                        '2000 5000 0 medium 0\n'
                        '3000 8000 0 long 0\n'
                        'malformed line\n'
                        '4000 5000 0 short 0\n'
                        '5000 x 0 malformed-end-time 0\n'
                    )
                elif name == 'failure-summary.log':
                    test_data = '[5/10] foo.c\nerror: ???\n'
                elif name == 'links.json':
                    test_data = [
                        {'description': 'description', 'url': 'https://url',},
                    ]

                # No need to defer results here, but since some callers will be
                # deferring results and others won't this makes it so we always
                # need to call .get_result().
                with self.m.step.defer_results():
                    if name.endswith('.json'):
                        contents = self.m.file.read_json(
                            name, path, test_data=test_data
                        )
                    else:
                        contents = self.m.file.read_text(
                            name, path, test_data=test_data
                        )

                # 'ninja.log' won't show up except in projects that haven't
                # rolled pwrev/114792.
                # TODO(mohrr) Remove ninja.log reference.
                if name in ('.ninja_log', 'ninja.log'):
                    ninja_log = contents.get_result()
                elif name in (
                    'failure-summary.log',
                    'ninja-failure-summary.log',
                ):
                    failure_summary_log = contents.get_result()
                elif name == 'links.json':
                    if pres:
                        for entry in contents.get_result():
                            pres.links[entry['description']] = entry['url']

            # Surface the failure summary prominently in the build UI, with
            # the full text attached as a log in case it is truncated.
            if failure_summary_log:
                with self.m.step.nest('failure summary') as fail_pres:
                    fail_pres.step_summary_text = self.m.buildbucket_util.summary_message(
                        failure_summary_log,
                        '(truncated, see "full contents" for details)',
                    )
                    fail_pres.status = 'FAILURE'
                    fail_pres.logs['full contents'] = failure_summary_log

            if ninja_log:
                self.log_longest_build_steps(ninja_log)

        # Optionally export everything that was found to build_logs/.
        if export_dir and found_files:
            log_dir = export_dir.join('build_logs')
            self.m.file.ensure_directory('mkdir build_logs', log_dir)
            with self.m.step.nest('copy'):
                for path in found_files:
                    name = self.m.path.basename(path)
                    self.m.file.copy(name, path, log_dir.join(name))