blob: fb28031d6946579f2db4294cc5cec01476ea20c0 [file] [log] [blame]
# Copyright 2023 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Utilities for saving logs."""
from recipe_engine import config_types, engine_types, recipe_api
class PresubmitError(recipe_api.StepFailure):
    """Step failure raised when a presubmit step fails.

    Carries the (possibly truncated) failure summary text as its message.
    """
class SaveLogsApi(recipe_api.RecipeApi):
    """Utilities for saving logs."""

    # Re-exported so callers can catch `api.save_logs.PresubmitError`.
    PresubmitError = PresubmitError

    def __call__(
        self,
        dirs: list[config_types.Path],
        export_dir: config_types.Path | None = None,
        pres: engine_types.StepPresentation | None = None,
        step_passed: bool = True,
        step_name: str | None = None,
    ) -> None:
        """Save common build logs from the build directory.

        Read common build logs so they appear in logdog and if export_dir is
        set copy these logs there. If there's a ninja log call
        log_longest_build_steps() on it.

        Args:
          dirs: Build directories to search for log files.
          export_dir: If set, found logs are also copied into
            export_dir / 'build_logs'.
          pres: Step presentation that receives links parsed from any
            links.json file found.
          step_passed: Whether the step being summarized passed. When False
            the 'logs' step is marked FAILURE and, if a failure summary was
            found, a PresubmitError is raised.
          step_name: Name of the failed step, used to prefix the
            PresubmitError message.

        Raises:
          PresubmitError: If step_passed is False and a failure summary log
            was found.
        """
        # Filename patterns (relative to each entry of `dirs`) considered
        # worth surfacing as logs.
        globs: list[str] = [
            'pip_install_log.txt',
            '*/pip_install_log.txt',
            '*/*/pip_install_log.txt',
            '*.bat',
            '*.compdb',
            '*.gn',
            '*.graph',
            '*.json',
            '*.log',
            '*.sh',
            '*.stderr',
            '*.stdout',
            '*.txt',
            '*/*.cfg',
            '*/*.ensure',
            '*/*.json',
            '*/*.log',
            '*/*.txt',
            '*_log',
            'pigweed_environment.gni',
            'coverage_reports/*.tar.gz',
        ]

        # Simulation-test fixtures: pretend these files exist so recipe
        # tests exercise the glob/read/copy paths below.
        self.m.path.mock_add_file(dirs[0] / '.ninja_log')
        self.m.path.mock_add_file(dirs[0] / 'coverage_reports' / 'foo.tar.gz')
        self.m.path.mock_add_file(dirs[0] / 'failure-summary.log')
        self.m.path.mock_add_file(dirs[0] / 'foo.log')
        self.m.path.mock_add_file(dirs[0] / 'links.json')

        found_files: set[config_types.Path] = set()

        with self.m.step.nest('logs') as logs_pres:
            if not step_passed:
                logs_pres.status = 'FAILURE'

            with self.m.step.nest('glob'):
                for glob in globs:
                    test_data: list[str] = []
                    if glob == '*.log':
                        test_data = [
                            '.ninja_log',
                            'coverage_reports/foo.tar.gz',
                            'failure-summary.log',
                            'foo.log',
                            'links.json',
                            'links.json',
                            'CMakeCache.txt',
                        ]

                    for dir in dirs:
                        try:
                            # Bound each glob (30s) so a slow filesystem
                            # can't stall log collection indefinitely.
                            with self.m.time.timeout(30):
                                found_files.update(
                                    self.m.file.glob_paths(
                                        glob,
                                        dir,
                                        glob,
                                        include_hidden=True,
                                        test_data=test_data,
                                    )
                                )
                        # Ok to ignore these failures-they're just loading logs.
                        except self.m.step.InfraFailure:  # pragma: no cover
                            pass

            def ignore(path: config_types.Path) -> bool:
                # Files matched by the globs above that are noise, not logs.
                ignored_names: set[str] = set(['CMakeCache.txt'])
                return self.m.path.basename(path) in ignored_names

            found_files: set[config_types.Path] = set(
                x for x in found_files if not ignore(x)
            )

            # Read these files and discard them so contents will be in logdog.
            ninja_log: str | None = None
            failure_summary_log: str | None = None

            for path in sorted(found_files):
                if not self.m.path.isfile(path):
                    continue  # pragma: no cover

                # A file may live under more than one of `dirs`; use the
                # longest relative path as the step/log name.
                names: list[str] = [
                    self.m.path.relpath(path, dir) for dir in dirs
                ]
                name: str = max(names, key=len)

                test_data = ''
                if name == '.ninja_log':
                    # Deliberately includes malformed lines to cover the
                    # error handling in log_longest_build_steps().
                    test_data = (
                        '2000 5000 0 medium 0\n'
                        '3000 8000 0 long 0\n'
                        'malformed line\n'
                        '4000 5000 0 short 0\n'
                        '5000 x 0 malformed-end-time 0\n'
                    )
                elif name == 'failure-summary.log':
                    test_data = '[5/10] foo.c\nerror: ???\n'
                elif name == 'links.json':
                    test_data = [
                        {
                            'description': 'description',
                            'url': 'https://url',
                        },
                    ]

                # JSON and text could have parse errors. Fall back to raw if
                # they fail.
                if name.endswith('.json'):
                    read_funcs = (self.m.file.read_json, self.m.file.read_raw)
                elif name.endswith(('.gz', '.bz2')):
                    # Compressed archives are only read raw.
                    read_funcs = (self.m.file.read_raw,)
                else:
                    read_funcs = (self.m.file.read_text, self.m.file.read_raw)

                for read_func in read_funcs:
                    try:
                        contents = read_func(name, path, test_data=test_data)
                        break
                    except Exception:
                        contents = None
                        # If we're changing the function to be used, it's likely
                        # the original test_data will no longer be useful.
                        test_data = None

                if not contents:
                    continue

                # Stash files that get extra processing after this loop.
                if name == '.ninja_log':
                    ninja_log = contents
                elif name in (
                    'failure-summary.log',
                    'ninja-failure-summary.log',
                ):
                    failure_summary_log = contents
                elif name == 'links.json':
                    if pres:
                        # Surface entries from links.json as step links.
                        for entry in contents:
                            pres.links[entry['description']] = entry['url']

            short_failure_summary: str | None = None
            if failure_summary_log:
                with self.m.step.nest('failure summary') as fail_pres:
                    # Truncate so the text fits in the step summary; the
                    # untruncated version is attached as a log below.
                    short_failure_summary = self.m.buildbucket_util.summary_message(
                        failure_summary_log,
                        '(truncated, see "full contents" for details)',
                    )
                    fail_pres.step_summary_text = short_failure_summary
                    fail_pres.status = 'FAILURE'
                    fail_pres.logs['full contents'] = failure_summary_log

            if ninja_log:
                self.log_longest_build_steps(ninja_log)

            if export_dir and found_files:
                log_dir: config_types.Path = export_dir / 'build_logs'
                self.m.file.ensure_directory('mkdir build_logs', log_dir)
                with self.m.step.nest('copy'):
                    for path in sorted(found_files):
                        name: str = self.m.path.basename(path)
                        self.m.file.copy(name, path, log_dir / name)

            if not step_passed and short_failure_summary:
                if step_name:
                    raise PresubmitError(
                        f'{step_name} failed:\n\n{short_failure_summary}'
                    )
                raise PresubmitError(short_failure_summary)  # pragma: no cover

    def log_longest_build_steps(self, ninja_log: str) -> None:
        """Parse the build log and log the longest-running build steps.

        Args:
          ninja_log: Contents of a .ninja_log file (not a path). Each
            well-formed line has five whitespace-separated fields, starting
            with start and end times in milliseconds and with the step name
            in the fourth field.
        """
        # (duration in seconds, step name) pairs.
        steps: list[tuple[float, str]] = []
        for line in ninja_log.splitlines():
            try:
                start_ms, end_ms, _, name, _ = line.split()
                duration = (int(end_ms) - int(start_ms)) / 1000.0
                steps.append((duration, name))
            except (ValueError, TypeError):
                # This processing is best-effort and should never be the cause
                # of a build failure. In case there's something wrong with this
                # logfile silently ignore the error--in that case it's very
                # likely something else also went wrong and that should be the
                # error presented to the user.
                pass
        steps.sort(reverse=True)
        if steps:
            with self.m.step.nest('longest build steps'):
                # One nested step per target, slowest first, top ten only.
                for dur, name in steps[0:10]:
                    with self.m.step.nest(name) as pres:
                        pres.step_summary_text = f'{dur:.1f}s'