| # Copyright 2023 The Pigweed Authors |
| # |
| # Licensed under the Apache License, Version 2.0 (the "License"); you may not |
| # use this file except in compliance with the License. You may obtain a copy of |
| # the License at |
| # |
| # https://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
| # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
| # License for the specific language governing permissions and limitations under |
| # the License. |
| """Utilities for saving logs.""" |
| |
| from typing import List, Optional, Set, Tuple |
| |
| from recipe_engine import config_types, engine_types, recipe_api |
| |
| |
| class SaveLogsApi(recipe_api.RecipeApi): |
| """Utilities for saving logs.""" |
| |
    def __call__(
        self,
        dirs: List[config_types.Path],
        export_dir: Optional[config_types.Path] = None,
        pres: Optional[engine_types.StepPresentation] = None,
    ) -> None:
| """Save common build logs from the build directory. |
| |
| Read common build logs so they appear in logdog and if export_dir is |
| set copy these logs there. If there's a ninja log call |
| log_longest_build_steps() on it. |
| """ |
| |
| globs: List[str] = [ |
| '**/pip_install_log.txt', |
| '*.bat', |
| '*.compdb', |
| '*.gn', |
| '*.graph', |
| '*.json', |
| '*.log', |
| '*.sh', |
| '*.stderr', |
| '*.stdout', |
| '*.txt', |
| '*/*.cfg', |
| '*/*.ensure', |
| '*/*.json', |
| '*/*.log', |
| '*/*.txt', |
| '*_log', |
| 'pigweed_environment.gni', |
| 'coverage_reports/*.tar.gz', |
| ] |
| self.m.path.mock_add_file(dirs[0] / '.ninja_log') |
| self.m.path.mock_add_file(dirs[0] / 'coverage_reports' / 'foo.tar.gz') |
| self.m.path.mock_add_file(dirs[0] / 'failure-summary.log') |
| self.m.path.mock_add_file(dirs[0] / 'links.json') |
| |
| found_files: Set[config_types.Path] = set() |
| |
| with self.m.step.nest('logs'): |
| with self.m.step.nest('glob'): |
| for glob in globs: |
                    test_data: List[str] = []
                    if glob == '*.log':
                        # Simulation-only results for recipe tests, attached
                        # to a single glob so they're declared in one place;
                        # the names need not match the pattern. The repeated
                        # 'links.json' collapses in the found_files set.
                        test_data = [
                            '.ninja_log',
                            'coverage_reports/foo.tar.gz',
                            'failure-summary.log',
                            'links.json',
                            'links.json',
                            'CMakeCache.txt',
                        ]
| |
                    for directory in dirs:
                        # glob_paths(name, source, pattern): the glob string
                        # doubles as the step name and the match pattern.
                        found_files.update(
                            self.m.file.glob_paths(
                                glob,
                                directory,
                                glob,
                                include_hidden=True,
                                test_data=test_data,
                            )
                        )
| |
            def ignore(path: config_types.Path) -> bool:
                ignored_names: Set[str] = {'CMakeCache.txt'}
                return self.m.path.basename(path) in ignored_names
| |
            found_files = {x for x in found_files if not ignore(x)}
| |
            # Read each file so its contents end up in logdog; most results
            # are discarded, but a few specific files are handled below.
| ninja_log: Optional[str] = None |
| failure_summary_log: Optional[str] = None |
| for path in sorted(found_files): |
| if not self.m.path.isfile(path): |
| continue # pragma: no cover |
| names: List[str] = [ |
| self.m.path.relpath(path, dir) for dir in dirs |
| ] |
| name: str = max(names, key=len) |
| |
| test_data = '' |
| if name == '.ninja_log': |
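                    # Fake .ninja_log contents for recipe tests. Each line is
                    # 'start_ms end_ms restat_mtime name cmd_hash'; the
                    # malformed lines exercise the error handling in
                    # log_longest_build_steps().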
| test_data = ( |
| '2000 5000 0 medium 0\n' |
| '3000 8000 0 long 0\n' |
| 'malformed line\n' |
| '4000 5000 0 short 0\n' |
| '5000 x 0 malformed-end-time 0\n' |
| ) |
| |
| elif name == 'failure-summary.log': |
| test_data = '[5/10] foo.c\nerror: ???\n' |
| |
| elif name == 'links.json': |
| test_data = [ |
                        {'description': 'description', 'url': 'https://url'},
| ] |
| |
                # Deferring isn't required for a single read, but wrapping
                # the read in defer_results() means `contents` is always a
                # deferred result, so .get_result() is called the same way
                # whether or not the caller defers results.
                with self.m.step.defer_results():
| if name.endswith('.json'): |
| read_func = self.m.file.read_json |
| elif name.endswith(('.gz', '.bz2')): |
| read_func = self.m.file.read_raw |
| else: |
| read_func = self.m.file.read_text |
| |
| contents = read_func(name, path, test_data=test_data) |
| |
| if name == '.ninja_log': |
| ninja_log = contents.get_result() |
| elif name in ( |
| 'failure-summary.log', |
| 'ninja-failure-summary.log', |
| ): |
| failure_summary_log = contents.get_result() |
| elif name == 'links.json': |
| if pres: |
| for entry in contents.get_result(): |
| pres.links[entry['description']] = entry['url'] |
| |
            if failure_summary_log:
                with self.m.step.nest('failure summary') as fail_pres:
                    fail_pres.step_summary_text = (
                        self.m.buildbucket_util.summary_message(
                            failure_summary_log,
                            '(truncated, see "full contents" for details)',
                        )
                    )
                    fail_pres.status = 'FAILURE'
                    fail_pres.logs['full contents'] = failure_summary_log
| |
| if ninja_log: |
| self.log_longest_build_steps(ninja_log) |
| |
| if export_dir and found_files: |
| log_dir: config_types.Path = export_dir / 'build_logs' |
| self.m.file.ensure_directory('mkdir build_logs', log_dir) |
| with self.m.step.nest('copy'): |
| for path in sorted(found_files): |
| name: str = self.m.path.basename(path) |
| self.m.file.copy(name, path, log_dir / name) |
| |
    def log_longest_build_steps(self, ninja_log: str) -> None:
| """Parse the build log and log the longest-running build steps.""" |
| steps: List[Tuple[int, str]] = [] |
| for line in ninja_log.splitlines(): |
| try: |
| start_ms, end_ms, _, name, _ = line.split() |
| duration = (int(end_ms) - int(start_ms)) / 1000.0 |
| steps.append((duration, name)) |
            except (ValueError, TypeError):
                # This processing is best-effort and should never cause a
                # build failure. If something is wrong with this logfile,
                # silently skip the line; in that case it's very likely
                # something else also went wrong, and that should be the
                # error presented to the user.
                pass
| |
        # Sort by duration, longest first, and report the ten slowest steps.
        steps.sort(reverse=True)

        if steps:
            with self.m.step.nest('longest build steps'):
                for duration, name in steps[:10]:
                    with self.m.step.nest(name) as pres:
                        pres.step_summary_text = f'{duration:.1f}s'
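
    # Worked example: given the log lines '2000 5000 0 medium 0' and
    # '3000 8000 0 long 0', the nested steps would report 'long' at 5.0s
    # and 'medium' at 3.0s.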