| # Copyright 2021 The Pigweed Authors |
| # |
| # Licensed under the Apache License, Version 2.0 (the "License"); you may not |
| # use this file except in compliance with the License. You may obtain a copy of |
| # the License at |
| # |
| # https://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
| # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
| # License for the specific language governing permissions and limitations under |
| # the License. |
| """Calls to build code.""" |
| |
| from recipe_engine import recipe_api |
| |
| |
class BuildApi(recipe_api.RecipeApi):
    """Calls to build code.

    Recipe module wrapping GN + Ninja builds: configuring ('gn gen'),
    building ('ninja'), archiving/restoring the build directory via CAS,
    and collecting build logs for presentation.
    """

    # Name of the output property used to export the CAS digest of the
    # archived build directory (see archive_to_cas()).
    CAS_DIGEST_PROPERTY_NAME = 'cas_build_digest'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Build output directory. Set lazily in initialize() because
        # start_dir is not available at construction time.
        self.dir = None

    def initialize(self):
        """Set the build directory to '<start_dir>/build'."""
        self.dir = self.m.path['start_dir'].join('build')

    def gn_gen(self, checkout_dir, options):
        """Run 'gn gen' to configure the build directory.

        Args:
          checkout_dir: Path to the source checkout; used as the working
            directory for the 'gn gen' step.
          options: Object with an iterable 'gn_args' attribute; each entry
            is passed to GN as its own '--args=...' flag.
        """
        cmd = ['gn', 'gen']

        for gn_arg in options.gn_args:
            cmd.append('--args={}'.format(gn_arg))

        # Infrequently needed but harmless to always add this.
        cmd.append('--export-compile-commands')

        cmd.append(self.dir)

        with self.m.context(cwd=checkout_dir):
            self.m.step('gn gen', cmd)

    def get_gn_args(self, checkout_root=None, test_data=None):
        """Return the resolved GN args of the build directory.

        Runs 'gn args <build_dir> --list --json' and parses its JSON
        output.

        Args:
          checkout_root: Optional working directory for the step; if None,
            the step runs with the ambient cwd.
          test_data: Optional list of GN-arg dicts used as the step's
            simulated JSON stdout in recipe tests.

        Returns:
          Dict mapping each GN arg's name to its full JSON entry. Empty
          dict if the step produced no output.
        """
        context_kwargs = {'cwd': checkout_root} if checkout_root else {}
        with self.m.context(**context_kwargs):
            cmd = ['gn', 'args', self.dir, '--list', '--json']
            args = self.m.step(
                'all gn args',
                cmd,
                stdout=self.m.json.output(),
                step_test_data=lambda: self.m.json.test_api.output_stream(
                    test_data or []
                ),
            ).stdout
            # 'args or ()' guards against a None/empty stdout.
            return {x['name']: x for x in args or ()}

    def ninja(self, options):
        """Run ninja in the build directory.

        Args:
          options: Object with an iterable 'ninja_targets' attribute naming
            the targets to build (appended to the ninja command line).
        """
        cmd = ['ninja', '-C', self.dir]
        cmd.extend(options.ninja_targets)
        self.m.step('ninja', cmd)

    def __call__(self, checkout_dir, options):
        """Configure and build: gn_gen() followed by ninja()."""
        self.gn_gen(checkout_dir, options)
        self.ninja(options)

    def archive_to_cas(self):
        """Archive the entire build directory to CAS.

        Exports the resulting digest as the CAS_DIGEST_PROPERTY_NAME
        output property of the nested step.
        """
        # TODO(pwbug/389) Only archive necessary files.
        with self.m.step.nest('archive to cas') as pres:
            digest = self.m.cas.archive('archive', self.dir, self.dir)
            pres.properties[self.CAS_DIGEST_PROPERTY_NAME] = digest

    def download_from_cas(self, digest):
        """Download a previously archived build directory from CAS.

        Args:
          digest: CAS digest produced by archive_to_cas().

        Returns:
          The result of the cas.download step.
        """
        return self.m.cas.download('download from cas', digest, self.dir)

    def log_longest_build_steps(self, ninja_log):
        """Parse the build log and log the longest-running build steps.

        Expects ninja's log format: whitespace-separated lines of
        'start_ms end_ms restat name cmdhash'. The ten longest steps are
        presented as nested steps, each annotated with its duration in
        seconds.

        Args:
          ninja_log: Contents of a .ninja_log file as a single string.
        """
        steps = []
        for line in ninja_log.splitlines():
            try:
                # Only fields 1, 2, and 4 are used; times are in
                # milliseconds, converted to seconds here.
                start_ms, end_ms, _, name, _ = line.split()
                duration = (int(end_ms) - int(start_ms)) / 1000.0
                steps.append((duration, name))
            except (ValueError, TypeError):
                # This processing is best-effort and should never be the cause
                # of a build failure. In case there's something wrong with this
                # logfile silently ignore the error--in that case it's very
                # likely something else also went wrong and that should be the
                # error presented to the user.
                pass

        # Longest durations first (ties broken by name, descending).
        steps.sort(reverse=True)

        if steps:
            with self.m.step.nest('longest build steps'):
                for dur, name in steps[0:10]:
                    with self.m.step.nest(name) as pres:
                        pres.step_summary_text = '{:.1f}s'.format(dur)

    def save_logs(self, build_dir=None, export_dir=None):
        """Save common build logs from the build directory.

        Read common build logs so they appear in logdog and if export_dir is
        set copy these logs there. If there's a ninja log call
        log_longest_build_steps() on it.

        Args:
          build_dir: Directory to scan for logs; defaults to self.dir.
          export_dir: Optional directory to copy all found files into
            (flat, by basename).
        """

        if build_dir is None:
            build_dir = self.dir

        # Filename patterns considered "build logs".
        globs = [
            '*.gn',
            '*.log',
            '*.json',
            '*.compdb',
            '*.graph',
            '*_log',
        ]
        # NOTE(review): presumably a test-only hook that marks ninja.log as
        # existing in the simulated filesystem (so the isfile() check below
        # passes in recipe tests) and has no effect on real runs -- confirm
        # against recipe_engine's path module.
        self.m.path.mock_add_file(build_dir.join('ninja.log'))

        found_files = []

        # Read these files and discard them so contents will be in logdog.
        ninja_log = None
        for glob in globs:
            # Simulated glob results for recipe tests: only the '*.log'
            # pattern "finds" a file (ninja.log).
            test_data = []
            if glob == '*.log':
                test_data = ['ninja.log']

            paths = self.m.file.glob_paths(
                'glob {}'.format(glob),
                build_dir,
                glob,
                include_hidden=True,
                test_data=test_data,
            )

            found_files.extend(paths)

            for path in paths:
                if not self.m.path.isfile(path):
                    continue  # pragma: no cover
                name = self.m.path.basename(path)

                # Simulated ninja log contents exercising normal, malformed,
                # and non-integer-timestamp lines in recipe tests.
                test_data = ''
                if name in ('.ninja_log', 'ninja.log'):
                    test_data = (
                        '2000 5000 0 medium 0\n'
                        '3000 8000 0 long 0\n'
                        'malformed line\n'
                        '4000 5000 0 short 0\n'
                        '5000 x 0 malformed-end-time 0\n'
                    )

                # No need to defer results here, but since some callers will be
                # deferring results and others won't this makes it so we always
                # need to call .get_result().
                with self.m.step.defer_results():
                    contents = self.m.file.read_text(
                        name, path, test_data=test_data
                    )

                if name in ('.ninja_log', 'ninja.log'):
                    ninja_log = contents.get_result()

        if ninja_log:
            self.log_longest_build_steps(ninja_log)

        if export_dir and found_files:
            with self.m.step.nest('copy'):
                for path in found_files:
                    name = self.m.path.basename(path)
                    self.m.file.copy(name, path, export_dir.join(name))