# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Recipe for testing Pigweed using presubmit_checks.py script."""
import datetime
import re
from typing import Generator
from PB.go.chromium.org.luci.buildbucket.proto import common
from PB.recipes.pigweed.pw_presubmit import InputProperties, StepName
from PB.recipe_engine import result
from recipe_engine import (
config_types,
post_process,
recipe_api,
recipe_test_api,
)

DEPS = [
    'fuchsia/gsutil',
    'pigweed/checkout',
    'pigweed/ci_status',
    'pigweed/environment',
    'pigweed/pw_presubmit',
    'pigweed/util',
    'recipe_engine/cq',
    'recipe_engine/cv',
    'recipe_engine/defer',
    'recipe_engine/file',
    'recipe_engine/futures',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/time',
]

PROPERTIES = InputProperties

# The path to the public key corresponding to the key used to sign release
# builds. Only set on release builders.
RELEASE_PUBKEY_PATH = '/etc/release_keys/release_key_pub.pem'

# The name of the public key file uploaded in release builds.
RELEASE_PUBKEY_FILENAME = 'publickey.pem'


def _try_sign_archive(
    api: recipe_api.RecipeScriptApi,
    archive_path: config_types.Path,
    name: str,
) -> str:
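    """Attempt to sign archive_path with the sign.py recipe resource.

    Runs sign.py under vpython3 and returns whatever the script writes to
    stdout (possibly an empty string), which callers treat as the signature.
    """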
    cmd: list[str | config_types.Path] = [
        'vpython3',
        '-vpython-spec',
        api.resource('sign.py.vpython'),
        '-u',
        api.resource('sign.py'),
        '--archive-file',
        archive_path,
    ]

    return api.step(
        f'sign {name}',
        cmd,
        stdout=api.raw_io.output_text(),
    ).stdout


def RunSteps(
    api: recipe_api.RecipeScriptApi,
    props: InputProperties,
) -> result.RawResult | None:
    """Run Pigweed presubmit checks."""
    gcs_bucket = props.gcs_bucket

    if res := api.ci_status.exit_early_in_recipe_testing_if_failing():
        return res  # pragma: no cover

    checkout = api.checkout(props.checkout_options)
    env = api.environment.init(checkout, props.environment_options)

    with env():
        presubmit = api.pw_presubmit.init(
            checkout, props.pw_presubmit_options
        )

        for change in checkout.changes:
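            # A 'Build-Errors: continue' line anywhere in a commit message
            # under test tells the presubmit to keep going after a build
            # error instead of stopping at the first failing step.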
            if 'build-errors: continue' in change.commit_message.lower():
                presubmit.options.continue_after_build_error = True

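        # Run every presubmit step, deferring failures so later steps still
        # run; any collected failures are raised when this context exits.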
        with api.defer.context() as defer:
            for step in presubmit.steps:
                defer(api.pw_presubmit.run, ctx=presubmit, step=step, env=env)

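    # Collect metadata (e.g. binary sizes) reported by the steps, keyed by
    # metadata type and then by '<step name>.<key>'.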
    metadata = {}
    steps_with_metadata = set()
    for step in presubmit.steps:
        for metadata_type, data in step.metadata.items():
            metadata.setdefault(metadata_type, {})
            for key, value in data.items():
                steps_with_metadata.add(step.name)
                metadata[metadata_type][f'{step.name}.{key}'] = value

    if metadata:
        # Change metadata output like the following:
        #
        # "binary_sizes": {
        #     "step1.foo": 123,
        #     "step1.bar": 456,
        # }
        #
        # For (STEP_NAME_DEFAULT and one step) or WITH_WITHOUT_STEP_NAME:
        #
        # "binary_sizes": {
        #     "foo": 123,
        #     "bar": 456,
        #     "step1.foo": 123,
        #     "step1.bar": 456,
        # }
        #
        # For ONLY_WITHOUT_STEP_NAME:
        #
        # "binary_sizes": {
        #     "foo": 123,
        #     "bar": 456,
        # }
        #
        # For (STEP_NAME_DEFAULT and multiple steps) or ONLY_WITH_STEP_NAME
        # (unchanged):
        #
        # "binary_sizes": {
        #     "step1.foo": 123,
        #     "step1.bar": 456,
        # }
        #
        # These options exist because we might be comparing size outputs from
        # steps with different names, or we might want soft transitions from
        # one step name to another.
        with api.step.nest('metadata') as pres:
            step_usage = StepName.Name(props.metadata_step_name_usage)
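            # By default, emit both key forms when only one step produced
            # metadata (un-prefixed keys are unambiguous); otherwise keep
            # only the step-prefixed keys.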
            if step_usage == 'STEP_NAME_DEFAULT':
                if len(steps_with_metadata) == 1:
                    step_usage = 'WITH_WITHOUT_STEP_NAME'
                else:
                    step_usage = 'ONLY_WITH_STEP_NAME'

            for data in metadata.values():
                for key in set(data.keys()):
                    if step_usage == 'WITH_WITHOUT_STEP_NAME':
                        # Need both 'foo' and 'step1.foo'.
                        data[key.split('.', 1)[1]] = data[key]

                    elif step_usage == 'ONLY_WITH_STEP_NAME':
                        # Only need 'step1.foo', good as is.
                        pass

                    elif step_usage == 'ONLY_WITHOUT_STEP_NAME':
                        # Only need 'foo', need to delete 'step1.foo'.
                        data[key.split('.', 1)[1]] = data[key]
                        del data[key]

                    else:
                        raise ValueError(str(step_usage))  # pragma: no cover

            for name, data in metadata.items():
                pres.properties[name] = data

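    # If a GCS bucket is configured, upload the checkout snapshot, build
    # metadata, and each step's exported artifacts under a per-build
    # namespace.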
    if gcs_bucket:
        uploaded_public_key = False

        with api.step.nest('upload') as pres:
            with env():
                namespace = api.pw_presubmit.build_id(presubmit)

                checkout_dir = api.path.start_dir / 'checkout_upload'
                checkout.snapshot_to_dir(checkout_dir)

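                # Spawn the uploads in parallel; the futures are waited on
                # below.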
                futures = [
                    api.futures.spawn(
                        api.gsutil.upload_namespaced_directory,
                        source=checkout_dir,
                        bucket=gcs_bucket,
                        subpath='checkout',
                        namespace=namespace,
                    )
                ]

                futures.append(
                    api.futures.spawn(
                        api.gsutil.upload_namespaced_file,
                        source=api.json.input(api.util.build_metadata()),
                        bucket=gcs_bucket,
                        subpath='build_metadata.json',
                        namespace=namespace,
                    )
                )

                for step in presubmit.steps:
                    if not step.export_dir:
                        continue  # pragma: no cover

                    # In testing this will never be true because of the
                    # mock_add_file() call for binary_sizes.json.
                    if not api.path.exists(step.export_dir):
                        continue  # pragma: no cover

                    for entry in api.file.listdir(
                        f'ls {step.name}/{presubmit.options.export_dir_name}',
                        step.export_dir,
                        recursive=True,
                    ):
                        metadata = None

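                        # Sign recognized archive types and attach the
                        # signature as GCS object metadata; the public key is
                        # uploaded once per build so the signatures can be
                        # verified.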
                        ext = api.path.splitext(entry)[1]
                        if ext in props.extensions_to_sign:
                            signature = _try_sign_archive(
                                api,
                                entry,
                                name=api.path.relpath(entry, presubmit.root),
                            )

                            if signature:
                                metadata = {
                                    'x-goog-meta-signature': signature,
                                }

                                if not uploaded_public_key:
                                    futures.append(
                                        api.futures.spawn(
                                            api.gsutil.upload_namespaced_file,
                                            source=RELEASE_PUBKEY_PATH,
                                            bucket=gcs_bucket,
                                            subpath=RELEASE_PUBKEY_FILENAME,
                                            namespace=namespace,
                                        )
                                    )
                                    uploaded_public_key = True

                        futures.append(
                            api.futures.spawn(
                                api.gsutil.upload_namespaced_file,
                                source=entry,
                                bucket=gcs_bucket,
                                subpath='{}/{}'.format(
                                    step.name,
                                    api.path.relpath(entry, step.export_dir),
                                ),
                                namespace=namespace,
                                metadata=metadata,
                            )
                        )

                # Need to wait for results but don't care about their values.
                _ = [f.result() for f in futures]

                # This file tells other users of the bucket that the upload
                # is complete.
                api.gsutil.upload_namespaced_file(
                    source=api.raw_io.input(''),
                    bucket=gcs_bucket,
                    subpath='upload_complete',
                    namespace=namespace,
                )

                browse_link = api.gsutil.namespaced_directory_url(gcs_bucket)
                pres.links['browse'] = browse_link

                return result.RawResult(
                    summary_markdown=f'[artifacts]({browse_link})',
                    status=common.SUCCESS,
                )


def GenTests(api) -> Generator[recipe_test_api.TestData, None, None]:
    """Create tests."""

    def ls_export(step_name, *files):
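        """Simulate step_name's export dir existing and listing files."""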
        return api.path.exists(
            api.path.start_dir / 'presubmit' / step_name / 'export'
        ) + api.step_data(
            f'upload.ls {step_name}/export',
            api.file.listdir(files),
        )

    def signature(step_name, filename):
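        """Mock the output of the 'sign' step for the given file."""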
        return api.step_data(
            f'upload.sign {step_name}/export/{filename}',
            stdout=api.raw_io.output_text('John Hancock'),
        )

    def properties(
        *,
        num_ci_failures_to_trigger_exiting_early=0,
        extensions_to_sign=('.out',),
        gcs_bucket=None,
        metadata_step_name_usage=None,
        **kwargs,
    ):
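        """Assemble InputProperties for a test case.

        Extra keyword arguments are forwarded to api.pw_presubmit.options().
        """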
        props = InputProperties()
        props.checkout_options.CopyFrom(api.checkout.git_options())
        props.pw_presubmit_options.CopyFrom(api.pw_presubmit.options(**kwargs))
        if metadata_step_name_usage:
            props.metadata_step_name_usage = StepName.Value(
                metadata_step_name_usage
            )
        props.extensions_to_sign.extend(extensions_to_sign)
        props.num_ci_failures_to_trigger_exiting_early = (
            num_ci_failures_to_trigger_exiting_early
        )
        if gcs_bucket:
            props.gcs_bucket = gcs_bucket
        return api.properties(props)

    def ran(x):
        return api.post_process(post_process.MustRun, x)

    def drop_expectations_must_be_last():
        return api.post_process(post_process.DropExpectation)

    yield (
        api.test('one_step_no_exit_passing_in_ci')
        + properties(step=['step1'])
        + api.checkout.try_test_data()
        + api.cv(run_mode=api.cq.DRY_RUN)
        + ran('step1')
        + drop_expectations_must_be_last()
    )

    yield (
        api.test('one_step_no_exit_not_tryjob')
        + properties(step=['step1'])
        + api.checkout.ci_test_data()
        + api.cv(run_mode=api.cv.DRY_RUN)
        + ran('step1')
        + drop_expectations_must_be_last()
    )

    yield (
        api.test('one_step_no_exit_not_in_cv')
        + properties(step=['step1'])
        + api.checkout.try_test_data()
        + ran('step1')
        + drop_expectations_must_be_last()
    )

    yield (
        api.test('two_steps')
        + properties(step=['step1', 'step2'], gcs_bucket='bucket')
        + api.checkout.try_test_data(
            start_time=datetime.datetime.utcfromtimestamp(1600000000),
            execution_timeout=120,
        )
        + api.checkout.cl_branch_parents(message='Build-Errors: continue')
        + api.step_data('upload.get build id', retcode=1)
        + ls_export('step1', 'foo')
        + api.time.seed(1600000000)
        + api.time.step(20.0)
        + ran('step1')
        + ran('step2')
        + ran('upload')
        + drop_expectations_must_be_last()
    )

    yield (
        api.test('sign')
        + properties(
            step=['release'],
            gcs_bucket='bucket',
            extensions_to_sign=['.foo'],
            metadata_step_name_usage='ONLY_WITHOUT_STEP_NAME',
        )
        + api.checkout.ci_test_data()
        + ls_export('release', '1.foo', '2.bar')
        + signature('release', '1.foo')
        + ran('release')
        + ran('upload.sign release/export/1.foo')
        + drop_expectations_must_be_last()
    )