# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Recipe for testing Pigweed using presubmit_checks.py script."""
import datetime
from PB.go.chromium.org.luci.buildbucket.proto import common
from PB.recipes.pigweed.pw_presubmit import InputProperties
from PB.recipe_engine import result
DEPS = [
'fuchsia/gsutil',
'fuchsia/status_check',
'pigweed/checkout',
'pigweed/environment',
'pigweed/pw_presubmit',
'pigweed/util',
'recipe_engine/file',
'recipe_engine/futures',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/raw_io',
'recipe_engine/step',
'recipe_engine/time',
]

PROPERTIES = InputProperties

# The path to the public key corresponding to the key used to sign release
# builds. Only set on release builders.
RELEASE_PUBKEY_PATH = '/etc/release_keys/release_key_pub.pem'

# The name of the public key file uploaded in release builds.
RELEASE_PUBKEY_FILENAME = 'publickey.pem'


def _try_sign_archive(api, archive_path, name):
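    """Attempt to sign an archive with the sign.py resource script.

    Returns the signature text that the script writes to stdout (empty if
    the script produced no output).
    """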
cmd = [
'vpython',
'-vpython-spec',
api.resource('sign.py.vpython'),
'-u',
api.resource('sign.py'),
'--archive-file',
archive_path,
]
return api.step(
'sign {}'.format(name), cmd, stdout=api.raw_io.output_text(),
).stdout


def RunSteps(api, props):
"""Run Pigweed presubmit checks."""
gcs_bucket = props.gcs_bucket
checkout = api.checkout(props.checkout_options)
root = checkout.root
env = api.environment.init(root, props.environment_options)
with env():
presubmit = api.pw_presubmit.init(root, props.pw_presubmit_options)
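        # Honor a "Build-Errors: continue" footer in any checked-out CL by
        # continuing to run steps after a build error.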
for change in checkout.changes:
if 'build-errors: continue' in change.commit_message.lower():
presubmit.options.continue_after_build_error = True
with api.step.defer_results():
for step in presubmit.steps:
api.pw_presubmit.run(ctx=presubmit, step=step, env=env)
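
    # Collect metadata from each step, prefixing keys with the step name so
    # values from different steps do not collide.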
metadata = {}
steps_with_metadata = set()
for step in presubmit.steps:
for metadata_type, data in step.metadata.items():
metadata.setdefault(metadata_type, {})
for key, value in data.items():
steps_with_metadata.add(step.name)
metadata[metadata_type][f'{step.name}.{key}'] = value
if metadata:
# When only one step has run, change metadata output like the following:
#
# "binary_sizes": {
# "step1.foo": 123,
# "step1.bar": 456,
# }
#
# Into the following:
#
# "binary_sizes": {
# "foo": 123,
# "bar": 456,
# "step1.foo": 123,
# "step1.bar": 456,
# }
#
# This is because we might be comparing size outputs from steps with
# different names.
#
# Keep the step-prefixed values as well because those will persist if
# additional steps are added to the builder.
if len(steps_with_metadata) == 1:
for data in metadata.values():
for key in set(data.keys()):
data[key.split('.', 1)[1]] = data[key]
with api.step.nest('metadata') as pres:
for name, data in metadata.items():
pres.properties[name] = data
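
    # Upload the checkout, build metadata, and any exported build artifacts
    # to GCS, then write an 'upload_complete' marker.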
if gcs_bucket:
uploaded_public_key = False
with api.step.nest('upload') as pres:
with env():
namespace = api.pw_presubmit.build_id(presubmit)
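                # Snapshot the checkout to its own directory and upload it in
                # parallel with the build metadata.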
checkout_dir = api.path['start_dir'].join('checkout_upload')
checkout.snapshot_to_dir(checkout_dir)
futures = [
api.futures.spawn(
api.gsutil.upload_namespaced_directory,
source=checkout_dir,
bucket=gcs_bucket,
subpath='checkout',
namespace=namespace,
)
]
futures.append(
api.futures.spawn(
api.gsutil.upload_namespaced_file,
source=api.json.input(api.util.build_metadata()),
bucket=gcs_bucket,
subpath='build_metadata.json',
namespace=namespace,
)
)
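
                # Upload each step's export directory, signing any archives
                # whose extensions are listed in extensions_to_sign.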
for step in presubmit.steps:
if not step.export_dir:
continue # pragma: no cover
# In testing this will never be true because of the
# mock_add_file() call for binary_sizes.json.
if not api.path.exists(step.export_dir):
continue # pragma: no cover
for entry in api.file.listdir(
'ls {}/{}'.format(
step.name, presubmit.options.export_dir_name,
),
step.export_dir,
recursive=True,
):
metadata = None
ext = api.path.splitext(entry)[1]
if ext in props.extensions_to_sign:
signature = _try_sign_archive(
api,
entry,
name=api.path.relpath(entry, presubmit.root),
)
if signature:
metadata = {
"x-goog-meta-signature": signature,
}
if not uploaded_public_key:
futures.append(
api.futures.spawn(
api.gsutil.upload_namespaced_file,
source=RELEASE_PUBKEY_PATH,
bucket=gcs_bucket,
subpath=RELEASE_PUBKEY_FILENAME,
namespace=namespace,
)
)
uploaded_public_key = True
futures.append(
api.futures.spawn(
api.gsutil.upload_namespaced_file,
source=entry,
bucket=gcs_bucket,
subpath='{}/{}'.format(
step.name,
api.path.relpath(entry, step.export_dir),
),
namespace=namespace,
metadata=metadata,
)
)
# Need to wait for results but don't care about their values.
_ = [f.result() for f in futures]
# This file tells other users of the bucket that the upload is
# complete.
api.gsutil.upload_namespaced_file(
source=api.raw_io.input(''),
bucket=gcs_bucket,
subpath='upload_complete',
namespace=namespace,
)
browse_link = api.gsutil.namespaced_directory_url(gcs_bucket)
pres.links['browse'] = browse_link
return result.RawResult(
summary_markdown='[artifacts]({})'.format(browse_link),
status=common.SUCCESS,
)


def GenTests(api):  # pylint: disable=invalid-name
"""Create tests."""

    def ls_export(step_name, *files):
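        """Mock an export directory for step_name with the given files."""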
return api.path.exists(
api.path['start_dir'].join('presubmit', step_name, 'export')
) + api.step_data(
'upload.ls {}/export'.format(step_name), api.file.listdir(files),
)

    def signature(step_name, filename):
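        """Mock a signature result for filename in step_name's export dir."""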
return api.step_data(
'upload.sign {}/export/{}'.format(step_name, filename),
stdout=api.raw_io.output_text('John Hancock'),
)

    def properties(*, gcs_bucket=None, extensions_to_sign=None, **kwargs):
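        """Assemble input properties for a test case."""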
new_kwargs = api.checkout.git_properties()
new_kwargs['pw_presubmit_options'] = {'export_dir_name': 'export'}
if gcs_bucket:
new_kwargs['gcs_bucket'] = gcs_bucket
if extensions_to_sign:
new_kwargs['extensions_to_sign'] = extensions_to_sign
new_kwargs['pw_presubmit_options'].update(kwargs)
return api.properties(**new_kwargs)

    yield (
api.status_check.test('step')
+ properties(step=['step1', 'step2'], gcs_bucket='bucket')
+ api.checkout.try_test_data(
start_time=datetime.datetime.utcfromtimestamp(1600000000),
execution_timeout=120,
)
+ api.checkout.cl_branch_parents(message='Build-Errors: continue')
+ api.step_data('upload.get build id', retcode=1)
+ ls_export('step1', 'foo')
+ api.time.seed(1600000000)
+ api.time.step(20.0)
)

    yield (
api.status_check.test('sign')
+ properties(
step=['release'], gcs_bucket='bucket', extensions_to_sign=['.foo'],
)
+ api.checkout.ci_test_data()
+ ls_export('release', '1.foo', '2.bar')
+ signature('release', '1.foo')
)