# blob: cc69c040a677b87ec1f980612568eb13e24d5e05 [file] [log] [blame]
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Recipe for testing Pigweed using presubmit_checks.py script."""
import datetime
from PB.go.chromium.org.luci.buildbucket.proto import common
from PB.recipes.pigweed.pw_presubmit import InputProperties, StepName
from PB.recipe_engine import result
# Recipe modules this recipe depends on. The recipe engine exposes each as an
# attribute on `api` (e.g. api.checkout, api.gsutil, api.pw_presubmit).
DEPS = [
    'fuchsia/gsutil',
    'fuchsia/status_check',
    'pigweed/checkout',
    'pigweed/environment',
    'pigweed/pw_presubmit',
    'pigweed/util',
    'recipe_engine/file',
    'recipe_engine/futures',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/time',
]

# Input-properties proto message; delivered to RunSteps as `props`.
PROPERTIES = InputProperties

# The path to a public key used to sign release builds. Only set on release
# builders.
RELEASE_PUBKEY_PATH = '/etc/release_keys/release_key_pub.pem'

# The name of the public key file uploaded in release builds.
RELEASE_PUBKEY_FILENAME = 'publickey.pem'
def _try_sign_archive(api, archive_path, name):
    """Run the bundled signing script over one archive file.

    Args:
        api: Recipe API object.
        archive_path: Path of the archive to sign.
        name: Human-readable name used in the step title ('sign {name}').

    Returns:
        Whatever the signing script wrote to stdout (the signature text).
    """
    sign_step = api.step(
        f'sign {name}',
        [
            'vpython',
            '-vpython-spec',
            api.resource('sign.py.vpython'),
            '-u',
            api.resource('sign.py'),
            '--archive-file',
            archive_path,
        ],
        stdout=api.raw_io.output_text(),
    )
    return sign_step.stdout
def RunSteps(api, props):
    """Run Pigweed presubmit checks.

    Checks out the project, runs every configured presubmit step, publishes
    any step metadata as output properties, and, when a GCS bucket is
    configured, uploads the checkout snapshot and per-step artifacts (signing
    selected archives) to that bucket.
    """
    gcs_bucket = props.gcs_bucket
    checkout = api.checkout(props.checkout_options)
    env = api.environment.init(checkout, props.environment_options)

    with env():
        presubmit = api.pw_presubmit.init(
            checkout.root, props.pw_presubmit_options
        )

        # CL authors can opt in to running all steps even after one fails by
        # putting 'Build-Errors: continue' in the commit message.
        for change in checkout.changes:
            if 'build-errors: continue' in change.commit_message.lower():
                presubmit.options.continue_after_build_error = True

        # defer_results() lets every presubmit step run before any failure
        # is surfaced.
        with api.step.defer_results():
            for step in presubmit.steps:
                api.pw_presubmit.run(ctx=presubmit, step=step, env=env)

    # Collect metadata emitted by each step, prefixing keys with the step
    # name ('step1.foo') so steps cannot clobber each other's entries.
    metadata = {}
    steps_with_metadata = set()
    for step in presubmit.steps:
        for metadata_type, data in step.metadata.items():
            metadata.setdefault(metadata_type, {})
            for key, value in data.items():
                steps_with_metadata.add(step.name)
                metadata[metadata_type][f'{step.name}.{key}'] = value

    if metadata:
        # Change metadata output like the following:
        #
        # "binary_sizes": {
        #     "step1.foo": 123,
        #     "step1.bar": 456,
        # }
        #
        # For (STEP_NAME_DEFAULT and one step) or WITH_WITHOUT_STEP_NAME:
        #
        # "binary_sizes": {
        #     "foo": 123,
        #     "bar": 456,
        #     "step1.foo": 123,
        #     "step1.bar": 456,
        # }
        #
        # For ONLY_WITHOUT_STEP_NAME:
        #
        # "binary_sizes": {
        #     "foo": 123,
        #     "bar": 456,
        # }
        #
        # For (STEP_NAME_DEFAULT and multiple steps) or ONLY_WITH_STEP_NAME
        # (unchanged):
        #
        # "binary_sizes": {
        #     "step1.foo": 123,
        #     "step1.bar": 456,
        # }
        #
        # These options exist because we might be comparing size outputs from
        # steps with different names, or we might want soft transitions from
        # one step name to another.
        with api.step.nest('metadata') as pres:
            step_usage = StepName.Name(props.metadata_step_name_usage)
            if step_usage == 'STEP_NAME_DEFAULT':
                # With a single metadata-producing step, an unprefixed key is
                # unambiguous, so emit both forms; otherwise keep prefixes.
                if len(steps_with_metadata) == 1:
                    step_usage = 'WITH_WITHOUT_STEP_NAME'
                else:
                    step_usage = 'ONLY_WITH_STEP_NAME'

            for data in metadata.values():
                # Iterate over a snapshot of the keys because entries may be
                # added or deleted inside the loop.
                for key in set(data.keys()):
                    if step_usage == 'WITH_WITHOUT_STEP_NAME':
                        # Need both 'foo' and 'step1.foo'.
                        data[key.split('.', 1)[1]] = data[key]
                    elif step_usage == 'ONLY_WITH_STEP_NAME':
                        # Only need 'step1.foo', good as is.
                        pass
                    elif step_usage == 'ONLY_WITHOUT_STEP_NAME':
                        # Only need 'foo', need to delete 'step1.foo'.
                        data[key.split('.', 1)[1]] = data[key]
                        del data[key]
                    else:
                        raise ValueError(str(step_usage))  # pragma: no cover

            # Publish each metadata type as an output property of the build.
            for name, data in metadata.items():
                pres.properties[name] = data

    if gcs_bucket:
        uploaded_public_key = False
        with api.step.nest('upload') as pres:
            with env():
                namespace = api.pw_presubmit.build_id(presubmit)

                # Snapshot the checkout to a directory and upload it, in
                # parallel with the build metadata and artifact uploads below.
                checkout_dir = api.path['start_dir'].join('checkout_upload')
                checkout.snapshot_to_dir(checkout_dir)
                futures = [
                    api.futures.spawn(
                        api.gsutil.upload_namespaced_directory,
                        source=checkout_dir,
                        bucket=gcs_bucket,
                        subpath='checkout',
                        namespace=namespace,
                    )
                ]

                futures.append(
                    api.futures.spawn(
                        api.gsutil.upload_namespaced_file,
                        source=api.json.input(api.util.build_metadata()),
                        bucket=gcs_bucket,
                        subpath='build_metadata.json',
                        namespace=namespace,
                    )
                )

                # Upload every file each step exported, signing those whose
                # extension is in props.extensions_to_sign.
                for step in presubmit.steps:
                    if not step.export_dir:
                        continue  # pragma: no cover
                    # In testing this will never be true because of the
                    # mock_add_file() call for binary_sizes.json.
                    if not api.path.exists(step.export_dir):
                        continue  # pragma: no cover

                    for entry in api.file.listdir(
                        f'ls {step.name}/{presubmit.options.export_dir_name}',
                        step.export_dir,
                        recursive=True,
                    ):
                        # NOTE: reuses (shadows) the `metadata` name from the
                        # earlier section; here it is per-object GCS metadata.
                        metadata = None
                        ext = api.path.splitext(entry)[1]
                        if ext in props.extensions_to_sign:
                            signature = _try_sign_archive(
                                api,
                                entry,
                                name=api.path.relpath(entry, presubmit.root),
                            )
                            if signature:
                                metadata = {
                                    "x-goog-meta-signature": signature,
                                }
                                # NOTE(review): pubkey upload nested under the
                                # successful-signature branch — confirm intent.
                                # Upload the verification key once per build.
                                if not uploaded_public_key:
                                    futures.append(
                                        api.futures.spawn(
                                            api.gsutil.upload_namespaced_file,
                                            source=RELEASE_PUBKEY_PATH,
                                            bucket=gcs_bucket,
                                            subpath=RELEASE_PUBKEY_FILENAME,
                                            namespace=namespace,
                                        )
                                    )
                                    uploaded_public_key = True

                        futures.append(
                            api.futures.spawn(
                                api.gsutil.upload_namespaced_file,
                                source=entry,
                                bucket=gcs_bucket,
                                subpath='{}/{}'.format(
                                    step.name,
                                    api.path.relpath(entry, step.export_dir),
                                ),
                                namespace=namespace,
                                metadata=metadata,
                            )
                        )

                # Need to wait for results but don't care about their values.
                _ = [f.result() for f in futures]

                # This file tells other users of the bucket that the upload is
                # complete.
                api.gsutil.upload_namespaced_file(
                    source=api.raw_io.input(''),
                    bucket=gcs_bucket,
                    subpath='upload_complete',
                    namespace=namespace,
                )

                # Link the uploaded artifacts from the build page and summary.
                browse_link = api.gsutil.namespaced_directory_url(gcs_bucket)
                pres.links['browse'] = browse_link
                return result.RawResult(
                    summary_markdown=f'[artifacts]({browse_link})',
                    status=common.SUCCESS,
                )
def GenTests(api):  # pylint: disable=invalid-name
    """Create tests."""

    def ls_export(step_name, *files):
        # Make the step's export directory "exist" and mock its recursive
        # listing to return the given file names.
        return api.path.exists(
            api.path['start_dir'].join('presubmit', step_name, 'export')
        ) + api.step_data(
            f'upload.ls {step_name}/export', api.file.listdir(files),
        )

    def signature(step_name, filename):
        # Mock stdout of the signing step. The '(unknown)' segment is
        # presumably the placeholder relpath the mocked path module yields
        # in tests — TODO confirm. `filename` is currently unused.
        return api.step_data(
            f'upload.sign {step_name}/export/(unknown)',
            stdout=api.raw_io.output_text('John Hancock'),
        )

    def properties(
        *,
        gcs_bucket=None,
        extensions_to_sign=None,
        metadata_step_name_usage=None,
        **kwargs,
    ):
        # Build InputProperties for one test case. Any extra kwargs are
        # folded into pw_presubmit_options.
        new_kwargs = api.checkout.git_properties()
        new_kwargs['pw_presubmit_options'] = {'export_dir_name': 'export'}
        if gcs_bucket:
            new_kwargs['gcs_bucket'] = gcs_bucket
        if extensions_to_sign:
            new_kwargs['extensions_to_sign'] = extensions_to_sign
        if metadata_step_name_usage:
            # Convert the enum value name to its numeric proto value.
            new_kwargs['metadata_step_name_usage'] = StepName.Value(
                metadata_step_name_usage
            )
        new_kwargs['pw_presubmit_options'].update(kwargs)
        return api.properties(**new_kwargs)

    # Simplest case: one step, no GCS upload.
    yield (
        api.status_check.test('one_step')
        + properties(step=['step1'])
        + api.checkout.try_test_data()
    )

    # Two steps with upload; also exercises the 'Build-Errors: continue'
    # commit-message handling and a failed 'get build id' step.
    yield (
        api.status_check.test('two_steps')
        + properties(step=['step1', 'step2'], gcs_bucket='bucket')
        + api.checkout.try_test_data(
            start_time=datetime.datetime.utcfromtimestamp(1600000000),
            execution_timeout=120,
        )
        + api.checkout.cl_branch_parents(message='Build-Errors: continue')
        + api.step_data('upload.get build id', retcode=1)
        + ls_export('step1', 'foo')
        + api.time.seed(1600000000)
        + api.time.step(20.0)
    )

    # Release-style build: signs '.foo' exports and strips step-name
    # prefixes from metadata keys.
    yield (
        api.status_check.test('sign')
        + properties(
            step=['release'],
            gcs_bucket='bucket',
            extensions_to_sign=['.foo'],
            metadata_step_name_usage='ONLY_WITHOUT_STEP_NAME',
        )
        + api.checkout.ci_test_data()
        + ls_export('release', '1.foo', '2.bar')
        + signature('release', '1.foo')
    )