blob: edc940cbb3bebf9628b5a61c96e42058e2384b8e [file] [log] [blame]
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Recipe for testing Pigweed using presubmit_checks.py script."""
import datetime
from PB.go.chromium.org.luci.buildbucket.proto import common
from PB.recipes.pigweed.pw_presubmit import InputProperties
from PB.recipe_engine import result
# Recipe modules this recipe depends on; resolved by the recipe engine at
# load time.
DEPS = [
    'fuchsia/gsutil',
    'fuchsia/status_check',
    'pigweed/checkout',
    'pigweed/environment',
    'pigweed/pw_presubmit',
    'pigweed/util',
    'recipe_engine/file',
    'recipe_engine/futures',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/time',
]

# Input properties proto; the engine deserializes builder properties into
# this message and passes it to RunSteps as `props`.
PROPERTIES = InputProperties

PYTHON_VERSION_COMPATIBILITY = "PY3"

# The path to a public key used to sign release builds. Only set on release
# builders.
RELEASE_PUBKEY_PATH = '/etc/release_keys/release_key_pub.pem'

# The name of the public key file uploaded in release builds.
RELEASE_PUBKEY_FILENAME = 'publickey.pem'
def _try_sign_archive(api, archive_path, name):
    """Run the sign.py resource script against one archive.

    Executes sign.py inside its vpython virtualenv with the archive path and
    returns whatever the script writes to stdout (presumably the signature;
    callers treat empty output as "not signed").
    """
    sign_step = api.python(
        'sign {}'.format(name),
        api.resource('sign.py'),
        ['--archive-file', archive_path],
        venv=api.resource('sign.py.vpython'),
        stdout=api.raw_io.output_text(),
    )
    return sign_step.stdout
def RunSteps(api, props):
    """Run Pigweed presubmit checks.

    Runs every configured pw_presubmit step, publishes binary-size data as an
    output property, and — when a GCS bucket is configured — uploads the
    checkout snapshot, build metadata, and each step's export directory.
    """
    gcs_bucket = props.gcs_bucket

    api.checkout()
    root = api.checkout.root

    env = api.environment.init(root, props.environment_options)

    with env():
        api.pw_presubmit.init(root)

        # defer_results() lets every presubmit step run even if an earlier
        # one fails; accumulated failures are raised when the block exits.
        with api.step.defer_results():
            for step in api.pw_presubmit.steps():
                api.pw_presubmit.run(step)

    # Collect binary_sizes.json from each step's export directory, if present.
    binary_size_data = {}
    for step in api.pw_presubmit.steps():
        if not step.export_dir:
            continue  # pragma: no cover
        binary_sizes_json = step.export_dir.join('binary_sizes.json')
        # NOTE(review): mock_add_file() appears to exist so the simulated
        # test run sees this file (making isfile() true under test) — confirm
        # it is a no-op in production runs.
        api.path.mock_add_file(binary_sizes_json)
        if api.path.isfile(binary_sizes_json):
            with api.step.nest('binary sizes {}'.format(step.name)):
                binary_size_data[step.name] = api.file.read_json(
                    'read',
                    binary_sizes_json,
                    test_data={'target': 12345, 'target.budget': 12346},
                )

    # Flatten per-step size dicts into a single property value. With exactly
    # one step the target names are used as-is; with several they are
    # prefixed by the step name to keep them unique.
    binary_sizes = {}
    if len(binary_size_data) == 1:
        _, binary_sizes = binary_size_data.popitem()
    elif len(binary_size_data) > 1:
        for step_name, values in binary_size_data.items():
            for name, size in values.items():
                binary_sizes['{}.{}'.format(step_name, name)] = size

    if binary_sizes:
        with api.step.nest('binary sizes') as pres:
            pres.properties['binary_sizes'] = binary_sizes

    if gcs_bucket:
        uploaded_public_key = False

        with api.step.nest('upload') as pres:
            with env():
                # Ask the presubmit tool for a build id to namespace the
                # uploads under. ok_ret='any' because failure here is
                # tolerated (we fall back to no namespace).
                command = api.pw_presubmit.command_name.split()
                command.extend(['--directory', root, 'build-id'])
                step_data = api.step(
                    'get build id',
                    command,
                    stdout=api.raw_io.output_text(),
                    step_test_data=lambda: api.raw_io.test_api.stream_output_text(
                        '123-1234567890'
                    ),
                    ok_ret='any',
                )

                namespace = None
                if step_data.exc_result.retcode == 0:
                    namespace = step_data.stdout.strip()
                    # A build id of '0' is treated as "no build id".
                    if namespace == '0':
                        namespace = None

                checkout_dir = api.path['start_dir'].join('checkout_upload')
                api.checkout.snapshot_to_dir(checkout_dir)

                # Spawn all uploads in parallel; results are awaited below.
                futures = [
                    api.futures.spawn(
                        api.gsutil.upload_namespaced_directory,
                        source=checkout_dir,
                        bucket=gcs_bucket,
                        subpath='checkout',
                        namespace=namespace,
                    )
                ]

                futures.append(
                    api.futures.spawn(
                        api.gsutil.upload_namespaced_file,
                        source=api.json.input(api.util.build_metadata()),
                        bucket=gcs_bucket,
                        subpath='build_metadata.json',
                        namespace=namespace,
                    )
                )

                # Upload every file from each step's export directory,
                # signing files whose extension is in extensions_to_sign.
                for step in api.pw_presubmit.steps():
                    if not api.pw_presubmit.export_dir_name:
                        continue  # pragma: no cover
                    step_dir = api.pw_presubmit.root.join(step.name)
                    export_dir = step_dir.join(api.pw_presubmit.export_dir_name)
                    # In testing this will never be true because of the
                    # mock_add_file() call for binary_sizes.json.
                    if not api.path.exists(export_dir):
                        continue  # pragma: no cover

                    for entry in api.file.listdir(
                        'ls {}/{}'.format(
                            step.name, api.pw_presubmit.export_dir_name,
                        ),
                        export_dir,
                        recursive=True,
                    ):
                        metadata = None
                        ext = api.path.splitext(entry)[1]
                        if ext in props.extensions_to_sign:
                            signature = _try_sign_archive(
                                api,
                                entry,
                                name=api.path.relpath(
                                    entry, api.pw_presubmit.root
                                ),
                            )

                            if signature:
                                # Attach the signature as GCS object
                                # metadata on the uploaded artifact.
                                metadata = {
                                    "x-goog-meta-signature": signature,
                                }

                                # Upload the public key at most once, the
                                # first time anything is signed.
                                if not uploaded_public_key:
                                    futures.append(
                                        api.futures.spawn(
                                            api.gsutil.upload_namespaced_file,
                                            source=RELEASE_PUBKEY_PATH,
                                            bucket=gcs_bucket,
                                            subpath=RELEASE_PUBKEY_FILENAME,
                                            namespace=namespace,
                                        )
                                    )
                                    uploaded_public_key = True

                        futures.append(
                            api.futures.spawn(
                                api.gsutil.upload_namespaced_file,
                                source=entry,
                                bucket=gcs_bucket,
                                subpath='{}/{}'.format(
                                    step.name,
                                    api.path.relpath(entry, export_dir),
                                ),
                                namespace=namespace,
                                metadata=metadata,
                            )
                        )

                # Need to wait for results but don't care about their values.
                _ = [f.result() for f in futures]

                # This file tells other users of the bucket that the upload is
                # complete.
                api.gsutil.upload_namespaced_file(
                    source=api.raw_io.input(''),
                    bucket=gcs_bucket,
                    subpath='upload_complete',
                    namespace=namespace,
                )

                browse_link = api.gsutil.namespaced_directory_url(gcs_bucket)
                pres.links['browse'] = browse_link

        # Only reached when uploading: surface the artifact link in the
        # build summary.
        return result.RawResult(
            summary_markdown='[artifacts]({})'.format(browse_link),
            status=common.SUCCESS,
        )
def GenTests(api):  # pylint: disable=invalid-name
    """Create tests."""

    def ls_export(step_name, *files):
        """Make the step's export dir exist and stub its directory listing."""
        export_path = api.path['start_dir'].join(
            'presubmit', step_name, 'export'
        )
        listing = api.step_data(
            'upload.ls {}/export'.format(step_name),
            api.file.listdir(files),
        )
        return api.path.exists(export_path) + listing

    def signature(step_name, filename):
        """Stub a successful signing step for one exported file."""
        sign_step = 'upload.sign {}/export/{}'.format(step_name, filename)
        return api.step_data(
            sign_step,
            stdout=api.raw_io.output_text('John Hancock'),
        )

    def properties(**kwargs):
        """Assemble input properties for a test case.

        'gcs_bucket' and 'extensions_to_sign' become top-level properties;
        everything else is folded into the $pigweed/pw_presubmit module
        options.
        """
        props = api.checkout.git_properties()
        module_opts = {'export_dir_name': 'export'}
        props['$pigweed/pw_presubmit'] = module_opts
        for key in ('gcs_bucket', 'extensions_to_sign'):
            if key in kwargs:
                props[key] = kwargs.pop(key)
        module_opts.update(kwargs)
        return api.properties(**props)

    def release_properties():
        """Shared properties for the release/signing test cases."""
        return properties(
            step=['release'],
            gcs_bucket='bucket',
            extensions_to_sign=['.foo'],
        )

    yield (
        api.status_check.test('step')
        + properties(step=['step1', 'step2'], gcs_bucket='bucket')
        + api.checkout.try_test_data(
            start_time=datetime.datetime.utcfromtimestamp(1600000000),
            execution_timeout=120,
        )
        + api.step_data('upload.get build id', retcode=1)
        + ls_export('step1', 'foo')
        + api.time.seed(1600000000)
        + api.time.step(20.0)
    )

    yield (
        api.status_check.test('sign')
        + release_properties()
        + api.checkout.ci_test_data()
        + ls_export('release', '1.foo', '2.bar')
        + signature('release', '1.foo')
    )

    yield (
        api.status_check.test('sign-nobuildid')
        + release_properties()
        + api.checkout.ci_test_data()
        + api.step_data(
            'upload.get build id',
            stdout=api.raw_io.output_text('0\n'),
        )
        + ls_export('release', '1.foo', '2.bar')
        + signature('release', '1.foo')
    )