blob: 9e86ee640a7eb703f697d59c30b0d6fc50bfed87 [file] [log] [blame]
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Recipe for testing Pigweed using presubmit_checks.py script."""
import datetime
from PB.go.chromium.org.luci.buildbucket.proto import common
from PB.recipes.pigweed.pw_presubmit import InputProperties
from PB.recipe_engine import result
# Recipe modules this recipe depends on; resolved by the recipe engine.
DEPS = [
    'fuchsia/gsutil',
    'fuchsia/status_check',
    'pigweed/build',
    'pigweed/checkout',
    'pigweed/environment',
    'pigweed/util',
    'recipe_engine/buildbucket',
    'recipe_engine/file',
    'recipe_engine/futures',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/time',
]

# Input properties schema for this recipe (proto imported above).
PROPERTIES = InputProperties

PYTHON_VERSION_COMPATIBILITY = "PY3"

# The path to a public key used to sign release builds. Only set on release
# builders.
RELEASE_PUBKEY_PATH = '/etc/release_keys/release_key_pub.pem'

# The name of the public key file uploaded in release builds.
RELEASE_PUBKEY_FILENAME = 'publickey.pem'
def _step_timeout(api):
# Amount of time elapsed in the run.
elapsed_time = api.time.time() - api.buildbucket.build.start_time.seconds
# Amount of time before build times out.
time_remaining = (
api.buildbucket.build.execution_timeout.seconds - elapsed_time
)
# Give a buffer before build times out and kill this step then. This should
# give enough time to read any logfiles and maybe upload to logdog/GCS
# before the build times out.
step_timeout = time_remaining - 60
# If the timeout would be negative or very small set it to 30 seconds. We
# likely won't have enough information to debug these steps, but in case
# they're fast there's no reason to kill them much before the build is
# terminated.
if step_timeout < 30:
step_timeout = 30
return step_timeout
def _try_sign_archive(api, archive_path, name):
args = [
'--archive-file',
archive_path,
]
return api.python(
'sign {}'.format(name),
api.resource('sign.py'),
args,
venv=api.resource('sign.py.vpython'),
stdout=api.raw_io.output_text(),
).stdout
def RunSteps(api, props):
    """Run Pigweed presubmit checks.

    Checks out the project, expands the configured programs into individual
    presubmit steps, runs each step (deferring failures so all steps run),
    aggregates binary-size data, and optionally uploads artifacts to GCS.

    Args:
      api: Recipe API object.
      props: InputProperties proto (see PROPERTIES above).
    """
    # TODO(mohrr) Transition to passing a list in through properties.
    command_name = props.command_name or 'python -m pw_cli'
    gcs_bucket = props.gcs_bucket

    api.checkout()
    root = api.checkout.root

    # No explicit steps and no programs means there is nothing to run.
    if not props.step and not props.program:
        with api.step.nest('nothing to do, exiting'):
            return

    api.environment.init(root)

    presubmit_dir = api.path['start_dir'].join('presubmit')

    # Common command-line prefix shared by every presubmit invocation below.
    prefix = command_name.split()
    prefix += [
        '--directory',
        root,
        '--loglevel',
        'debug',
        'presubmit',
        '--package-root',
        api.path['cache'],
        '--output-directory',
        presubmit_dir,
    ]
    if props.only_on_changed_files:
        # Restrict checks to files changed in the most recent commit.
        prefix.extend(('--base', 'HEAD~1'))

    with api.environment():
        # Start with explicitly-requested steps, then append steps listed by
        # each program (deduplicated, order-preserving).
        steps = []
        steps.extend(props.step)
        if props.program:
            with api.step.nest('get steps from programs'):
                for program in props.program:
                    # To get step_test_data line to pass pylint.
                    raw_io_stream_output = (
                        api.raw_io.test_api.stream_output_text
                    )
                    # Ask the presubmit tool to list the steps of this
                    # program, one per line, without running them.
                    program_steps = (
                        api.step(
                            program,
                            prefix
                            + ['--program', program, '--only-list-steps'],
                            stdout=api.raw_io.output_text(),
                            step_test_data=lambda: raw_io_stream_output(
                                '{0}_0\n{0}_1\n'.format(program),
                            ),
                        )
                        .stdout.strip()
                        .splitlines()
                    )
                    steps.extend(x for x in program_steps if x not in steps)

        # defer_results() lets every step run even if an earlier one fails;
        # the build is marked failed at the end of the context.
        with api.step.defer_results():
            for step in steps:
                with api.step.nest(step) as pres:
                    api.step(
                        'run',
                        prefix + ['--step', step],
                        timeout=_step_timeout(api),
                    )
                    build_dir = presubmit_dir.join(step)
                    export_dir = None
                    if props.export_dir_name:
                        # Per-step directory whose contents get uploaded.
                        export_dir = build_dir.join(props.export_dir_name)
                        api.file.ensure_directory(
                            'mkdir {}'.format(props.export_dir_name),
                            export_dir,
                        )
                    api.build.save_logs(build_dir, export_dir)

    if props.export_dir_name:
        # Collect binary_sizes.json files written by individual steps.
        binary_size_data = {}
        for step in steps:
            binary_sizes_json = presubmit_dir.join(
                step, props.export_dir_name, 'binary_sizes.json'
            )
            # Pretend the file exists so the branch below is covered in
            # testing.
            api.path.mock_add_file(binary_sizes_json)
            if api.path.isfile(binary_sizes_json):
                with api.step.nest('binary sizes {}'.format(step)):
                    binary_size_data[step] = api.file.read_json(
                        'read',
                        binary_sizes_json,
                        test_data={'target': 12345, 'target.budget': 12346},
                    )

        # With exactly one step, use its sizes verbatim; with several,
        # prefix each entry with the step name to keep keys unique.
        binary_sizes = {}
        if len(binary_size_data) == 1:
            _, binary_sizes = binary_size_data.popitem()
        elif len(binary_size_data) > 1:
            for step, values in binary_size_data.items():
                for name, size in values.items():
                    binary_sizes['{}.{}'.format(step, name)] = size

        # Expose the aggregate as an output property for downstream
        # consumers.
        with api.step.nest('binary sizes') as pres:
            pres.properties['binary_sizes'] = binary_sizes

    if gcs_bucket:
        # Only upload the release public key once per build.
        uploaded_public_key = False
        with api.step.nest('upload') as pres:
            with api.environment():
                # Retrieve a build id to namespace the uploads; tolerate
                # failure (ok_ret='any') and fall back to no namespace.
                command = command_name.split()
                command.extend(['--directory', root, 'build-id'])
                step_data = api.step(
                    'get build id',
                    command,
                    stdout=api.raw_io.output_text(),
                    step_test_data=lambda: api.raw_io.test_api.stream_output_text(
                        '123-1234567890'
                    ),
                    ok_ret='any',
                )
                namespace = None
                if step_data.exc_result.retcode == 0:
                    namespace = step_data.stdout.strip()
                    # A build id of '0' means "no build id"; treat it the
                    # same as a failed lookup.
                    if namespace == '0':
                        namespace = None

            # Snapshot the checkout and upload it alongside build metadata.
            checkout_dir = api.path['start_dir'].join('checkout_upload')
            api.checkout.snapshot_to_dir(checkout_dir)
            futures = [
                api.futures.spawn(
                    api.gsutil.upload_namespaced_directory,
                    source=checkout_dir,
                    bucket=gcs_bucket,
                    subpath='checkout',
                    namespace=namespace,
                )
            ]

            futures.append(
                api.futures.spawn(
                    api.gsutil.upload_namespaced_file,
                    source=api.json.input(api.util.build_metadata()),
                    bucket=gcs_bucket,
                    subpath='build_metadata.json',
                    namespace=namespace,
                )
            )

            if props.export_dir_name:
                # Walk each step's export directory and upload every file,
                # signing those whose extension is in extensions_to_sign.
                for step_dir in api.file.listdir(
                    'ls presubmit', presubmit_dir, test_data=props.step,
                ):
                    step_name = api.path.basename(step_dir)
                    export_dir = step_dir.join(props.export_dir_name)
                    # In testing this will never be true because of the
                    # mock_add_file() call for binary_sizes.json.
                    if not api.path.exists(export_dir):
                        continue  # pragma: no cover
                    for entry in api.file.listdir(
                        'ls {}/{}'.format(step_name, props.export_dir_name),
                        export_dir,
                        recursive=True,
                    ):
                        metadata = None
                        ext = api.path.splitext(entry)[1]
                        if ext in props.extensions_to_sign:
                            signature = _try_sign_archive(
                                api,
                                entry,
                                name=api.path.relpath(entry, presubmit_dir),
                            )
                            if signature:
                                # Attach the signature as GCS object
                                # metadata on the uploaded file.
                                metadata = {
                                    "x-goog-meta-signature": signature,
                                }
                                if not uploaded_public_key:
                                    # Upload the verification key the first
                                    # time anything is signed.
                                    futures.append(
                                        api.futures.spawn(
                                            api.gsutil.upload_namespaced_file,
                                            source=RELEASE_PUBKEY_PATH,
                                            bucket=gcs_bucket,
                                            subpath=RELEASE_PUBKEY_FILENAME,
                                            namespace=namespace,
                                        )
                                    )
                                    uploaded_public_key = True
                        futures.append(
                            api.futures.spawn(
                                api.gsutil.upload_namespaced_file,
                                source=entry,
                                bucket=gcs_bucket,
                                subpath='{}/{}'.format(
                                    step_name,
                                    api.path.relpath(entry, export_dir),
                                ),
                                namespace=namespace,
                                metadata=metadata,
                            )
                        )

            # Need to wait for results but don't care about their values.
            _ = [f.result() for f in futures]

            # This file tells other users of the bucket that the upload is
            # complete.
            api.gsutil.upload_namespaced_file(
                source=api.raw_io.input(''),
                bucket=gcs_bucket,
                subpath='upload_complete',
                namespace=namespace,
            )

            browse_link = api.gsutil.namespaced_directory_url(gcs_bucket)
            pres.links['browse'] = browse_link

            # Surface the artifact location in the build summary.
            return result.RawResult(
                summary_markdown='[artifacts]({})'.format(browse_link),
                status=common.SUCCESS,
            )
def GenTests(api):  # pylint: disable=invalid-name
    """Create tests."""

    def ls_export(step_name, *files):
        # Simulate an existing export directory for step_name containing
        # the given files.
        return api.path.exists(
            api.path['start_dir'].join('presubmit', step_name, 'export')
        ) + api.step_data(
            'upload.ls {}/export'.format(step_name), api.file.listdir(files),
        )

    def signature(step_name, filename):
        # Mock the sign.py step's stdout for the given exported file.
        return api.step_data(
            'upload.sign {}/export/{}'.format(step_name, filename),
            stdout=api.raw_io.output_text('John Hancock'),
        )

    def properties(**kwargs):
        # Base properties shared by all tests; callers may override.
        new_kwargs = api.checkout.git_properties()
        new_kwargs['export_dir_name'] = 'export'
        new_kwargs.update(kwargs)
        return api.properties(**new_kwargs)

    # No steps and no programs: recipe exits early.
    yield (
        api.status_check.test('exit-early')
        + properties()
        + api.checkout.ci_test_data()
    )

    # Steps expanded from a named program.
    yield (
        api.status_check.test('pigweed')
        + properties(command_name='foo', program=['full'])
        + api.checkout.ci_test_data()
    )

    # Explicit steps with GCS upload; build-id lookup fails (retcode=1),
    # and time is advanced to exercise _step_timeout().
    yield (
        api.status_check.test('step')
        + properties(step=['step1', 'step2'], gcs_bucket='bucket')
        + api.checkout.try_test_data(
            start_time=datetime.datetime.utcfromtimestamp(1600000000),
            execution_timeout=120,
        )
        + api.step_data('upload.get build id', retcode=1)
        + ls_export('step1', 'foo')
        + api.time.seed(1600000000)
        + api.time.step(20.0)
    )

    # Repo-based (Android repo tool) checkout with only_on_changed_files.
    manifest = 'https://pigweed.googlesource.com/pigweed/manifest'
    yield (
        api.status_check.test('repo')
        + properties(
            step=['step'],
            only_on_changed_files=True,
            **api.checkout.repo_properties(remote=manifest)
        )
        + api.checkout.ci_test_data(manifest)
        + api.checkout.manifest_test_data(name='pigweed')
    )

    # Release signing: only '.foo' files get signed; public key uploaded.
    yield (
        api.status_check.test('sign')
        + properties(
            step=['release'], gcs_bucket='bucket', extensions_to_sign=['.foo'],
        )
        + api.checkout.ci_test_data()
        + ls_export('release', '1.foo', '2.bar')
        + signature('release', '1.foo')
    )

    # Build id of '0' is treated as "no namespace".
    yield (
        api.status_check.test('sign-nobuildid')
        + properties(
            step=['release'], gcs_bucket='bucket', extensions_to_sign=['.foo'],
        )
        + api.checkout.ci_test_data()
        + api.step_data(
            'upload.get build id', stdout=api.raw_io.output_text('0\n'),
        )
        + ls_export('release', '1.foo', '2.bar')
        + signature('release', '1.foo')
    )