# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Roll a submodule of a git repository."""
import re
import attr
from PB.recipes.pigweed.submodule_roller import InputProperties
from recipe_engine import post_process
from six.moves import configparser
from six.moves import urllib
from six import StringIO
DEPS = [
'fuchsia/auto_roller',
'fuchsia/git',
'fuchsia/status_check',
'pigweed/checkout',
'pigweed/cq_deps',
'pigweed/roll_util',
'recipe_engine/buildbucket',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/properties',
'recipe_engine/step',
]
PROPERTIES = InputProperties
PYTHON_VERSION_COMPATIBILITY = "PY3"
@attr.s
class _RevisionChange(object):
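    """Old and new revisions of an updated submodule."""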
old = attr.ib(type=str)
new = attr.ib(type=str)
def _update_submodule(api, path, new_revision):
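    """Fetch and check out new_revision in the submodule at path.

    If new_revision is a branch name, it is resolved to the commit hash it
    points to.

    Returns:
        _RevisionChange with the submodule's old and new revisions.
    """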
old_revision = api.checkout.get_revision(
path, 'get old revision', test_data='1' * 40
)
with api.context(cwd=path):
api.git('git fetch', 'fetch', 'origin', new_revision)
api.git('git checkout', 'checkout', 'FETCH_HEAD')
    # In case new_revision is a branch name, we need to retrieve the hash it
    # resolved to.
if not re.search(r'^[0-9a-f]{40}$', new_revision):
new_revision = api.checkout.get_revision(
path, 'get new revision', test_data='2' * 40
)
return _RevisionChange(old=old_revision, new=str(new_revision))
def _process_deps(api, rolls, submodules, gerrit_name):
"""Process any dependencies and add them to rolls.
Process any dependencies ("Requires:" lines) in the one initial entry in
rolls and add them to rolls.
Args:
api: Recipe api object.
        rolls (dict[str,api.roll_util.Roll]): Dict of rolls keyed by submodule
            path, initially containing one entry but added to by this
            function.
submodules (list[api.checkout.Submodule]): List of submodules in this
checkout.
gerrit_name (str): Name of initial gerrit server.
Returns:
Nothing.
"""
deps = {}
# Initially, there should only be one entry in rolls.
assert len(rolls) == 1
submodule_path = list(rolls)[0]
submodule_dir = api.checkout.root.join(submodule_path)
for commit in rolls[submodule_path].commits:
dependencies, unresolved = api.cq_deps.resolve(
gerrit_name, commit.hash, statuses=('MERGED',)
)
if unresolved:
with api.step.nest('failed to resolve some dependencies') as pres:
for dep in unresolved:
pres.links[dep.name] = dep.gerrit_url
for dep in dependencies:
deps[dep.name] = dep
# Ok, this is complicated, so I'll go over what it's supposed to do
# with an example. Suppose we're trying to roll commit 1 of submodule A.
# That commit depends on commit 2 of submodule B and commit 3 of
# submodule C. Commit 3 of submodule C depends on commit 4 of submodule
# B. Submodule B is referenced twice, with commits 2 and 4. Commit 4 is
# on top of commit 2, so we should roll B to commit 4 and skip the
# intermediate step of commit 2.
# Loop over all the dependencies and apply them all. If we see commit B4
# first, we ignore commit B2 when we see it because it's a backwards
# roll to go from B4 to B2. If we see commit B2 first, we roll to it
# (B2~1..B2) and then we update the roll when we see B4 (to B2~1..B4).
    # The end result is that we are only rolling to B4, but the set of
    # commits that roll brings in includes B2.
# The list of revisions between old_revision and new_revision isn't
# processed until api.roll_util.message() is called in the
# api.auto_roller.attempt_roll() call, so having an intermediate state
# where we have a partial version of the final roll doesn't affect
# anything.
    # Also handle various other cases, such as when the dependency is not in
    # the checkout or access to the dependency is forbidden. This does not yet
    # handle the case where an unrolled parent CL has unrolled dependencies.
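    # As a concrete sketch of the example above (hypothetical submodule 'B'
    # and revisions B2/B4): after seeing B2 first,
    #     rolls['B'] == Roll(old_revision=B2~1, new_revision=B2, ...)
    # and after later seeing B4 the same entry becomes
    #     rolls['B'] == Roll(old_revision=B2~1, new_revision=B4, ...)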
with api.step.nest('deps') as pres:
pres.step_summary_text = repr(deps)
for dep in deps.values():
found_dep = False
for sub in submodules:
if not api.checkout.remotes_equivalent(dep.remote, sub.remote):
continue
found_dep = True
with api.step.nest('applying {}'.format(dep.name)) as pres:
old_revision = sub.hash
if sub.relative_path in rolls:
old_revision = rolls[sub.relative_path].old_revision
with api.context(cwd=submodule_dir):
api.git(
'fetch {}'.format(dep.commit),
'fetch',
dep.remote,
dep.commit,
)
direction = api.roll_util.get_roll_direction(
submodule_dir, old_revision, dep.commit
)
if api.roll_util.can_roll(direction):
_update_submodule(api, sub.path, dep.commit)
rolls[sub.relative_path] = api.roll_util.Roll(
project_name=sub.relative_path,
old_revision=old_revision,
new_revision=dep.commit,
proj_dir=sub.path,
direction=direction,
)
pres.step_summary_text = 'applied'
else:
api.roll_util.skip_roll_step(
dep.remote, old_revision, dep.commit,
)
pres.step_summary_text = 'already applied'
if not found_dep:
with api.step.nest('skipping required {}'.format(dep.name)) as pres:
pres.step_summary_text = 'repository is not in checkout'
def RunSteps(api, props): # pylint: disable=invalid-name
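    """Roll the submodule named in the input properties.

    Merged CQ dependencies ("Requires:" lines) of the rolled commits are
    applied to their submodules as well.
    """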
submodule_path = props.submodule_path
submodule_name = props.submodule_name or submodule_path
submodule_branch = props.submodule_branch or None
dry_run = props.dry_run
cc_authors_on_rolls = props.cc_authors_on_rolls
cc_reviewers_on_rolls = props.cc_reviewers_on_rolls
cc_domains = props.cc_domains
always_cc = props.always_cc
    # The checkout module will try to use trigger data to pull in a specific
    # patch. Since the triggering commit is in a different repository, that
    # behavior needs to be disabled.
api.checkout(use_trigger=False)
new_revision = None
# Try to get new_revision from the trigger.
bb_remote = None
commit = api.buildbucket.build.input.gitiles_commit
if commit and commit.project:
new_revision = commit.id
host = commit.host
bb_remote = 'https://{}/{}'.format(host, commit.project)
# Confirm the given path is actually a submodule.
gitmodules = api.file.read_text(
'read .gitmodules', api.checkout.root.join('.gitmodules')
)
# Example .gitmodules file:
# [submodule "third_party/pigweed"]
# path = third_party/pigweed
# url = https://pigweed.googlesource.com/pigweed/pigweed
# configparser doesn't like leading whitespace on lines, despite what its
# documentation says.
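    # For example, '[submodule "x"]\n\tpath = x\n' becomes
    # '[submodule "x"]\npath = x\n'.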
gitmodules = re.sub(r'\n\s+', '\n', gitmodules)
parser = configparser.RawConfigParser()
parser.readfp(StringIO(gitmodules))
section = 'submodule "{}"'.format(submodule_name)
if not parser.has_section(section):
sections = parser.sections()
submodules = sorted(re.sub(r'^.*"(.*)"$', r'\1', x) for x in sections)
raise api.step.StepFailure(
'no submodule "{}" (submodules: {})'.format(
submodule_name, ', '.join('"{}"'.format(x) for x in submodules)
)
)
if not submodule_branch:
try:
submodule_branch = parser.get(section, 'branch')
except configparser.NoOptionError:
submodule_branch = 'main'
    # If we still don't have a revision, there was no gitiles trigger (perhaps
    # the build was scheduled manually). In this case we update to the HEAD of
    # the property-specified or inferred submodule branch.
if new_revision is None:
new_revision = submodule_branch
# This isn't used until much later but needs to be invoked before any
# submodules get updated.
submodules = api.checkout.submodules(api.checkout.root)
submodule_dir = api.checkout.root.join(submodule_path)
remote = api.roll_util.normalize_remote(
parser.get(section, 'url'), api.checkout.remote,
)
# If this was triggered by a gitiles poller, check that the triggering
# repository matches submodule_path.
if bb_remote:
if not api.checkout.remotes_equivalent(remote, bb_remote):
raise api.step.StepFailure(
'triggering repository ({}) does not match submodule remote '
'({})'.format(bb_remote, remote)
)
change = _update_submodule(api, submodule_dir, new_revision)
direction = api.roll_util.get_roll_direction(
submodule_dir, change.old, change.new
)
    # If the primary roll is unnecessary or backwards, we can exit immediately
    # without checking deps.
if not api.roll_util.can_roll(direction):
api.roll_util.skip_roll_step(remote, change.old, change.new)
return
rolls = {
submodule_path: api.roll_util.Roll(
project_name=str(submodule_path),
old_revision=change.old,
new_revision=change.new,
proj_dir=submodule_dir,
direction=direction,
),
}
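    # Derive the short Gerrit host name from the remote URL, e.g.
    # 'https://pigweed.googlesource.com/pigweed/pigweed' -> 'pigweed'.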
gerrit_name = urllib.parse.urlparse(remote).netloc.split('.')[0]
if len(rolls[submodule_path].commits) >= 10:
with api.step.nest('too many commits, not processing dependencies'):
pass
else:
_process_deps(api, rolls, submodules, gerrit_name)
cc = set()
authors = api.roll_util.authors(*rolls.values())
if cc_authors_on_rolls:
cc.update(authors)
if cc_reviewers_on_rolls:
cc.update(api.roll_util.reviewers(*rolls.values()))
def include_cc(email):
return api.roll_util.include_cc(
email, cc_domains, api.checkout.gerrit_host()
)
# include_cc() writes steps, so we want things sorted before calling it.
cc = sorted(set(cc))
cc = [x for x in cc if include_cc(x)]
roll_kwargs = {}
if always_cc:
roll_kwargs['cc'] = [x.email for x in cc]
else:
roll_kwargs['cc_on_failure'] = [x.email for x in cc]
author_override = None
with api.step.nest('authors') as pres:
pres.step_summary_text = repr(authors)
if len(authors) == 1 and props.forge_author:
author_override = api.roll_util.fake_author(
next(iter(authors))
)._asdict()
change = api.auto_roller.attempt_roll(
gerrit_host=api.checkout.gerrit_host(),
gerrit_project=api.checkout.gerrit_project(),
upstream_ref=api.checkout.branch,
repo_dir=api.checkout.root,
commit_message=api.roll_util.message(*rolls.values()),
dry_run=dry_run,
labels_to_set=api.roll_util.labels_to_set,
labels_to_wait_on=api.roll_util.labels_to_wait_on,
bot_commit=props.bot_commit,
author_override=author_override,
**roll_kwargs
)
return api.auto_roller.raw_result(change)
def GenTests(api): # pylint: disable=invalid-name
"""Create tests."""
def _url(x):
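        """Expand bare repo names into https URLs, passing others through."""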
if x.startswith(('https://', 'sso://', '.')):
return x
return 'https://foo.googlesource.com/' + x
def trigger(url, **kwargs):
return api.checkout.ci_test_data(git_repo=_url(url), **kwargs)
def gitmodules(**submodules):
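        """Mock the 'read .gitmodules' step.

        Keyword arguments map submodule names to URLs; a '<name>_branch'
        keyword adds a branch line for that submodule.
        """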
branches = {}
for k, v in submodules.items():
if k.endswith('_branch'):
branches[k.replace('_branch', '')] = v
for x in branches:
del submodules['{}_branch'.format(x)]
text = []
for k, v in sorted(submodules.items()):
text.append(
'[submodule "{0}"]\n\tpath = {0}\n\turl = {1}\n'.format(
k, _url(v)
)
)
if k in branches:
text.append('\tbranch = {}\n'.format(branches[k]))
return api.step_data(
'read .gitmodules', api.file.read_text(''.join(text))
)
def properties(**kwargs):
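        """Return input properties with forge_author and dry_run defaulted."""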
new_kwargs = api.checkout.git_properties()
new_kwargs['forge_author'] = True
new_kwargs['dry_run'] = True
new_kwargs.update(kwargs)
return api.properties(**new_kwargs)
def commit_data(name, **kwargs):
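        """Return fake roll_util commit data with one commit for name."""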
return api.roll_util.commit_data(
name,
api.roll_util.commit('a' * 40, 'foo\nbar\n\nChange-Id: I1111'),
**kwargs
)
yield (
api.status_check.test('success-sso-cc-authors')
+ properties(submodule_path='a1', cc_authors_on_rolls=True)
+ api.roll_util.properties(commit_divider='--divider--')
+ trigger('a1')
+ commit_data('a1', prefix='')
+ gitmodules(a1='sso://foo/a1')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_success()
)
yield (
api.status_check.test('failure-cc-authors', status='failure')
+ properties(submodule_path='a1', cc_authors_on_rolls=True)
+ trigger('a1')
+ commit_data('a1', prefix='')
+ gitmodules(a1='https://foo.googlesource.com/a1')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_failure()
)
yield (
api.status_check.test('relative-dot', status='failure')
+ properties(submodule_path='a1', cc_authors_on_rolls=True)
+ trigger('https://pigweed.googlesource.com/pigweed/pigweed/a1')
+ commit_data('a1', prefix='')
+ gitmodules(a1='./a1')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_failure()
)
yield (
api.status_check.test('relative-dotdot', status='failure')
+ properties(submodule_path='a1', cc_authors_on_rolls=True)
+ trigger('https://pigweed.googlesource.com/pigweed/a1')
+ commit_data('a1', prefix='')
+ gitmodules(a1='../a1')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_failure()
)
yield (
api.status_check.test(
'relative-dotdot-dotdot-always-cc-reviewers', status='failure',
)
+ properties(
submodule_path='a1', cc_reviewers_on_rolls=True, always_cc=True,
)
+ trigger('https://pigweed.googlesource.com/a1')
+ commit_data('a1', prefix='')
+ gitmodules(a1='../../a1')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_failure()
)
yield (
api.status_check.test('name-not-found', status='failure')
+ properties(submodule_path='a1')
+ trigger('a1')
+ gitmodules(b2='b2', c3='c3', d4='d4')
)
yield (
api.status_check.test('trigger-mismatch', status='failure')
+ properties(submodule_path='a1')
+ trigger('a1')
+ gitmodules(a1='b2')
)
yield (
api.status_check.test('trigger-mismatch-equivalent')
+ properties(
submodule_path='a1',
**api.checkout.git_properties(
equivalent_remotes=(
(
'https://foo.googlesource.com/a1',
'https://foo.googlesource.com/b2',
),
),
)
)
+ trigger('a1')
+ commit_data('a1', prefix='')
+ gitmodules(a1='b2')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_success()
)
yield (
api.status_check.test('with-branch-prop-filter-emails')
+ properties(
submodule_path='a1',
submodule_branch='branch',
cc_authors_on_rolls=True,
cc_reviewers_on_rolls=True,
cc_domains=['google.com'],
)
+ commit_data('a1', prefix='')
+ gitmodules(a1='a1', a1_branch='not_used')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_success()
)
yield (
api.status_check.test('no-revision')
+ properties(submodule_path='a1')
+ commit_data('a1', prefix='')
+ gitmodules(a1='a1', a1_branch='custom')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_success()
)
yield (
api.status_check.test('backwards')
+ properties(submodule_path='a1')
+ trigger('a1')
+ gitmodules(a1='a1')
+ api.roll_util.backward_roll()
)
def assert_too_many():
return api.post_process(
post_process.MustRun,
'too many commits, not processing dependencies',
)
atoz = 'abcdefghijklmnopqrstuvwxyz'
yield (
api.status_check.test('too-many-skip-deps')
+ properties(submodule_path='a1', cc_authors_on_rolls=True)
+ trigger('a1')
+ api.roll_util.commit_data(
'a1', *[api.roll_util.commit(x * 40, x) for x in atoz]
)
+ gitmodules(a1='sso://foo/a1')
+ api.roll_util.forward_roll()
+ api.auto_roller.dry_run_success()
+ assert_too_many()
)
    # Much of the step data in the tests using "Requires:" is sufficient to
    # test the logic that processes the immediate result of the corresponding
    # step, but does not make sense as a whole.
def requires_test(name, *requires, **kwargs):
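        """Common setup: roll 'spam' whose CL declares the given Requires deps."""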
assert requires
status = kwargs.pop('status', 'success')
assert not kwargs
return (
api.status_check.test(name, status=status)
+ properties(submodule_path='spam')
+ trigger('spam', revision='2' * 40)
+ gitmodules(spam='spam', ham='ham')
+ api.checkout.submodules(
spam='https://foo.googlesource.com/spam',
ham='https://foo.googlesource.com/ham',
eggs='https://foo.googlesource.com/eggs',
prefix='',
)
+ api.cq_deps.details(
'foo:2000', message='Requires: {}'.format(','.join(requires)),
)
+ api.roll_util.commit_data(
'spam', api.roll_util.commit('2' * 40), prefix='',
)
+ api.roll_util.forward_roll()
)
# CL 2000 requires CL 444 in ham which has not rolled.
yield (
requires_test('with-requires', 'foo:444')
+ api.cq_deps.details('foo:444', status='MERGED', project='ham')
+ api.roll_util.commit_data(
'ham', api.roll_util.commit('2' * 40), prefix='applying foo:444.',
)
+ api.roll_util.forward_roll('applying foo:444.')
+ api.auto_roller.dry_run_success()
)
# CL 2000 requires CL 444 in ham which has already rolled.
yield (
requires_test('with-requires-already-applied', 'foo:444')
+ api.cq_deps.details('foo:444', status='MERGED', project='ham')
+ api.roll_util.noop_roll('applying foo:444.')
+ api.auto_roller.dry_run_success()
)
# CL 2000 requires CL 444 which is forbidden.
yield (
requires_test('with-requires-forbidden', 'foo:444')
+ api.cq_deps.forbidden('foo:444')
+ api.auto_roller.dry_run_success()
)
# CL 2000 requires CL 444 which is not in this checkout.
yield (
requires_test('with-requires-not-in-checkout', 'foo:444')
+ api.cq_deps.details(
'foo:444', status='MERGED', project='not-in-this-checkout',
)
+ api.auto_roller.dry_run_success()
)
# CL 2000 requires CL 444 in ham which requires CL 555 in eggs.
yield (
requires_test('with-requires-transitive', 'foo:444')
+ api.cq_deps.details(
'foo:444',
status='MERGED',
project='ham',
message='Requires: foo:555',
)
+ api.roll_util.commit_data(
'ham', api.roll_util.commit('2' * 40), prefix='applying foo:444.',
)
+ api.roll_util.forward_roll('applying foo:444.')
+ api.cq_deps.details('foo:555', status='MERGED', project='eggs')
+ api.roll_util.commit_data(
'eggs', api.roll_util.commit('2' * 40), prefix='applying foo:555.',
)
+ api.roll_util.forward_roll('applying foo:555.')
+ api.auto_roller.dry_run_success()
)
# CL 2000 requires CL 444 in ham which requires CL 2000.
yield (
requires_test('with-requires-loop', 'foo:444')
+ api.cq_deps.details(
'foo:444',
status='MERGED',
project='ham',
message='Requires: foo:2000',
)
+ api.roll_util.commit_data(
'ham', api.roll_util.commit('2' * 40), prefix='applying foo:444.',
)
+ api.roll_util.forward_roll('applying foo:444.')
+ api.auto_roller.dry_run_success()
)
# CL 2000 requires CL 444 in ham and CL 555 in ham, both of which are
# submitted.
def parent_child_test(*args, **kwargs):
return (
requires_test(*args, **kwargs)
+ api.cq_deps.details('foo:444', status='MERGED', project='ham',)
+ api.roll_util.commit_data(
'ham',
api.roll_util.commit('4' * 40),
prefix='applying foo:444.',
)
+ api.roll_util.forward_roll('applying foo:444.')
+ api.cq_deps.details('foo:555', status='MERGED', project='ham',)
)
# CL 2000 requires CL 444 in ham and CL 555 in ham, both of which are
# submitted. CL 444 is a parent of CL 555.
yield (
parent_child_test('with-requires-child', 'foo:444', 'foo:555')
+ api.roll_util.forward_roll('applying foo:555.')
+ api.roll_util.commit_data(
'ham', api.roll_util.commit('5' * 40), prefix='applying foo:555.',
)
+ api.auto_roller.dry_run_success()
)
# CL 2000 requires CL 444 in ham and CL 555 in ham, both of which are
# submitted. CL 555 is a parent of CL 444.
yield (
parent_child_test('with-requires-parent', 'foo:444', 'foo:555')
+ api.roll_util.backward_roll('applying foo:555.')
+ api.auto_roller.dry_run_success()
)