blob: 2487e77f5c49f305bbf5b51f1733da3636fabb0b [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for rolling CIPD prebuilts into Fuchsia."""
import re
from recipe_engine.config import List
from recipe_engine.post_process import DoesNotRunRE
from recipe_engine.recipe_api import Property
from RECIPE_MODULES.fuchsia.utils import memoize
# Recipe modules this recipe depends on; resolved by the recipe engine at
# load time and exposed on `api` (e.g. 'fuchsia/jiri' -> api.jiri).
DEPS = [
    'fuchsia/auto_roller',
    'fuchsia/build',
    'fuchsia/buildbucket_util',
    'fuchsia/debug_symbols',
    'fuchsia/jiri',
    'fuchsia/status_check',
    'recipe_engine/archive',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/step',
]
# Input properties supplied by the builder configuration. Each key maps to the
# RunSteps keyword argument of the same name.
PROPERTIES = {
    'project':
        Property(kind=str, help='Jiri remote manifest project', default=None),
    'manifest':
        Property(kind=str, help='Jiri manifest to use'),
    'remote':
        Property(kind=str, help='Remote manifest repository'),
    'import_in':
        Property(
            kind=str, help='Path to the manifest to edit relative to $project'),
    'packages':
        Property(
            kind=List(str),
            help='The list of CIPD packages to update in $import_in'),
    'packages_requiring_ref':
        Property(
            kind=List(str),
            default=(),
            help='The subset of packages that must have the specified ref'),
    'debug_symbol_attribute':
        Property(
            kind=str,
            default='debug-symbols',
            help='Jiri attribute to match debug symbol packages'),
    'debug_symbol_gcs_paths':
        Property(
            kind=List(list),
            default=(),
            help='GCS bucket and namespace pairs to upload debug symbols to'),
    'lockfiles':
        Property(
            kind=List(str),
            default=(),
            help='The list of lockfiles to update in "${manifest}=${lockfile}" format'
        ),
    'dry_run':
        Property(
            kind=bool,
            default=False,
            help='Whether to dry-run the auto-roller (CQ+1 and abandon the change)'
        ),
    'tag':
        Property(
            kind=str,
            default='version',
            help='A CIPD tag prefix common to all $packages where a common version can be extracted'
        ),
    'ref':
        Property(
            kind=str,
            default='latest',
            help='A common CIPD ref to resolve when rolling a set of packages'),
    'owners':
        Property(
            kind=List(str),
            default=(),
            help=('The owners responsible for watching this roller '
                  '(example: "username@google.com").')),
    'test_multipliers':
        Property(
            kind=List(dict),
            default=None,
            help=('A list of test multipliers to pass into the roll CL '
                  'MULTIPLY footer.')),
}
# Commit message template for the roll CL. The Cq-Cl-Tag footers let infra
# tooling associate the CL back to this builder and build; the
# CQ-Do-Not-Cancel-Tryjobs footer is read by CQ. Don't reword these footers.
COMMIT_MESSAGE = """[roll] Roll {roller} CIPD packages:
{packages}
From: {old_version}
To: {version}
{multiply}
Cq-Cl-Tag: roller-builder:{builder}
Cq-Cl-Tag: roller-bid:{build_id}
CQ-Do-Not-Cancel-Tryjobs: true"""
# URL template for a CIPD package instance page, used in the CL description.
CIPD_URL = 'https://chrome-infra-packages.appspot.com/p/{package}/+/{version}'
@memoize
def get_tags_from_cipd(api, cipd_pkg, tag_prefix, ref):
  """Fetches a CIPD package instance's tags that match a prefix.

  Memoized: calling again with identical arguments (e.g. after the caller
  backtracks in its search) reuses the cached result instead of running
  another `cipd describe` step.

  Args:
    api (RecipeApi): The recipe api to use.
    cipd_pkg (str): The name of the CIPD package.
    tag_prefix (str): The tag prefix to filter by.
    ref (str): The ref of the desired instance.

  Returns:
    The set of the package's tags that start with the given prefix.
  """
  description = api.cipd.describe(cipd_pkg, ref)
  matching = {t.tag for t in description.tags if t.tag.startswith(tag_prefix)}
  # Surface the matched tags on the describe step, one per line.
  summary = ''.join('\n' + t for t in sorted(matching))
  api.step.active_result.presentation.step_summary_text = summary
  return matching
def nested_get_tags_from_cipd(api, cipd_pkg, tag_prefix, ref):
  """Same as get_tags_from_cipd, but nests its steps under the package name."""
  with api.step.nest(cipd_pkg):
    tags = get_tags_from_cipd(api, cipd_pkg, tag_prefix, ref)
  return tags
def find_common_tag(api, packages, packages_requiring_ref, tag, ref):
  """Returns a common tag shared by some instance of all the packages.

  The chosen instances of all of the packages listed in
  `packages_requiring_ref` will have the given `ref` (or else a
  `StepFailure` will be raised in the case where the `ref` instances of
  the `packages_requiring_ref` don't have a shared tag). If
  `packages_requiring_ref` is empty, then at least one (and generally most,
  if not all) of the chosen instances will have the given `ref`.

  The only time that some of the chosen instances won't currently have that
  ref is when some of the packages have been updated to a later version and
  some haven't (e.g. if another roller that does the updating is running in
  parallel with this recipe). In that case, we'll choose older instances of
  the updated packages, whose versions correspond to the `ref` instances of
  the non-updated packages.

  Args:
    api (RecipeApi): The recipe API to use.
    packages (seq(str)): The names of the packages to search.
    packages_requiring_ref (seq(str)): The subset of packages that MUST be
      pinned to the specified `ref`.
    tag (str): The prefix to filter by when searching for a common tag. E.g.,
      if `tag` is "version", then the common tag returned will start with
      "version".
    ref (str): The CIPD ref that we should try to pin the packages to, e.g.
      "latest".

  Returns:
    The name of a tag such that for every package, some instance of the
    package has that tag. This will often, but not always, be the newest such
    tag.

  Raises:
    StepFailure: If no tag shared by all packages can be found.
  """
  # The set of packages that don't need to be pinned to `ref`.
  # We use a list comprehension instead of set operations here to preserve the
  # ordering of the packages, which makes writing tests much easier.
  flexible_packages = [
      pkg for pkg in packages if pkg not in packages_requiring_ref
  ]
  # Index into `flexible_packages` of the next package to examine; reset to 0
  # below if we backtrack to re-examine earlier packages.
  index = 0
  if packages_requiring_ref:
    # All `ref`-pinned packages must share a tag outright; there is no room
    # to fall back to older instances for these.
    common_tags = set.intersection(*(
        nested_get_tags_from_cipd(api, pkg, tag, ref)
        for pkg in packages_requiring_ref))
    if not common_tags:
      raise api.step.StepFailure('unable to find common tag to roll')
  else:
    # Fetch the `ref` instance of the first package, and consider it to be the
    # "representative" instance; we'll try to find a subset of its tags that
    # are shared by the `ref` instances of all the other packages.
    common_tags = nested_get_tags_from_cipd(api, flexible_packages[0], tag, ref)
    index += 1
  found_outdated_package = False
  while index < len(flexible_packages):
    pkg = flexible_packages[index]
    index += 1
    with api.step.nest(pkg):
      # get_tags_from_cipd is memoized, so re-visiting a package after a
      # backtrack reuses the earlier describe result rather than re-running it.
      tags = get_tags_from_cipd(api, pkg, tag, ref)
      intersection = common_tags.intersection(tags)
      if intersection:
        # This package's `ref` instance shares some tags with the previous
        # packages' `ref` instances, so we still have some common tag
        # candidates and can move on to the next package.
        common_tags = intersection
        continue
      # Else, the instance of `pkg` at `ref` does not share any tags with all
      # the previous packages' fetched instances. We'll see if `pkg` has any
      # (probably older) instance that has a shared tag with all the previous
      # packages' chosen instances.
      # NOTE(review): assumes `common_tags` is non-empty here (it is on every
      # path above); if it were empty, `older_instances` below would be
      # unbound.
      for common_tag in common_tags:
        older_instances = api.cipd.search(pkg, common_tag)
        if older_instances:
          break
      if older_instances:
        # The representative package instance has not yet been updated to this
        # package's `ref` version (or at least the representative hadn't been
        # updated at the time that we fetched its tags), but there are older
        # instances of this package that *do* match the version of the
        # representative instance, so we'll use one of those instead.
        # NOTE: There is a chance that `pkg` was actually out of date relative
        # to the representative package, but got updated to the same version as
        # the previous packages in the time between "cipd describe" and "cipd
        # search", in which case `older_instance` will actually be newer than
        # the original instance and we will end up with the same result as if
        # `pkg` had already been up-to-date when we first "cipd described" it.
        older_instance = older_instances[0].instance_id
        older_tags = get_tags_from_cipd(api, pkg, tag, older_instance)
        common_tags.intersection_update(older_tags)
        assert common_tags  # should contain at least `common_tag`
      elif not found_outdated_package and not packages_requiring_ref:
        # This package has not yet been updated to the version of the
        # representative package instance. Go back and start over with *this*
        # package as the new representative *unless* we have some packages that
        # must be pinned to `ref`, in which case we've already hit a dead end
        # as it will be impossible to satisfy that requirement.
        found_outdated_package = True
        common_tags = tags
        index = 0
        step = api.step('package out of date', None)
        step.presentation.step_summary_text = (
            '\nbacktracking to check older versions of previous packages')
      else:
        # We should "backtrack" at most once (i.e., find a package whose `ref`
        # instance version doesn't correspond to a version of all the other
        # packages), or zero times if there are `packages_requiring_ref`. If it
        # happens twice, there's probably something weird going on and we won't
        # be able to reconcile the tags by continuing.
        raise api.step.StepFailure('unable to find common tag to roll')
  # Choose one of the common tags (it doesn't matter which) as the version to
  # pin all packages to.
  return common_tags.pop()
def _get_platform_specific_packages(package, output):
platform_regex = '(?<=' + package.replace('${platform}',
r'\${platform=).*(?=})')
pattern = re.compile(platform_regex)
match = pattern.search(output)
if match:
platforms = match.group(0).split(',')
return [package.replace('${platform}', platform) for platform in platforms]
return [package]
def _append_urls(packages, old_version, new_version):
  """Formats each package name with links to its old and new CIPD instances.

  Packages still containing an unresolved '${platform}' placeholder are
  passed through unchanged, since they don't name a single concrete CIPD
  package page.

  Args:
    packages (seq(str)): Concrete (or placeholder) CIPD package names.
    old_version (str): The version being rolled away from.
    new_version (str): The version being rolled to.

  Returns:
    A list of display strings, one per package.
  """

  def _format(package):
    if '${platform}' in package:
      return package
    old_url = CIPD_URL.format(package=package, version=old_version)
    new_url = CIPD_URL.format(package=package, version=new_version)
    return '{package} old:{old} new:{new}'.format(
        package=package, old=old_url, new=new_url)

  return [_format(package) for package in packages]
def RunSteps(api, project, manifest, remote, import_in, packages,
             packages_requiring_ref, debug_symbol_attribute,
             debug_symbol_gcs_paths, lockfiles, dry_run, tag, ref, owners,
             test_multipliers):
  """Rolls a set of CIPD packages to a common version via an auto-roll CL.

  High-level flow: check out the integration project with jiri, find a tag
  shared by some instance of every package, edit the manifest (and any
  lockfiles) to pin that version, land the change through the auto-roller,
  and finally upload debug symbols for any rolled debug-symbol packages.
  """
  assert set(packages_requiring_ref).issubset(packages), (
      '`packages_requiring_ref` must be a subset of `packages`')
  with api.context(infra_steps=True):
    if owners:
      # Purely informational step so the build page shows who to contact.
      owners_step = api.step('owners', None)
      owners_step.presentation.step_summary_text = ', '.join(owners)
    # Check out the project that contains the manifest to edit.
    api.jiri.init(use_lock_file=True)
    api.jiri.import_manifest(manifest, remote, project)
    api.jiri.update(run_hooks=False)
    api.jiri.run_hooks()
    project_dir = api.path['start_dir'].join(*project.split('/'))
    with api.step.nest('find common tag'):
      version = find_common_tag(api, packages, packages_requiring_ref, tag, ref)
    with api.context(cwd=project_dir):
      changes = api.jiri.edit_manifest(
          import_in, packages=[(package, version) for package in packages])
      if not changes['packages']:
        # Already pinned to `version`; nothing to do.
        api.step('manifest up-to-date; nothing to roll', None)
        return
      old_version = changes['packages'][0]['old_version']
      exact_packages = set()
      # Test data for jiri.resolve
      package_test_data = [
          ('@Subdir prebuilt/tools/buildbucket\n' +
           package[:package.index('${platform}')] +
           '${platform=linux-amd64,mac-amd64} git_revision:aa2dae..')
          for package in packages
          if '${platform}' in package
      ]
      test_data = '\n'.join(package_test_data)
      # Update the lockfiles.
      for lock_entry in lockfiles:
        # Each entry has the form "${manifest}=${lockfile}".
        fields = lock_entry.split('=')
        manifest = fields[0]
        lock = fields[1]
        # NOTE(review): `step_test_data` is passed the raw string here;
        # presumably api.jiri.resolve wraps it appropriately — confirm.
        resolve_output = api.jiri.resolve(
            local_manifest=True,
            output=lock,
            manifests=[manifest],
            step_test_data=test_data).stdout
        # Expand ${platform} placeholders into the concrete per-platform
        # package names reported by `jiri resolve`, for the CL description.
        for p in packages:
          if '${platform}' in p:
            platform_pkgs = _get_platform_specific_packages(p, resolve_output)
            exact_packages = exact_packages.union(platform_pkgs)
          else:
            exact_packages.add(p)
  exact_packages = sorted(exact_packages)
  packages_with_urls = _append_urls(exact_packages, old_version, version)
  multiply = ''
  if test_multipliers:
    multiply = '\nMULTIPLY: `%s`\n' % api.json.dumps(
        test_multipliers, indent=2)
  message = COMMIT_MESSAGE.format(
      roller=api.buildbucket.builder_name.replace('-roller', ''),
      packages='\n'.join(packages_with_urls),
      old_version=old_version,
      version=version,
      builder=api.buildbucket.builder_name,
      build_id=api.buildbucket_util.id,
      multiply=multiply,
  )
  # Land the changes.
  change = api.auto_roller.attempt_roll(
      gerrit_project=project,
      repo_dir=project_dir,
      commit_message=message,
      dry_run=dry_run,
  )
  rolled = change and change.success
  # If roll succeeded, upload any debug symbols that were rolled.
  if rolled and debug_symbol_gcs_paths:
    with api.step.nest('fetch and upload debug symbols'):
      debug_symbol_packages = []
      with api.context(cwd=project_dir):
        for package in packages:
          # A package is a debug-symbol package iff its manifest element
          # carries the `debug_symbol_attribute` attribute.
          package_def = api.jiri.read_manifest_element(
              manifest=import_in, element_type='package', element_name=package)
          attributes = package_def.get('attributes', '').split(',')
          if debug_symbol_attribute in attributes:
            debug_symbol_packages.append(package)
      api.debug_symbols.fetch_and_upload(
          packages=debug_symbol_packages,
          version=version,
          gcs_paths=debug_symbol_gcs_paths)
def GenTests(api):
  """Simulation tests exercising the roll flows against mocked steps.

  The mock helpers below must produce step names that exactly match the
  steps the recipe runs, including the " (2)" suffixes the engine appends
  when a step (or nesting) name repeats.
  """
  default_packages = ['pkgA', 'pkgB', 'pkgC']
  debug_symbol_packages = ['pkgX/debug', 'pkgY/debug']
  platform_packages = ['pkgM/${platform}', 'pkgN/${platform}']
  default_lockfiles = ['integration/flower=integration/jiri.lock']
  default_properties = api.properties(
      project='integration',
      manifest='minimal',
      remote='https://fuchsia.googlesource.com',
      import_in='chromium/chromium',
      packages=default_packages,
      lockfiles=default_lockfiles,
      owners=['nobody@google.com', 'noreply@google.com'],
  )
  debug_symbols_properties = api.properties(
      project='integration',
      manifest='minimal',
      remote='https://fuchsia.googlesource.com',
      import_in='chromium/chromium',
      packages=default_packages + debug_symbol_packages,
      debug_symbol_gcs_paths=[['foo-bucket', ''], ['bar-bucket', 'namespace']],
      lockfiles=default_lockfiles,
  )
  platform_pkg_properties = api.properties(
      project='integration',
      manifest='minimal',
      remote='https://fuchsia.googlesource.com',
      import_in='fuchsia/prebuilts',
      packages=platform_packages,
      lockfiles=default_lockfiles,
      tag='git_revision',
  )

  def cipd_describe(pkg, instance_id, tags, older=False, backtracked=False):
    """Mock a `cipd describe` call that fetches a package's tags.

    Args:
      pkg (str): The name of the package.
      instance_id (str): The mock instance ID to return.
      tags (seq(str)): The mocked tags to return (generally each tag starts
        with "version:").
      older (bool): Whether we're describing an older instance of this
        package, after finding out its `ref` version is ahead of the `ref`
        versions of other packages.
      backtracked (bool): Whether this describe attempt happens after we've
        already hit an out-of-date package and backtracked.
    """
    # A second describe within the same nesting gets " (2)" on the step name;
    # after backtracking, it's the nesting (package) step that repeats.
    suffix = ' (2)' if older and not backtracked else ''
    nest_suffix = ' (2)' if backtracked else ''
    return api.step_data(
        'find common tag.{pkg}{nest_suffix}.cipd describe {pkg}{suffix}'.format(
            pkg=pkg, nest_suffix=nest_suffix, suffix=suffix),
        api.cipd.example_describe(
            package_name=pkg, version=instance_id, test_data_tags=tags))

  def cipd_search(pkg, tag, instances=None, backtracked=False):
    # Mock a `cipd search` call for instances of `pkg` carrying `tag`.
    nest_suffix = ' (2)' if backtracked else ''
    return api.step_data(
        'find common tag.{pkg}{nest_suffix}.cipd search {pkg} {tag}'.format(
            pkg=pkg, nest_suffix=nest_suffix, tag=tag),
        api.cipd.example_search(package_name=pkg, instances=instances),
    )

  def cipd_search_and_describe(pkg, instance_id, tag, backtracked=False):
    # Mock the search-for-older-instance path: a successful search followed
    # by a describe of the instance it found.
    return cipd_search(
        pkg,
        tag,
        instances=[instance_id],
        backtracked=backtracked,
    ) + cipd_describe(
        pkg,
        instance_id,
        [tag],
        older=True,
        backtracked=backtracked,
    )

  # Use this to assert that no commit is made, and thus that no roll CL is
  # created.
  def assert_no_roll():
    return api.post_process(DoesNotRunRE, 'commit')

  # yapf: disable
  yield (
      api.status_check.test('default_with_multipliers')
      + default_properties
      + api.properties(test_multipliers=[
          {'name': 'test1', 'total_runs': 5},
      ])
      + cipd_describe('pkgA', instance_id='A2', tags=['version:2'])
      + cipd_describe('pkgB', instance_id='B2', tags=['version:2'])
      + cipd_describe('pkgC', instance_id='C2', tags=['version:2', 'version:1'])
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='chromium-roller')
  )
  yield (
      api.status_check.test('last_package_out_of_date')
      + default_properties
      # These two packages share a version tag; all good so far.
      + cipd_describe('pkgA', instance_id='102', tags=['version:2'])
      + cipd_describe('pkgB', instance_id='202', tags=['version:2'])
      # But pkgC's latest instance doesn't share a tag with the other two
      # packages' latest instances.
      + cipd_describe('pkgC', instance_id='301', tags=['version:1'])
      # So we look search for versions of pkgC that *do* share a tag with the
      # other two packages' latest instances (but there are none).
      + cipd_search('pkgC', tag='version:2', instances=[])
      # So instead, we'll go back and see if the previous packages have
      # instances that correspond to the pkgC's package's latest version.
      + cipd_search_and_describe('pkgA', tag='version:1', instance_id='101', backtracked=True)
      + cipd_search_and_describe('pkgB', tag='version:1', instance_id='201', backtracked=True)
      # We succeed in finding such instances, so there should be a roll.
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )
  yield (
      api.status_check.test('first_package_out_of_date')
      + default_properties
      + cipd_describe('pkgA', instance_id='A1', tags=['version:1'])
      # No shared tag with previous package.
      # So we look search for versions of this package that *do* share a tag
      # with the first package's latest instance, and we find one.
      + cipd_describe('pkgB', instance_id='B2', tags=['version:2'])
      + cipd_search_and_describe('pkgB', tag='version:1', instance_id='B1')
      # No shared tag with previous chosen packages.
      # So we look search for versions of the webrunner package that *do* share
      # a tag with the other two package's latest instances, and find one.
      + cipd_describe('pkgC', instance_id='C2', tags=['version:2'])
      + cipd_search_and_describe('pkgC', tag='version:1', instance_id='C1')
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )
  yield (
      api.status_check.test('packages_requiring_ref')
      + default_properties
      + api.properties(packages_requiring_ref=['pkgA', 'pkgB'])
      + cipd_describe('pkgA', instance_id='A2', tags=['version:1'])
      + cipd_describe('pkgB', instance_id='B2', tags=['version:1'])
      + cipd_describe('pkgC', instance_id='C1', tags=['version:1'])
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )
  yield (
      # If there aren't any tags shared by all `ref` instances of the
      # `packages_requiring_ref` packages, then there's no valid tag that
      # we can pin the package to, so the build should fail.
      api.status_check.test('inconsistent_packages_requiring_ref', status='failure')
      + default_properties
      + api.properties(packages_requiring_ref=['pkgA', 'pkgB'])
      + cipd_describe('pkgA', instance_id='A2', tags=['version:1'])
      + cipd_describe('pkgB', instance_id='B2', tags=['version:2'])
      + assert_no_roll()
  )
  yield (
      # The packages requiring `ref` share a tag, but the other package doesn't
      # have any instance with that tag. So we should abort rather than
      # backtracking, since pkgA and pkgB MUST be pinned to the `ref`
      # instances.
      api.status_check.test('cant_use_required_ref', status='failure')
      + assert_no_roll()
      + default_properties
      + api.properties(packages_requiring_ref=['pkgA', 'pkgB'])
      + cipd_describe('pkgA', instance_id='A2', tags=['version:2'])
      + cipd_describe('pkgB', instance_id='B2', tags=['version:2'])
      + cipd_describe('pkgC', instance_id='301', tags=['version:1'])
      + cipd_search('pkgC', tag='version:2', instances=[])
  )
  yield (
      api.status_check.test('inconsistent_versions', status='failure')
      + assert_no_roll()
      + default_properties
      # These two packages share a version tag; all good so far.
      + cipd_describe('pkgA', instance_id='A2', tags=['version:2'])
      + cipd_describe('pkgB', instance_id='B2', tags=['version:2'])
      # But pkgC's latest instance doesn't share a tag with the other two
      # packages' latest instances.
      + cipd_describe('pkgC', instance_id='C1', tags=['version:1'])
      # So we look search for versions of pkgC that *do* share a tag with the
      # other two packages' latest instances (but there are none).
      + cipd_search('pkgC', tag='version:2', instances=[])
      # So we'll go back and see if the previous packages have instances that
      # correspond to the pkgC's latest version. But this package doesn't have
      # an instance that matches that version, which should trigger a failure.
      + cipd_search('pkgA', tag='version:1', instances=[], backtracked=True)
  )
  yield (
      api.status_check.test('noop')
      + default_properties
      + cipd_describe('pkgA', instance_id='A1', tags=['version:1'])
      + cipd_describe('pkgB', instance_id='B1', tags=['version:1'])
      + cipd_describe('pkgC', instance_id='C1', tags=['version:1'])
      + api.step_data('jiri edit', api.json.output({'packages': []}))
  )
  yield (
      api.status_check.test('default_with_platform')
      + platform_pkg_properties
      + cipd_describe('pkgM/${platform}', instance_id='M1', tags=['git_revision:a'])
      + cipd_describe('pkgN/${platform}', instance_id='N1', tags=['git_revision:a'])
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )
  yield (
      api.status_check.test('default_platform_not_resolved')
      + platform_pkg_properties
      + cipd_describe('pkgM/${platform}', instance_id='M1', tags=['git_revision:a'])
      + cipd_describe('pkgN/${platform}', instance_id='N1', tags=['git_revision:a'])
      + api.step_data('jiri resolve', api.raw_io.stream_output(''))
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )

  def fetch_debug_symbols(pkg, attributes=None):
    # Mock reading a package's manifest element (with optional attributes)
    # inside the 'fetch and upload debug symbols' nesting.
    test_output = {'path': pkg}
    if attributes:
      test_output['attributes'] = attributes
    return api.jiri.read_manifest_element(
        api,
        'chromium/chromium',
        'package',
        pkg,
        test_output=test_output,
        nesting='fetch and upload debug symbols',
    )

  yield (
      api.status_check.test('with_debug_symbols')
      + debug_symbols_properties
      + cipd_describe('pkgA', instance_id='A2', tags=['version:2'])
      + cipd_describe('pkgB', instance_id='B2', tags=['version:2'])
      + cipd_describe('pkgC', instance_id='C2', tags=['version:2'])
      + cipd_describe('pkgX/debug', instance_id='X2', tags=['version:2'])
      + cipd_describe('pkgY/debug', instance_id='Y2', tags=['version:2'])
      + fetch_debug_symbols(
          'pkgX/debug', attributes='debug-symbols,debug-symbols-amd64')
      + fetch_debug_symbols(
          'pkgY/debug', attributes='debug-symbols,debug-symbols-amd64')
      + fetch_debug_symbols('pkgA')
      + fetch_debug_symbols('pkgB')
      + fetch_debug_symbols('pkgC')
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='chromium-roller')
  )
  # yapf: enable