# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for rolling CIPD prebuilts into Fuchsia."""
import re
from google.protobuf import json_format as jsonpb
from recipe_engine.post_process import DoesNotRunRE
from PB.go.chromium.org.luci.buildbucket.proto import build as build_pb2
from PB.recipes.fuchsia.fuchsia_cipd_roller import InputProperties
from RECIPE_MODULES.fuchsia.utils import memoize
PYTHON_VERSION_COMPATIBILITY = "PY3"
DEPS = [
"fuchsia/auto_roller",
"fuchsia/buildbucket_util",
"fuchsia/checkout",
"fuchsia/debug_symbols",
"fuchsia/gerrit",
"fuchsia/jiri",
"fuchsia/utils",
"recipe_engine/buildbucket",
"recipe_engine/cipd",
"recipe_engine/context",
"recipe_engine/json",
"recipe_engine/path",
"recipe_engine/properties",
"recipe_engine/step",
"recipe_engine/time",
]
PROPERTIES = InputProperties
COMMIT_MESSAGE_TITLE = (
"""{prepend}[{type}] {type_descr} {roller} packages to {version}"""
)
COMMIT_MESSAGE_DO_NOT_SUBMIT = "DO NOT SUBMIT "
COMMIT_MESSAGE = """
{packages}
From: {old_version}
To: {version}
{multiply}
{divider}
"""
CIPD_URL = "https://chrome-infra-packages.appspot.com/p/{package}/+/{version}"
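# e.g. "https://chrome-infra-packages.appspot.com/p/pkgA/+/version:2" for
# package "pkgA" at version "version:2".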
# cipd will return an error message matching this regex if the requested
# version (identified by either a ref or a tag) of the package doesn't exist.
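# For example, "no such ref" and "no such tag" errors match and are treated
# as recoverable below, while an error like "no such package" does not match
# and is propagated as a fatal error.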
MISSING_VERSION_REGEX = re.compile(r"no such (ref|tag)")
# The CIPD CLI will substitute this string in a package name for the name of
# the platform that the CLI is running on. Some Jiri manifests use this feature
# to avoid requiring multiple manifest elements for packages that support
# multiple platforms.
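# For example, "pkgA/${platform}" expands to "pkgA/linux-amd64" when the CLI
# runs on linux-amd64.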
CIPD_PLATFORM_MAGIC_STRING = "${platform}"
@memoize
def get_tags_from_cipd(api, cipd_pkg, tag_prefix, version):
"""Returns all tags for a given CIPD package instance.
Args:
api (RecipeApi): The recipe api to use.
cipd_pkg (str): The name of the CIPD package.
tag_prefix (str): The tag prefix to filter by.
version (str): The ref or a tag of the desired instance.
Returns:
The package's tags that start with the given prefix, or an empty set if
the specified version wasn't found.
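
    For example, if the instance is tagged ["version:1.2",
    "git_revision:abc"] and `tag_prefix` is "version", this returns
    {"version:1.2"}.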
"""
try:
all_tags = api.cipd.describe(cipd_pkg, version).tags
except api.cipd.Error as e:
# If the tag/ref wasn't found, return an empty set. If another error
# occurred, propagate it.
match = MISSING_VERSION_REGEX.search(e.reason)
if not match:
raise
describe_step = api.step.active_result
describe_step.presentation.step_text = match.group()
# These errors are generally recoverable, so mark the step as passed so
# it doesn't clutter up the Milo UI unnecessarily.
describe_step.presentation.status = "SUCCESS"
return set()
filtered_tags = [t for t in all_tags if t.tag.startswith(tag_prefix)]
# Sort tags so that we'll present the newest ones first.
filtered_tags.sort(key=lambda t: t.registered_ts, reverse=True)
tag_names = [t.tag for t in filtered_tags]
step_text_lines = tag_names
max_step_text_lines = 10
if len(step_text_lines) > max_step_text_lines:
num_excluded = len(step_text_lines) - max_step_text_lines
step_text_lines = step_text_lines[:max_step_text_lines]
step_text_lines.append("(%d more)" % num_excluded)
api.step.active_result.presentation.step_text = "".join(
"\n" + line for line in step_text_lines
)
return set(tag_names)
def find_common_tags(api, packages, packages_requiring_ref, tag, ref):
"""Returns the tags shared by the latest possible instances of all the
packages.
The chosen instances of all of the packages listed in
    `packages_requiring_ref` will have the given `ref` (or else a
    `StepFailure` will be raised in the case where the `ref` instances of
    the `packages_requiring_ref` don't have a shared tag). If
`packages_requiring_ref` is empty, then at least one (and generally most,
if not all) of the chosen instances will have the given `ref`.
The only time that some of the chosen instances won't currently have that
ref is when some of the packages have been updated to a later version and
some haven't (e.g. if another roller that does the updating is running in
parallel with this recipe). In that case, we'll choose older instances of
the updated packages, whose versions correspond to the `ref` instances of
the non-updated packages.
Args:
api (RecipeApi): The recipe API to use.
packages (seq(str)): The names of the packages to search.
packages_requiring_ref (seq(str)): The subset of packages that MUST be
pinned to the specified `ref`.
tag (str): The prefix to filter by when searching for a common tag. E.g.,
if `tag` is "version", then the common tag returned will start with
"version".
ref (str): The CIPD ref that we should try to pin the packages to, e.g.
"latest".
Returns:
A set of tags such that:
- Either:
- each of the packages in `packages_requiring_ref` has an instance
with the specified `ref` that has all those tags.
- OR if `packages_requiring_ref` is empty, at least one package
in `packages` has an instance with the specified `ref` that has
all those tags.
- AND every other package in `packages` has *some* instance with all
of those tags (although not necessarily the instance with the
specified `ref`).
Conceptually, the packages can be considered "in sync" as long as
they're all pinned to one of these tags.
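
    Worked example (hypothetical packages and tags): if pkgA's `ref`
    instance is tagged {"version:2"} but pkgB's `ref` instance is only
    tagged {"version:1"}, we check whether pkgB has *some* instance tagged
    "version:2". If it does, {"version:2"} is returned; otherwise we
    backtrack and try to pin every package to a tag of pkgB's `ref`
    instance (e.g. "version:1") instead.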
"""
# The set of packages that don't need to be pinned to `ref`.
# We use a list comprehension instead of set operations here to preserve the
# ordering of the packages, which makes writing tests much easier.
flexible_packages = [pkg for pkg in packages if pkg not in packages_requiring_ref]
index = 0
if packages_requiring_ref:
common_tags = set.intersection(
*(
get_tags_from_cipd(api, pkg, tag, ref)
for pkg in sorted(packages_requiring_ref)
)
)
if not common_tags:
raise api.step.StepFailure("Unable to find common tag to roll.")
else:
# Fetch the `ref` instance of the first package that actually has an
# instance with `ref`. We'll consider it to be the "representative"
# instance; we'll try to find a subset of its tags that are shared by the
# `ref` instances of all the other packages.
# Note that we'll have some duplicated `get_tags_from_cipd` calls in the
# main while loop below, which is ok since that function is memoized.
common_tags = set()
for pkg in flexible_packages:
common_tags = get_tags_from_cipd(api, pkg, tag, ref)
if common_tags:
break
if not common_tags:
raise api.step.StepFailure(
"none of the packages has an instance with the %r ref" % ref
)
found_outdated_package = False
while index < len(flexible_packages):
pkg = flexible_packages[index]
index += 1
tags = get_tags_from_cipd(api, pkg, tag, ref)
intersection = common_tags.intersection(tags)
if intersection:
# This package's `ref` instance shares some tags with the previous
# packages' `ref` instances, so we still have some common tag
# candidates and can move on to the next package.
common_tags = intersection
continue
# Else, the instance of `pkg` at `ref` does not share any tags with all
# the previous packages' fetched instances. We'll see if `pkg` has any
# (probably older) instance that has a shared tag with all the previous
# packages' chosen instances.
for common_tag in common_tags:
older_instance_tags = get_tags_from_cipd(api, pkg, tag, common_tag)
if older_instance_tags:
break
if older_instance_tags:
# The representative package instance has not yet been updated to
# this package's `ref` version (or at least the representative
# hadn't been updated at the time that we fetched its tags), but
            # there is an older instance of this package that *does* match
            # the version of the representative instance, so we'll use that
            # one instead.
# NOTE: There is a chance that `pkg` was actually
# out of date relative to the representative package, but got
# updated to the same version as the previous packages in the time
# between "cipd describe" and "cipd search", in which case the
# "older" instance will actually be newer than the original
# instance and we will end up with the same result as if `pkg` had
# already been up-to-date when we first "cipd described" it.
common_tags.intersection_update(older_instance_tags)
assert common_tags # should contain at least `common_tag`
elif not found_outdated_package and not packages_requiring_ref:
# This package has not yet been updated to the version of the
# representative package instance. Go back and start over with *this*
# package as the new representative *unless* we have some packages that
# must be pinned to `ref`, in which case we've already hit a dead end
# as it will be impossible to satisfy that requirement.
found_outdated_package = True
common_tags = tags
index = 0
api.step.empty(
"package out of date",
step_text="\nbacktracking to check older versions of previous packages",
)
else:
# We should "backtrack" at most once (i.e., find a package whose `ref`
# instance version doesn't correspond to a version of all the other
# packages), or zero times if there are `packages_requiring_ref`. If it
# happens twice, there's probably something weird going on and we won't
# be able to reconcile the tags by continuing.
raise api.step.StepFailure("unable to find common tag to roll")
return common_tags
def manifest_up_to_date(api, manifest, packages, candidate_versions):
"""Determines whether every package in the manifest is pinned to one of
the candidate versions.
Args:
manifest (str): The path to the jiri manifest where the packages are
pinned.
packages (seq of str): The names of the packages to check.
candidate_versions (set of str): Each package must be pinned to one
of these versions for it to be considered up-to-date. If any
package is pinned to a version that's *not* in this set, the
function will return False.
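
    For example, if every package is pinned to "version:1" and
    `candidate_versions` is {"version:1", "version:2"}, this returns True.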
"""
for package in packages:
element = api.jiri.read_manifest_element(
manifest,
name="current version of %s" % package,
element_type="package",
element_name=package,
step_test_data=lambda: api.json.test_api.output_stream(
{"version": "version:0"}
),
)
current_version = element["version"]
api.step.active_result.presentation.step_text = current_version
if current_version not in candidate_versions:
return False
return True
def check_packages_not_stale(api, manifest, packages, max_stale_days):
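    """Checks that no package's pinned instance is older than the limit.

    Returns False if any package's pinned instance was registered more than
    `max_stale_days` ago (a hint that upstream may have stopped publishing),
    True otherwise. A non-positive `max_stale_days` disables the check.
    """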
if max_stale_days <= 0:
return True
for package in packages:
element = api.jiri.read_manifest_element(
manifest,
name="current version of %s" % package,
element_type="package",
element_name=package,
step_test_data=lambda: api.json.test_api.output_stream(
{"version": "version:0"}
),
)
pkg_desc = api.cipd.describe(package, element["version"])
if api.time.time() - pkg_desc.registered_ts > max_stale_days * 24 * 60 * 60:
return False
return True
def get_platform_specific_packages(api, manifest, package):
"""Resolve the platform-specific versions of a package name.
Uses jiri to determine the platform-specific versions that are included
in the manifest.
For example:
- If the package doesn't have platform-specific versions:
"pkgA" -> ["pkgA"]
- If the manifest specifies that the package is supported on
mac-amd64 and linux-amd64:
"pkgA/${platform}" -> ["pkgA/mac-amd64", "pkgA/linux-amd64"]
"""
if CIPD_PLATFORM_MAGIC_STRING not in package:
return [package]
package_def = api.jiri.read_manifest_element(manifest, "package", package)
platforms = [
p.strip() for p in package_def.get("platforms", "").split(",") if p.strip()
]
# Jiri has default platforms that it uses for any platform-dependent
    # package whose manifest element doesn't specify a `platforms` field. So
# Jiri should always return a non-empty list of platforms as long as the
# package name contains `CIPD_PLATFORM_MAGIC_STRING`. This is just a safety
# check to ensure we exit early with a clear error message if that
# assumption is violated.
assert platforms, (
"package %s is platform-dependent but its jiri manifest doesn't specify any "
"platforms"
) % package
return [
package.replace(CIPD_PLATFORM_MAGIC_STRING, platform) for platform in platforms
]
def append_urls(packages, old_version, new_version):
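    """Formats a "{package} old:{url} new:{url}" line for each package,
    linking to the old and new instances in the CIPD web UI."""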
package_line = "{package} old:{old} new:{new}"
packages_with_urls = []
for package in packages:
packages_with_urls.append(
package_line.format(
old=CIPD_URL.format(package=package, version=old_version),
new=CIPD_URL.format(package=package, version=new_version),
package=package,
)
)
return packages_with_urls
def generate_message(
builder_name, packages, old_version, version, build_id, multiply, divider, dry_run
):
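    """Assembles the commit message title and body for the roll CL.

    The roller name is derived from the builder name (e.g.
    "chromium-dryrun-roller" -> "chromium"), and dry runs get a
    "DO NOT SUBMIT" prefix plus a "dryrun" tag in the title so the CL
    can't be landed accidentally.
    """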
roller_string = builder_name.replace("-roller", "").replace("-dryrun", "")
if dry_run:
message_title = COMMIT_MESSAGE_TITLE.format(
prepend=COMMIT_MESSAGE_DO_NOT_SUBMIT,
type="dryrun",
type_descr="Dry run",
roller=roller_string,
version=version,
)
else:
message_title = COMMIT_MESSAGE_TITLE.format(
prepend="",
type="roll",
type_descr="Roll",
roller=roller_string,
version=version,
)
message_body = COMMIT_MESSAGE.format(
roller=roller_string,
packages=packages,
old_version=old_version,
version=version,
builder=builder_name,
build_id=build_id,
multiply=multiply,
divider=divider,
)
return "".join([message_title, message_body])
def RunSteps(api, props):
props.debug_symbol_attribute = props.debug_symbol_attribute or "debug-symbols"
props.tag = props.tag or "version"
props.ref = props.ref or "latest"
if props.owners:
api.step.empty("owners", step_text=", ".join(props.owners))
checkout_dir = api.path["start_dir"]
api.checkout.with_options(
path=checkout_dir,
manifest=props.checkout_manifest,
remote=props.remote,
project=props.project,
# Ignore the build input; we should always check out the manifest
# repository at HEAD before updating the manifest to reduce the
# likelihood of merge conflicts.
build_input=build_pb2.Build.Input(),
use_lock_file=True,
)
with api.context(cwd=checkout_dir):
project_json = api.jiri.project(
projects=[props.project],
test_data=[{"path": str(checkout_dir.join(props.project))}],
).json.output[0]
project_dir = api.path.abs_to_path(project_json["path"])
packages_requiring_ref = set(props.packages_requiring_ref)
with api.step.nest("resolve package platforms"), api.context(cwd=project_dir):
unresolved_packages_by_manifest = props.packages_by_manifest
packages_by_manifest = {}
for manifest, packages in unresolved_packages_by_manifest.items():
manifest_resolved_packages = []
for package in packages:
resolved_packages = get_platform_specific_packages(
api, manifest, package
)
manifest_resolved_packages.extend(resolved_packages)
if package in packages_requiring_ref:
packages_requiring_ref.remove(package)
packages_requiring_ref.update(resolved_packages)
packages_by_manifest[manifest] = manifest_resolved_packages
all_packages = sorted(
p for packages in packages_by_manifest.values() for p in packages
)
assert packages_requiring_ref.issubset(
all_packages
), "`packages_requiring_ref` must be a subset of the specified packages"
with api.step.nest("find common tags", status="last"):
candidate_versions = find_common_tags(
api, all_packages, packages_requiring_ref, props.tag, props.ref
)
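        # Candidate tags are compared as strings, so this picks the
        # lexicographically greatest one.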
version = sorted(candidate_versions)[-1]
with api.step.nest("edit manifests") as presentation, api.context(cwd=project_dir):
changed_packages = []
# We have to use the non-platform-specific packages here because those
# are the names that are in the manifests.
for manifest, packages in sorted(unresolved_packages_by_manifest.items()):
if manifest_up_to_date(api, manifest, packages, candidate_versions):
if check_packages_not_stale(
api, manifest, packages, props.max_stale_days
):
continue
raise api.step.StepFailure(
"packages in manifest %s are stale; nothing to roll for over %d days"
% (manifest, props.max_stale_days)
)
changes = api.jiri.edit_manifest(
manifest,
packages=[(package, version) for package in packages],
name="jiri edit %s" % manifest,
)
changed_packages.extend(changes["packages"])
if not changed_packages:
presentation.step_text = "manifest up-to-date; nothing to roll"
return api.auto_roller.nothing_to_roll()
old_version = changed_packages[0]["old_version"]
# Update the lockfiles.
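        # Each entry maps a manifest to its lockfile, e.g.
        # "integration/flower=integration/jiri.lock".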
for lock_entry in props.lockfiles:
fields = lock_entry.split("=")
manifest = fields[0]
lock = fields[1]
api.jiri.resolve(
local_manifest=True,
output=lock,
manifests=[manifest],
)
packages_with_urls = append_urls(sorted(all_packages), old_version, version)
multiply = ""
if props.test_multipliers:
multiply = "\nMULTIPLY: `%s`\n" % api.json.dumps(
[
jsonpb.MessageToDict(m, preserving_proto_field_name=True)
for m in props.test_multipliers
],
indent=2,
)
message = generate_message(
builder_name=api.buildbucket.builder_name,
packages="\n".join(packages_with_urls),
old_version=old_version,
version=version,
build_id=api.buildbucket_util.id,
multiply=multiply,
divider=props.commit_divider,
dry_run=props.dry_run,
)
    # Initialized here so the postroll upload below can't raise a NameError
    # when only postroll buckets are configured.
    build_id_dirs = []
    if props.preroll_debug_symbol_gcs_buckets:
with api.step.nest("preroll fetch and upload debug symbols"), api.context(
cwd=project_dir
):
debug_symbol_packages = []
# Determine which packages are debug symbol packages.
for manifest, packages in unresolved_packages_by_manifest.items():
for package in packages:
package_def = api.jiri.read_manifest_element(
manifest=manifest,
element_type="package",
element_name=package,
)
attributes = package_def.get("attributes", "").split(",")
if props.debug_symbol_attribute in attributes:
debug_symbol_packages.append(package)
            # Attempt to populate the preroll GCS buckets with debug symbols.
            # This step checks that the debug symbols are valid (e.g. that
            # .debug_info sections are present) and helps symbolize stack
            # traces from the packages under roll.
build_id_dirs = api.debug_symbols.fetch_and_upload(
packages=debug_symbol_packages,
version=version,
buckets=props.preroll_debug_symbol_gcs_buckets,
)
# Land the changes.
change = api.auto_roller.attempt_roll(
api.gerrit.host_from_remote_url(props.remote),
gerrit_project=props.project,
repo_dir=project_dir,
commit_message=message,
cl_notify_option=props.cl_notify_option,
create_unique_id=props.create_unique_change_id,
dry_run=props.dry_run,
force_submit=props.force_submit,
roller_owners=props.owners,
include_tryjobs=props.include_tryjobs,
)
rolled = change and change.success
# If roll succeeded, upload any debug symbols that were rolled.
if rolled and props.postroll_debug_symbol_gcs_buckets:
with api.context(cwd=project_dir):
api.debug_symbols.upload(
step_name="postroll upload debug symbols",
build_id_dirs=build_id_dirs,
buckets=props.postroll_debug_symbol_gcs_buckets,
)
return api.auto_roller.raw_result(
change, success_text=(None if props.dry_run else "Rolled to %s" % version)
)
def GenTests(api):
default_packages = ["pkgA", "pkgB", "pkgC"]
def properties(**kwargs):
props = {
"project": "integration",
"checkout_manifest": "minimal",
"remote": "https://fuchsia.googlesource.com",
"packages_by_manifest": {"chromium/chromium": default_packages},
"lockfiles": ["integration/flower=integration/jiri.lock"],
"owners": ["nobody@google.com", "noreply@google.com"],
"commit_divider": "BEGIN_FOOTER",
}
props.update(kwargs)
return api.properties(**props)
def cipd_describe(
pkg, instance_id, tags, older=False, backtracked=False, error=None
):
"""Mock a `cipd describe` call that fetches a package's tags.
Args:
pkg (str): The name of the package.
instance_id (str): The mock instance ID to return. If None, the step
will be considered to have failed.
tags (seq(str)): The mocked tags to return (generally each tag starts
with "version:").
older (bool): Whether we're describing an older instance of this
package, after finding out its `ref` version is ahead of the `ref`
versions of other packages.
backtracked (bool): Whether this describe attempt happens after we've
already hit an out-of-date package and backtracked.
error (str or None): If set, the mock data will be an error
response with this value in the "error" field.
"""
name = "find common tags.cipd describe {pkg}{suffix}".format(
pkg=pkg, suffix=" (2)" if backtracked or older else ""
)
if error is not None:
return api.step_data(name, api.json.output({"error": error}), retcode=1)
return api.step_data(
name,
api.cipd.example_describe(
package_name=pkg, version=instance_id, test_data_tags=tags
),
)
def failed_cipd_describe(pkg, error="no such ref", **kwargs):
return cipd_describe(pkg, instance_id=None, error=error, tags=[], **kwargs)
def cipd_describe_tag(pkg, instance_id, tag):
return cipd_describe(pkg, instance_id, [tag], older=True, backtracked=True)
def check_current_version(pkg, version):
return api.jiri.read_manifest_element(
element_name=pkg,
# read_manifest_element requires `manifest` and `element_type`
# arguments even though they're no-ops for test data.
manifest="",
element_type="package",
test_output={"version": version},
step_name="edit manifests.current version of %s" % pkg,
)
def get_platforms(pkg, platforms):
return api.jiri.read_manifest_element(
element_name=pkg,
# read_manifest_element requires `manifest` and `element_type`
# arguments even though they're no-ops for test data.
manifest="",
element_type="package",
test_output={"name": pkg, "platforms": ",".join(platforms)},
step_name="resolve package platforms.read manifest for %s" % pkg,
)
# Use this to assert that no commit is made, and thus that no roll CL is
# created.
def assert_no_roll():
return api.post_process(DoesNotRunRE, r".*commit.*")
yield (
api.buildbucket_util.test("default_with_multipliers", builder="chromium-roller")
+ properties(test_multipliers=[{"name": "test1", "total_runs": 5}])
+ cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
+ cipd_describe(
# Cover presentation of large sets of tags.
"pkgC",
instance_id="C2",
tags=["version:%d" % i for i in range(1, 20)],
)
+ api.auto_roller.success()
)
yield (
api.buildbucket_util.test("one_manifest_up_to_date", builder="chromium-roller")
+ properties(
packages_by_manifest={
"chromium/chromium": default_packages[:1],
"chromium/chromium-other": default_packages[1:],
},
test_multipliers=[{"name": "test1", "total_runs": 5}],
)
+ cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
+ cipd_describe("pkgC", instance_id="C2", tags=["version:2", "version:1"])
# pkgA is already up-to-date and is the only package in its manifest,
# so that manifest need not be updated.
+ check_current_version("pkgA", "version:2")
+ api.auto_roller.success()
)
yield (
api.buildbucket_util.test(
"stale_packages", builder="chromium-roller", status="failure"
)
+ properties(
packages_by_manifest={
"chromium/chromium": default_packages[:1],
"chromium/chromium-other": default_packages[1:2],
},
max_stale_days=1,
)
+ cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
# pkgA and pkgB are already up-to-date so the manifests don't need to
# be updated.
+ check_current_version("pkgA", "version:2")
+ check_current_version("pkgB", "version:2")
+ api.time.seed(1337000000)
# pkgA is not stale, so we move on to check pkgB.
+ api.step_data(
"edit manifests.cipd describe pkgA",
api.cipd.example_describe(
package_name="pkgA", version="version:2", tstamp=1337000000
),
)
# pkgB is stale so we return a failure.
+ api.step_data(
"edit manifests.cipd describe pkgB",
api.cipd.example_describe(
package_name="pkgB",
version="version:2",
tstamp=1337000000 - 24 * 60 * 60,
),
)
+ assert_no_roll()
)
yield (
api.buildbucket_util.test("last_package_out_of_date", builder="tools-roller")
+ properties()
# These two packages share a version tag; all good so far.
+ cipd_describe("pkgA", instance_id="102", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="202", tags=["version:2"])
# But pkgC's latest instance doesn't share a tag with the other two
# packages' latest instances.
+ cipd_describe("pkgC", instance_id="301", tags=["version:1"])
        # So we search for versions of pkgC that *do* share a tag with the
# other two packages' latest instances (but there are none).
+ failed_cipd_describe("pkgC", older=True)
# So instead, we'll go back and see if the previous packages have
        # instances that correspond to pkgC's latest version.
+ cipd_describe_tag("pkgA", tag="version:1", instance_id="101")
+ cipd_describe_tag("pkgB", tag="version:1", instance_id="201")
# We succeed in finding such instances, so there should be a roll.
+ api.auto_roller.success()
)
yield (
api.buildbucket_util.test("first_package_out_of_date", builder="tools-roller")
+ properties()
+ cipd_describe("pkgA", instance_id="A1", tags=["version:1"])
# No shared tag with previous package.
        # So we search for versions of this package that *do* share a tag
# with the first package's latest instance, and we find one.
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
+ cipd_describe_tag("pkgB", tag="version:1", instance_id="B1")
# No shared tag with previous chosen packages.
        # So we search for versions of pkgC that *do* share a tag with the
        # other two packages' latest instances, and we find one.
+ cipd_describe("pkgC", instance_id="C2", tags=["version:2"])
+ cipd_describe_tag("pkgC", tag="version:1", instance_id="C1")
+ api.auto_roller.success()
)
yield (
api.buildbucket_util.test("packages_requiring_ref", builder="tools-roller")
+ properties(packages_requiring_ref=["pkgA", "pkgB"])
+ cipd_describe("pkgA", instance_id="A2", tags=["version:1"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:1"])
+ cipd_describe("pkgC", instance_id="C1", tags=["version:1"])
+ api.auto_roller.success()
)
yield (
# If there aren't any tags shared by all `ref` instances of the
# `packages_requiring_ref` packages, then there's no valid tag that
# we can pin the package to, so the build should fail.
api.buildbucket_util.test(
"inconsistent_packages_requiring_ref", status="failure"
)
+ properties(packages_requiring_ref=["pkgA", "pkgB"])
+ cipd_describe("pkgA", instance_id="A2", tags=["version:1"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
+ assert_no_roll()
)
yield (
# The packages requiring `ref` share a tag, but the other package doesn't
# have any instance with that tag. So we should abort rather than
# backtracking, since pkgA and pkgB MUST be pinned to the `ref`
# instances.
api.buildbucket_util.test("cant_use_required_ref", status="failure")
+ properties(packages_requiring_ref=["pkgA", "pkgB"])
+ cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
+ cipd_describe("pkgC", instance_id="301", tags=["version:1"])
+ failed_cipd_describe("pkgC", older=True, error="no such tag")
+ assert_no_roll()
)
yield (
# If only one flexible package is missing the ref, we can just pin it to
# the same tag as the other packages that do have the ref.
api.buildbucket_util.test("package_missing_ref", builder="tools-roller")
+ properties(packages_requiring_ref=["pkgA", "pkgB"])
+ cipd_describe("pkgA", instance_id="A2", tags=["version:1"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:1"])
+ failed_cipd_describe("pkgC")
+ cipd_describe_tag("pkgC", tag="version:1", instance_id="C1")
+ api.auto_roller.success()
)
yield (
# If no package has the specified ref, then we have no starting instance
# to initialize our common tags.
api.buildbucket_util.test(
"no_package_has_ref", builder="tools-roller", status="failure"
)
+ properties()
+ failed_cipd_describe("pkgA")
+ failed_cipd_describe("pkgB")
+ failed_cipd_describe("pkgC")
+ assert_no_roll()
)
yield (
api.buildbucket_util.test("inconsistent_versions", status="failure")
+ properties()
# These two packages share a version tag; all good so far.
+ cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
# But pkgC's latest instance doesn't share a tag with the other two
# packages' latest instances.
+ cipd_describe("pkgC", instance_id="C1", tags=["version:1"])
        # So we search for a version of pkgC that *does* share a tag with
# the other two packages' latest instances (but it doesn't exist).
+ failed_cipd_describe("pkgC", older=True, error="no such tag")
# So we'll go back and see if the previous packages have instances that
        # correspond to pkgC's latest version. But pkgA doesn't have an
        # instance that matches that version, which should trigger a failure.
+ failed_cipd_describe("pkgA", backtracked=True)
+ assert_no_roll()
)
yield (
api.buildbucket_util.test("noop")
+ properties()
+ cipd_describe("pkgA", instance_id="A1", tags=["version:1"])
+ cipd_describe("pkgB", instance_id="B1", tags=["version:1"])
+ cipd_describe("pkgC", instance_id="C1", tags=["version:1"])
+ api.step_data(
"edit manifests.jiri edit chromium/chromium",
api.json.output({"packages": []}),
)
)
yield (
api.buildbucket_util.test("default_with_platform", builder="tools-roller")
+ properties(
packages_by_manifest={
"fuchsia/prebuilts": ["pkgM/${platform}", "pkgN/${platform}"]
},
tag="git_revision",
)
+ get_platforms("pkgM/${platform}", ["mac-amd64", "linux-amd64"])
+ get_platforms("pkgN/${platform}", ["linux-amd64"])
+ cipd_describe(
"pkgM/mac-amd64",
instance_id="M1",
tags=["git_revision:a", "git_revision:b"],
)
+ cipd_describe("pkgM/linux-amd64", instance_id="M2", tags=["git_revision:a"])
+ cipd_describe("pkgN/linux-amd64", instance_id="N1", tags=["git_revision:a"])
+ api.auto_roller.success()
)
def fetch_debug_symbols(pkg, attributes=None):
test_output = {"path": pkg}
if attributes:
test_output["attributes"] = attributes
return api.jiri.read_manifest_element(
"chromium/chromium",
"package",
pkg,
test_output=test_output,
nesting="preroll fetch and upload debug symbols",
)
yield (
api.buildbucket_util.test("with_debug_symbols", builder="chromium-roller")
+ properties(
packages_by_manifest={
"chromium/chromium": default_packages
+ ["pkgX/debug/${platform}", "pkgY/debug"]
},
preroll_debug_symbol_gcs_buckets=["foo-bucket", "bar-bucket"],
postroll_debug_symbol_gcs_buckets=["baz-bucket"],
)
+ get_platforms("pkgX/debug/${platform}", ["linux-amd64"])
+ cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
+ cipd_describe("pkgC", instance_id="C2", tags=["version:2"])
+ cipd_describe("pkgX/debug/linux-amd64", instance_id="X2", tags=["version:2"])
+ cipd_describe("pkgY/debug", instance_id="Y2", tags=["version:2"])
+ fetch_debug_symbols(
"pkgX/debug/${platform}", attributes="debug-symbols,debug-symbols-amd64"
)
+ fetch_debug_symbols(
"pkgY/debug", attributes="debug-symbols,debug-symbols-amd64"
)
+ fetch_debug_symbols("pkgA")
+ fetch_debug_symbols("pkgB")
+ fetch_debug_symbols("pkgC")
+ api.auto_roller.success()
)
yield (
api.buildbucket_util.test("dry_run", builder="chromium-dryrun-roller")
+ properties(dry_run=True)
+ cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
+ cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
+ cipd_describe("pkgC", instance_id="C2", tags=["version:2", "version:1"])
+ api.auto_roller.dry_run_success()
)
yield (
api.buildbucket_util.test("no_such_package", status="failure")
+ properties()
+ failed_cipd_describe("pkgA", error="no such package")
# The roller should exit immediately if it encounters an unexpected error.
+ api.post_process(DoesNotRunRE, "pkgB")
+ api.post_process(DoesNotRunRE, "pkgC")
)