blob: a0d565cd476f2635b5cee0fcedffc6ff7d0276f9 [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for rolling CIPD prebuilts into Fuchsia."""
from google.protobuf import json_format as jsonpb
from recipe_engine.post_process import DoesNotRunRE
from PB.go.chromium.org.luci.buildbucket.proto import build as build_pb2
from PB.recipes.fuchsia.fuchsia_cipd_roller import InputProperties
# This recipe runs under Python 3 only.
PYTHON_VERSION_COMPATIBILITY = "PY3"

# Recipe modules this recipe depends on; the recipe engine injects them as
# attributes of `api` in RunSteps/GenTests.
DEPS = [
    "fuchsia/auto_roller",
    "fuchsia/buildbucket_util",
    "fuchsia/checkout",
    "fuchsia/cipd_resolver",
    "fuchsia/debug_symbols",
    "fuchsia/gerrit",
    "fuchsia/jiri",
    "recipe_engine/buildbucket",
    "recipe_engine/cipd",
    "recipe_engine/context",
    "recipe_engine/json",
    "recipe_engine/path",
    "recipe_engine/properties",
    "recipe_engine/step",
    "recipe_engine/time",
]

# Input properties proto; instances are passed to RunSteps as `props`.
PROPERTIES = InputProperties

# First line of the roll CL commit message. `prepend`/`type`/`type_descr`
# vary between dry runs and real rolls (see generate_message).
COMMIT_MESSAGE_TITLE = (
    """{prepend}[{type}] {type_descr} {roller} packages to {version}"""
)
# Prefix added to the title for dry runs so the CL cannot be submitted.
COMMIT_MESSAGE_DO_NOT_SUBMIT = "DO NOT SUBMIT "
# Body of the roll CL commit message, appended after the title.
COMMIT_MESSAGE = """
{packages}
From: {old_version}
To: {version}
{multiply}
{divider}
"""

# Template for a link to a specific package version in the CIPD web UI.
CIPD_URL = "https://chrome-infra-packages.appspot.com/p/{package}/+/{version}"

# The CIPD CLI will substitute this string in a package name for the name of
# the platform that the CLI is running on. Some Jiri manifests use this feature
# to avoid requiring multiple manifest elements for packages that support
# multiple platforms.
CIPD_PLATFORM_MAGIC_STRING = "${platform}"
def manifest_up_to_date(api, manifest, packages, candidate_versions):
    """Checks that every package in the manifest is pinned to a candidate.

    Args:
        manifest (str): Path to the jiri manifest where the packages are
            pinned.
        packages (seq of str): Names of the packages to check.
        candidate_versions (set of str): Acceptable versions. If any package
            is pinned to a version outside this set the manifest is
            considered out of date and False is returned.
    """

    def pinned_version(pkg):
        # Read the package's current pin from the manifest and surface it in
        # the step presentation for easy debugging.
        element = api.jiri.read_manifest_element(
            manifest,
            name="current version of %s" % pkg,
            element_type="package",
            element_name=pkg,
            step_test_data=lambda: api.json.test_api.output_stream(
                {"version": "version:0"}
            ),
        )
        version = element["version"]
        api.step.active_result.presentation.step_text = version
        return version

    # Short-circuits on the first out-of-date package, like the manual loop
    # it replaces.
    return all(pinned_version(pkg) in candidate_versions for pkg in packages)
def check_packages_not_stale(api, manifest, packages, max_stale_days):
    """Returns False if any pinned package version is older than the limit.

    Args:
        manifest (str): Path to the jiri manifest where the packages are
            pinned.
        packages (seq of str): Names of the packages to check.
        max_stale_days (int): Maximum allowed age, in days, of each pinned
            package version. A non-positive value disables the check.
    """
    if max_stale_days <= 0:
        # Staleness checking is disabled.
        return True
    # Hoist the threshold conversion (days -> seconds) out of the loop.
    max_stale_seconds = max_stale_days * 24 * 60 * 60
    for pkg in packages:
        pinned = api.jiri.read_manifest_element(
            manifest,
            name="current version of %s" % pkg,
            element_type="package",
            element_name=pkg,
            step_test_data=lambda: api.json.test_api.output_stream(
                {"version": "version:0"}
            ),
        )
        description = api.cipd.describe(pkg, pinned["version"])
        # Age is measured from when the version was registered in CIPD.
        if api.time.time() - description.registered_ts > max_stale_seconds:
            return False
    return True
def get_platform_specific_packages(api, manifest, package):
    """Resolve the platform-specific versions of a package name.

    Uses jiri to determine the platform-specific versions that are included
    in the manifest.

    For example:
    - If the package doesn't have platform-specific versions:
        "pkgA" -> ["pkgA"]
    - If the manifest specifies that the package is supported on
      mac-amd64 and linux-amd64:
        "pkgA/${platform}" -> ["pkgA/mac-amd64", "pkgA/linux-amd64"]
    """
    if CIPD_PLATFORM_MAGIC_STRING not in package:
        # Platform-independent package: nothing to expand.
        return [package]
    element = api.jiri.read_manifest_element(manifest, "package", package)
    raw_platforms = element.get("platforms", "")
    platforms = [entry.strip() for entry in raw_platforms.split(",") if entry.strip()]
    # Jiri has default platforms that it uses for any platform-dependent
    # package whose manifest element doesn't specify a `packages` field. So
    # Jiri should always return a non-empty list of platforms as long as the
    # package name contains `CIPD_PLATFORM_MAGIC_STRING`. This is just a safety
    # check to ensure we exit early with a clear error message if that
    # assumption is violated.
    assert platforms, (
        "package %s is platform-dependent but its jiri manifest doesn't specify any "
        "platforms"
    ) % package
    return [
        package.replace(CIPD_PLATFORM_MAGIC_STRING, plat) for plat in platforms
    ]
def append_urls(packages, old_version, new_version):
    """Formats each package name alongside its old and new CIPD web URLs.

    Args:
        packages (seq of str): Fully-resolved CIPD package names.
        old_version (str): The version the packages were previously pinned to.
        new_version (str): The version the packages are being rolled to.

    Returns:
        list of str, one line per package of the form
        "<package> old:<old URL> new:<new URL>".
    """
    lines = []
    for pkg in packages:
        old_url = CIPD_URL.format(package=pkg, version=old_version)
        new_url = CIPD_URL.format(package=pkg, version=new_version)
        lines.append(
            "{package} old:{old} new:{new}".format(
                package=pkg, old=old_url, new=new_url
            )
        )
    return lines
def generate_message(
    builder_name, packages, old_version, version, build_id, multiply, divider, dry_run
):
    """Builds the full commit message for the roll CL.

    Args:
        builder_name (str): Name of the roller builder; "-roller"/"-dryrun"
            suffixes are stripped when deriving the roller display name.
        packages (str): Pre-formatted package list (one line per package).
        old_version (str): Version being rolled away from.
        version (str): Version being rolled to.
        build_id (int or str): Buildbucket build ID.
        multiply (str): Optional MULTIPLY footer block, or "".
        divider (str): Commit-message divider string from properties.
        dry_run (bool): Whether this is a dry run; dry runs get a
            DO NOT SUBMIT title.

    Returns:
        str: title line followed by the message body.
    """
    roller = builder_name.replace("-roller", "").replace("-dryrun", "")
    # Only the title varies between dry runs and real rolls.
    if dry_run:
        title_fields = dict(
            prepend=COMMIT_MESSAGE_DO_NOT_SUBMIT, type="dryrun", type_descr="Dry run"
        )
    else:
        title_fields = dict(prepend="", type="roll", type_descr="Roll")
    title = COMMIT_MESSAGE_TITLE.format(
        roller=roller, version=version, **title_fields
    )
    body = COMMIT_MESSAGE.format(
        roller=roller,
        packages=packages,
        old_version=old_version,
        version=version,
        builder=builder_name,
        build_id=build_id,
        multiply=multiply,
        divider=divider,
    )
    return title + body
def RunSteps(api, props):
    """Rolls new CIPD package versions into a jiri manifest repository.

    Resolves a version tag common to all configured packages, edits the jiri
    manifests and lockfiles to pin that version, optionally fetches/uploads
    debug symbols, and lands the change via the auto-roller.
    """
    # Backfill defaults for optional input properties.
    props.debug_symbol_attribute = props.debug_symbol_attribute or "debug-symbols"
    props.tag = props.tag or "version"
    props.ref = props.ref or "latest"
    if props.owners:
        api.step.empty("owners", step_text=", ".join(props.owners))
    checkout_dir = api.checkout.with_options(
        manifest=props.checkout_manifest,
        remote=props.remote,
        project=props.project,
        # Ignore the build input; we should always check out the manifest
        # repository at HEAD before updating the manifest to reduce the
        # likelihood of merge conflicts.
        build_input=build_pb2.Build.Input(),
        use_lock_file=True,
    )
    with api.context(cwd=checkout_dir):
        # Locate the on-disk path of the project we'll be editing.
        project_json = api.jiri.project(
            projects=[props.project],
            test_data=[{"path": str(checkout_dir.join(props.project))}],
        ).json.output[0]
    project_dir = api.path.abs_to_path(project_json["path"])
    packages_requiring_ref = set(props.packages_requiring_ref)
    with api.step.nest("resolve package platforms"), api.context(cwd=project_dir):
        unresolved_packages_by_manifest = props.packages_by_manifest
        packages_by_manifest = {}
        for manifest, packages in unresolved_packages_by_manifest.items():
            manifest_resolved_packages = []
            for package in packages:
                resolved_packages = get_platform_specific_packages(
                    api, manifest, package
                )
                manifest_resolved_packages.extend(resolved_packages)
                # Keep `packages_requiring_ref` expressed in terms of the
                # resolved (platform-specific) package names.
                if package in packages_requiring_ref:
                    packages_requiring_ref.remove(package)
                    packages_requiring_ref.update(resolved_packages)
            packages_by_manifest[manifest] = manifest_resolved_packages
    all_packages = sorted(
        p for packages in packages_by_manifest.values() for p in packages
    )
    assert packages_requiring_ref.issubset(
        all_packages
    ), "`packages_requiring_ref` must be a subset of the specified packages"
    # Find tags (e.g. "version:X") shared by all packages at the chosen ref.
    candidate_versions = api.cipd_resolver.resolve_common_tags(
        ref=props.ref,
        tag_name=props.tag,
        packages=all_packages,
        packages_requiring_ref=packages_requiring_ref,
    )
    if not candidate_versions:
        raise api.step.StepFailure("Failed to resolve a tag to roll to.")
    version = candidate_versions[0]
    with api.step.nest("edit manifests") as presentation, api.context(cwd=project_dir):
        changed_packages = []
        # We have to use the non-platform-specific packages here because those
        # are the names that are in the manifests.
        for manifest, packages in sorted(unresolved_packages_by_manifest.items()):
            if manifest_up_to_date(api, manifest, packages, candidate_versions):
                if check_packages_not_stale(
                    api, manifest, packages, props.max_stale_days
                ):
                    continue
                # Up-to-date but too old: fail loudly so the roller's owners
                # notice that upstream has stopped producing new versions.
                raise api.step.StepFailure(
                    "packages in manifest %s are stale; nothing to roll for over %d days"
                    % (manifest, props.max_stale_days)
                )
            changes = api.jiri.edit_manifest(
                manifest,
                packages=[(package, version) for package in packages],
                name="jiri edit %s" % manifest,
            )
            changed_packages.extend(changes["packages"])
        if not changed_packages:
            presentation.step_text = "manifest up-to-date; nothing to roll"
            return api.auto_roller.nothing_to_roll()
        old_version = changed_packages[0]["old_version"]
        # Update the lockfiles.
        for lock_entry in props.lockfiles:
            # Each entry has the form "<manifest>=<lockfile>".
            fields = lock_entry.split("=")
            manifest = fields[0]
            lock = fields[1]
            api.jiri.resolve(
                local_manifest=True,
                output=lock,
                manifests=[manifest],
            )
    packages_with_urls = append_urls(sorted(all_packages), old_version, version)
    multiply = ""
    if props.test_multipliers:
        multiply = "\nMULTIPLY: `%s`\n" % api.json.dumps(
            [
                jsonpb.MessageToDict(m, preserving_proto_field_name=True)
                for m in props.test_multipliers
            ],
            indent=2,
        )
    message = generate_message(
        builder_name=api.buildbucket.builder_name,
        packages="\n".join(packages_with_urls),
        old_version=old_version,
        version=version,
        build_id=api.buildbucket_util.id,
        multiply=multiply,
        divider=props.commit_divider,
        dry_run=props.dry_run,
    )
    # Initialize unconditionally: previously this was only assigned inside the
    # preroll branch, so configuring postroll buckets without preroll buckets
    # raised a NameError at the postroll upload below.
    build_id_dirs = []
    if props.preroll_debug_symbol_gcs_buckets:
        with api.step.nest("preroll fetch and upload debug symbols"), api.context(
            cwd=project_dir
        ):
            debug_symbol_packages = []
            # Determine which packages are debug symbol packages.
            for manifest, packages in unresolved_packages_by_manifest.items():
                for package in packages:
                    package_def = api.jiri.read_manifest_element(
                        manifest=manifest,
                        element_type="package",
                        element_name=package,
                    )
                    attributes = package_def.get("attributes", "").split(",")
                    if props.debug_symbol_attribute in attributes:
                        debug_symbol_packages.append(package)
            # Attempt to populate preroll GCS buckets with debug symbols. This
            # step serves to check debug symbols for validity e.g. .debug_info
            # sections are present, and to assist symbolization of stack traces
            # from the packages under roll.
            build_id_dirs = api.debug_symbols.fetch_and_upload(
                packages=debug_symbol_packages,
                version=version,
                buckets=props.preroll_debug_symbol_gcs_buckets,
            )
    # Land the changes.
    change = api.auto_roller.attempt_roll(
        api.gerrit.host_from_remote_url(props.remote),
        gerrit_project=props.project,
        repo_dir=project_dir,
        commit_message=message,
        cl_notify_option=props.cl_notify_option,
        create_unique_id=props.create_unique_change_id,
        dry_run=props.dry_run,
        force_submit=props.force_submit,
        roller_owners=props.owners,
        include_tryjobs=props.include_tryjobs,
    )
    rolled = change and change.success
    # If roll succeeded, upload any debug symbols that were rolled.
    if rolled and props.postroll_debug_symbol_gcs_buckets:
        with api.context(cwd=project_dir):
            api.debug_symbols.upload(
                step_name="postroll upload debug symbols",
                build_id_dirs=build_id_dirs,
                buckets=props.postroll_debug_symbol_gcs_buckets,
            )
    return api.auto_roller.raw_result(
        change, success_text=(None if props.dry_run else "Rolled to %s" % version)
    )
def GenTests(api):
    """Generates recipe-engine simulation test cases for this roller."""
    default_packages = ["pkgA", "pkgB", "pkgC"]

    def properties(**kwargs):
        # Baseline input properties shared by every test case; any kwargs
        # override or extend the defaults.
        props = {
            "project": "integration",
            "checkout_manifest": "minimal",
            "remote": "https://fuchsia.googlesource.com",
            "packages_by_manifest": {"chromium/chromium": default_packages},
            "lockfiles": ["integration/flower=integration/jiri.lock"],
            "owners": ["nobody@google.com", "noreply@google.com"],
            "commit_divider": "BEGIN_FOOTER",
        }
        props.update(kwargs)
        return api.properties(**props)

    def check_current_version(pkg, version):
        # Mocks the "current version of <pkg>" manifest read performed under
        # the "edit manifests" step nest.
        return api.jiri.read_manifest_element(
            element_name=pkg,
            # read_manifest_element requires `manifest` and `element_type`
            # arguments even though they're no-ops for test data.
            manifest="",
            element_type="package",
            test_output={"version": version},
            step_name="edit manifests.current version of %s" % pkg,
        )

    def get_platforms(pkg, platforms):
        # Mocks the manifest read used to expand ${platform} package names
        # under the "resolve package platforms" step nest.
        return api.jiri.read_manifest_element(
            element_name=pkg,
            # read_manifest_element requires `manifest` and `element_type`
            # arguments even though they're no-ops for test data.
            manifest="",
            element_type="package",
            test_output={"name": pkg, "platforms": ",".join(platforms)},
            step_name="resolve package platforms.read manifest for %s" % pkg,
        )

    # Use this to assert that no commit is made, and thus that no roll CL is
    # created.
    def assert_no_roll():
        return api.post_process(DoesNotRunRE, r".*commit.*")

    def resolved_tags(tags):
        # Mocks the output of cipd_resolver's "resolve common tags" step.
        return api.step_data("resolve common tags", api.json.output_stream(tags))

    yield (
        api.buildbucket_util.test("default_with_multipliers", builder="chromium-roller")
        + properties(
            packages_requiring_ref=default_packages[:1],
            test_multipliers=[{"name": "test1", "total_runs": 5}],
        )
        + resolved_tags(["version:2", "version:3"])
        + api.auto_roller.success()
    )

    # No common tag could be resolved: the recipe must fail without rolling.
    yield (
        api.buildbucket_util.test("tag_resolution_failed", status="failure")
        + properties()
        + resolved_tags([])
        + assert_no_roll()
    )

    yield (
        api.buildbucket_util.test("one_manifest_up_to_date", builder="chromium-roller")
        + properties(
            packages_by_manifest={
                "chromium/chromium": default_packages[:1],
                "chromium/chromium-other": default_packages[1:],
            },
            test_multipliers=[{"name": "test1", "total_runs": 5}],
        )
        + resolved_tags(["version:2"])
        # pkgA is already up-to-date and is the only package in its manifest,
        # so that manifest need not be updated.
        + check_current_version("pkgA", "version:2")
        + api.auto_roller.success()
    )

    yield (
        api.buildbucket_util.test(
            "stale_packages", builder="chromium-roller", status="failure"
        )
        + properties(
            packages_by_manifest={
                "chromium/chromium": default_packages[:1],
                "chromium/chromium-other": default_packages[1:2],
            },
            max_stale_days=1,
        )
        + resolved_tags(["version:2"])
        # pkgA and pkgB are already up-to-date so the manifests don't need to
        # be updated.
        + check_current_version("pkgA", "version:2")
        + check_current_version("pkgB", "version:2")
        + api.time.seed(1337000000)
        # pkgA is not stale, so we move on to check pkgB.
        + api.step_data(
            "edit manifests.cipd describe pkgA",
            api.cipd.example_describe(
                package_name="pkgA", version="version:2", tstamp=1337000000
            ),
        )
        # pkgB is stale so we return a failure.
        + api.step_data(
            "edit manifests.cipd describe pkgB",
            api.cipd.example_describe(
                package_name="pkgB",
                version="version:2",
                tstamp=1337000000 - 24 * 60 * 60,
            ),
        )
        + assert_no_roll()
    )

    # `jiri edit` reports no changed packages, so there is nothing to roll.
    yield (
        api.buildbucket_util.test("noop")
        + properties()
        + resolved_tags(["version:1"])
        + api.step_data(
            "edit manifests.jiri edit chromium/chromium",
            api.json.output({"packages": []}),
        )
    )

    # Packages using the ${platform} magic string get expanded per-platform.
    yield (
        api.buildbucket_util.test("default_with_platform", builder="tools-roller")
        + properties(
            packages_by_manifest={
                "fuchsia/prebuilts": ["pkgM/${platform}", "pkgN/${platform}"]
            },
            tag="git_revision",
        )
        + get_platforms("pkgM/${platform}", ["mac-amd64", "linux-amd64"])
        + get_platforms("pkgN/${platform}", ["linux-amd64"])
        + resolved_tags(["git_revision:a", "git_revision:b"])
        + api.auto_roller.success()
    )

    def fetch_debug_symbols(pkg, attributes=None):
        # Mocks the manifest read used to decide whether `pkg` carries the
        # debug-symbols attribute in the preroll step nest.
        test_output = {"path": pkg}
        if attributes:
            test_output["attributes"] = attributes
        return api.jiri.read_manifest_element(
            "chromium/chromium",
            "package",
            pkg,
            test_output=test_output,
            nesting="preroll fetch and upload debug symbols",
        )

    yield (
        api.buildbucket_util.test("with_debug_symbols", builder="chromium-roller")
        + properties(
            packages_by_manifest={
                "chromium/chromium": default_packages
                + ["pkgX/debug/${platform}", "pkgY/debug"]
            },
            preroll_debug_symbol_gcs_buckets=["foo-bucket", "bar-bucket"],
            postroll_debug_symbol_gcs_buckets=["baz-bucket"],
        )
        + get_platforms("pkgX/debug/${platform}", ["linux-amd64"])
        + resolved_tags(["version:2"])
        + fetch_debug_symbols(
            "pkgX/debug/${platform}", attributes="debug-symbols,debug-symbols-amd64"
        )
        + fetch_debug_symbols(
            "pkgY/debug", attributes="debug-symbols,debug-symbols-amd64"
        )
        + fetch_debug_symbols("pkgA")
        + fetch_debug_symbols("pkgB")
        + fetch_debug_symbols("pkgC")
        + api.auto_roller.success()
    )

    # Dry-run mode: the CL is created with DO NOT SUBMIT and never lands.
    yield (
        api.buildbucket_util.test("dry_run", builder="chromium-dryrun-roller")
        + properties(dry_run=True)
        + resolved_tags(["version:2"])
        + api.auto_roller.dry_run_success()
    )