| # Copyright 2019 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for rolling CIPD prebuilts into Fuchsia.""" |
| |
| import re |
| |
| from recipe_engine.config import Dict, List |
| from recipe_engine.post_process import DoesNotRunRE |
| from recipe_engine.recipe_api import Property |
| |
| from RECIPE_MODULES.fuchsia.utils import memoize |
| |
# Recipe modules (fuchsia/*) and recipe_engine modules loaded for this recipe.
DEPS = [
    "fuchsia/auto_roller",
    "fuchsia/buildbucket_util",
    "fuchsia/debug_symbols",
    "fuchsia/gerrit",
    "fuchsia/jiri",
    "fuchsia/status_check",
    "recipe_engine/buildbucket",
    "recipe_engine/cipd",
    "recipe_engine/context",
    "recipe_engine/json",
    "recipe_engine/path",
    "recipe_engine/properties",
    "recipe_engine/step",
]
| |
# Input properties accepted by this recipe. Each entry's `help` string is the
# authoritative description; these are unpacked as keyword arguments to
# RunSteps() below.
PROPERTIES = {
    "project": Property(kind=str, help="Jiri remote manifest project", default=None),
    "checkout_manifest": Property(
        kind=str, default="", help="Jiri manifest to checkout"
    ),
    "remote": Property(kind=str, help="Remote manifest repository"),
    "packages_by_manifest": Property(
        kind=Dict(),
        default=None,
        help=(
            "Mapping from manifest path (relative to $project) to list of packages "
            "that are pinned in that manifest and should be rolled."
        ),
    ),
    "packages_requiring_ref": Property(
        kind=List(str),
        default=(),
        help="The subset of packages that must have the specified ref",
    ),
    "debug_symbol_attribute": Property(
        kind=str,
        default="debug-symbols",
        help="Jiri attribute to match debug symbol packages",
    ),
    "preroll_debug_symbol_gcs_buckets": Property(
        kind=List(str),
        default=(),
        help="GCS buckets to upload debug symbols to prior to starting roll.",
    ),
    "postroll_debug_symbol_gcs_buckets": Property(
        kind=List(str),
        default=(),
        help=(
            "GCS buckets to upload debug symbols to after a successful roll. If "
            "specified, preroll_debug_symbol_gcs_buckets is required."
        ),
    ),
    "lockfiles": Property(
        kind=List(str),
        default=(),
        help='The list of lockfiles to update in "${manifest}=${lockfile}" format',
    ),
    "create_unique_change_id": Property(
        kind=bool,
        default=False,
        help="Whether to make roll Change-Id unique to the build.",
    ),
    "dry_run": Property(
        kind=bool,
        default=False,
        help="Whether to dry-run the auto-roller (CQ+1 and abandon the change)",
    ),
    "force_submit": Property(
        kind=bool,
        default=False,
        help="Whether to force-submit the change, bypassing CQ",
    ),
    "commit_divider": Property(
        kind=str,
        default="",
        help="Line of text to divide the commit header and body from the footers",
    ),
    "tag": Property(
        kind=str,
        default="version",
        help="A CIPD tag prefix common to all $packages where a common version can be extracted",
    ),
    "ref": Property(
        kind=str,
        default="latest",
        help="A common CIPD ref to resolve when rolling a set of packages",
    ),
    "owners": Property(
        kind=List(str),
        default=(),
        help=(
            "The owners responsible for watching this roller "
            '(example: "username@google.com").'
        ),
    ),
    "test_multipliers": Property(
        kind=List(dict),
        default=None,
        help=("A list of test multipliers to pass into the roll CL MULTIPLY footer."),
    ),
    "cl_notify_option": Property(
        kind=str,
        default=None,
        help="Notify option when pushing roll CL.",
    ),
}
| |
# First line of the roll CL's commit message. Filled in by generate_message();
# str.format() ignores surplus keyword arguments, so generate_message() may
# pass more fields than appear in the template.
COMMIT_MESSAGE_TITLE = (
    """{prepend}[{type}] {type_descr} {roller} packages to {version}"""
)

# Prepended to the title for dry-run rolls so the CL cannot be landed by
# accident.
COMMIT_MESSAGE_DO_NOT_SUBMIT = "DO NOT SUBMIT "

# Body of the roll CL's commit message, appended after COMMIT_MESSAGE_TITLE.
COMMIT_MESSAGE = """

{packages}

From: {old_version}
To: {version}
{multiply}
{divider}
"""

# Template for a link to a specific version of a package in the CIPD web UI.
CIPD_URL = "https://chrome-infra-packages.appspot.com/p/{package}/+/{version}"

# cipd will return an error message matching this regex if the requested
# version (identified by either a ref or a tag) of the package doesn't exist.
MISSING_VERSION_REGEX = re.compile(r"no such (ref|tag)")

# The CIPD CLI will substitute this string in a package name for the name of
# the platform that the CLI is running on. Some Jiri manifests use this feature
# to avoid requiring multiple manifest elements for packages that support
# multiple platforms.
CIPD_PLATFORM_MAGIC_STRING = "${platform}"
| |
| |
@memoize
def get_tags_from_cipd(api, cipd_pkg, tag_prefix, version):
    """Returns all tags for a given CIPD package instance.

    Args:
        api (RecipeApi): The recipe api to use.
        cipd_pkg (str): The name of the CIPD package.
        tag_prefix (str): The tag prefix to filter by.
        version (str): The ref or a tag of the desired instance.

    Returns:
        The package's tags that start with the given prefix, or an empty set if
        the specified version wasn't found.
    """
    try:
        described_tags = api.cipd.describe(cipd_pkg, version).tags
    except api.cipd.Error as e:
        missing = MISSING_VERSION_REGEX.search(e.reason)
        if missing is None:
            # Not a missing-version error; propagate it.
            raise
        step = api.step.active_result
        step.presentation.step_text = missing.group()
        # A missing tag/ref is generally recoverable, so mark the step as
        # passed so it doesn't clutter up the Milo UI unnecessarily.
        step.presentation.status = "SUCCESS"
        return set()

    # Keep only tags with the requested prefix, newest first so we'll
    # present the most recent ones at the top.
    matching = sorted(
        (t for t in described_tags if t.tag.startswith(tag_prefix)),
        key=lambda t: t.registered_ts,
        reverse=True,
    )
    tag_names = [t.tag for t in matching]

    # Show at most 10 tags on the step, plus a "(N more)" marker when
    # some are omitted.
    display_limit = 10
    shown = tag_names[:display_limit]
    if len(tag_names) > display_limit:
        shown.append("(%d more)" % (len(tag_names) - display_limit))
    api.step.active_result.presentation.step_text = "".join(
        "\n" + line for line in shown
    )

    return set(tag_names)
| |
| |
def find_common_tags(api, packages, packages_requiring_ref, tag, ref):
    """Returns the tags shared by the latest possible instances of all the
    packages.

    The chosen instances of all of the packages listed in
    `packages_requiring_ref` will have the given `ref` (or else an
    `AssertionError` will be raised in the case where the `ref` instances of
    the `packages_requiring_ref` don't have a shared tag). If
    `packages_requiring_ref` is empty, then at least one (and generally most,
    if not all) of the chosen instances will have the given `ref`.

    The only time that some of the chosen instances won't currently have that
    ref is when some of the packages have been updated to a later version and
    some haven't (e.g. if another roller that does the updating is running in
    parallel with this recipe). In that case, we'll choose older instances of
    the updated packages, whose versions correspond to the `ref` instances of
    the non-updated packages.

    Args:
        api (RecipeApi): The recipe API to use.
        packages (seq(str)): The names of the packages to search.
        packages_requiring_ref (seq(str)): The subset of packages that MUST be
            pinned to the specified `ref`.
        tag (str): The prefix to filter by when searching for a common tag. E.g.,
            if `tag` is "version", then the common tag returned will start with
            "version".
        ref (str): The CIPD ref that we should try to pin the packages to, e.g.
            "latest".

    Returns:
        A set of tags such that:
        - Either:
            - each of the packages in `packages_requiring_ref` has an instance
              with the specified `ref` that has all those tags.
            - OR if `packages_requiring_ref` is empty, at least one package
              in `packages` has an instance with the specified `ref` that has
              all those tags.
        - AND every other package in `packages` has *some* instance with all
          of those tags (although not necessarily the instance with the
          specified `ref`).

        Conceptually, the packages can be considered "in sync" as long as
        they're all pinned to one of these tags.
    """
    # The set of packages that don't need to be pinned to `ref`.
    # We use a list comprehension instead of set operations here to preserve the
    # ordering of the packages, which makes writing tests much easier.
    flexible_packages = [pkg for pkg in packages if pkg not in packages_requiring_ref]

    # Scan position within `flexible_packages` for the loop below; reset to
    # zero when we backtrack to re-examine earlier packages.
    index = 0
    if packages_requiring_ref:
        common_tags = set.intersection(
            *(
                get_tags_from_cipd(api, pkg, tag, ref)
                for pkg in sorted(packages_requiring_ref)
            )
        )
        if not common_tags:
            raise api.step.StepFailure("Unable to find common tag to roll.")
    else:
        # Fetch the `ref` instance of the first package that actually has an
        # instance with `ref`. We'll consider it to be the "representative"
        # instance; we'll try to find a subset of its tags that are shared by the
        # `ref` instances of all the other packages.
        # Note that we'll have some duplicated `get_tags_from_cipd` calls in the
        # main while loop below, which is ok since that function is memoized.
        common_tags = set()
        for pkg in flexible_packages:
            common_tags = get_tags_from_cipd(api, pkg, tag, ref)
            if common_tags:
                break
        if not common_tags:
            raise api.step.StepFailure(
                "none of the packages has an instance with the %r ref" % ref
            )

    found_outdated_package = False
    while index < len(flexible_packages):
        pkg = flexible_packages[index]
        index += 1
        tags = get_tags_from_cipd(api, pkg, tag, ref)
        intersection = common_tags.intersection(tags)
        if intersection:
            # This package's `ref` instance shares some tags with the previous
            # packages' `ref` instances, so we still have some common tag
            # candidates and can move on to the next package.
            common_tags = intersection
            continue

        # Else, the instance of `pkg` at `ref` does not share any tags with all
        # the previous packages' fetched instances. We'll see if `pkg` has any
        # (probably older) instance that has a shared tag with all the previous
        # packages' chosen instances.
        # `common_tags` is non-empty here, so the loop body runs at least once
        # and `older_instance_tags` is always bound afterwards.
        for common_tag in common_tags:
            older_instance_tags = get_tags_from_cipd(api, pkg, tag, common_tag)
            if older_instance_tags:
                break

        if older_instance_tags:
            # The representative package instance has not yet been updated to
            # this package's `ref` version (or at least the representative
            # hadn't been updated at the time that we fetched its tags), but
            # there is an older instance of this package that *does* match the
            # version of the representative instance, so we'll use that one
            # instead.
            # NOTE: There is a chance that `pkg` was actually
            # out of date relative to the representative package, but got
            # updated to the same version as the previous packages in the time
            # between "cipd describe" and "cipd search", in which case the
            # "older" instance will actually be newer than the original
            # instance and we will end up with the same result as if `pkg` had
            # already been up-to-date when we first "cipd described" it.
            common_tags.intersection_update(older_instance_tags)
            assert common_tags # should contain at least `common_tag`
        elif not found_outdated_package and not packages_requiring_ref:
            # This package has not yet been updated to the version of the
            # representative package instance. Go back and start over with *this*
            # package as the new representative *unless* we have some packages that
            # must be pinned to `ref`, in which case we've already hit a dead end
            # as it will be impossible to satisfy that requirement.
            found_outdated_package = True
            common_tags = tags
            index = 0
            step = api.step("package out of date", None)
            step.presentation.step_text = (
                "\nbacktracking to check older versions of previous packages"
            )
        else:
            # We should "backtrack" at most once (i.e., find a package whose `ref`
            # instance version doesn't correspond to a version of all the other
            # packages), or zero times if there are `packages_requiring_ref`. If it
            # happens twice, there's probably something weird going on and we won't
            # be able to reconcile the tags by continuing.
            raise api.step.StepFailure("unable to find common tag to roll")

    return common_tags
| |
| |
def manifest_up_to_date(api, manifest, packages, candidate_versions):
    """Determines whether every package in the manifest is pinned to one of
    the candidate versions.

    Args:
        manifest (str): The path to the jiri manifest where the packages are
            pinned.
        packages (seq of str): The names of the packages to check.
        candidate_versions (set of str): Each package must be pinned to one
            of these versions for it to be considered up-to-date. If any
            package is pinned to a version that's *not* in this set, the
            function will return False.
    """
    for pkg in packages:
        # Read the package's pinned version out of the manifest; present it
        # on the step for easy debugging.
        pkg_element = api.jiri.read_manifest_element(
            manifest,
            name="current version of %s" % pkg,
            element_type="package",
            element_name=pkg,
            step_test_data=lambda: api.json.test_api.output_stream(
                {"version": "version:0"}
            ),
        )
        pinned_version = pkg_element["version"]
        api.step.active_result.presentation.step_text = pinned_version
        if pinned_version not in candidate_versions:
            # At least one package is pinned outside the candidate set, so
            # the manifest needs a roll.
            return False
    return True
| |
| |
def get_platform_specific_packages(api, manifest, package):
    """Resolve the platform-specific versions of a package name.

    Uses jiri to determine the platform-specific versions that are included
    in the manifest.

    For example:
    - If the package doesn't have platform-specific versions:

        "pkgA" -> ["pkgA"]

    - If the manifest specifies that the package is supported on
      mac-amd64 and linux-amd64:

        "pkgA/${platform}" -> ["pkgA/mac-amd64", "pkgA/linux-amd64"]
    """
    if CIPD_PLATFORM_MAGIC_STRING not in package:
        # Platform-independent package; nothing to expand.
        return [package]

    package_def = api.jiri.read_manifest_element(manifest, "package", package)
    raw_platforms = package_def.get("platforms", "").split(",")
    platforms = [platform.strip() for platform in raw_platforms if platform.strip()]

    # Jiri has default platforms that it uses for any platform-dependent
    # package whose manifest element doesn't specify a `packages` field. So
    # Jiri should always return a non-empty list of platforms as long as the
    # package name contains `CIPD_PLATFORM_MAGIC_STRING`. This is just a safety
    # check to ensure we exit early with a clear error message if that
    # assumption is violated.
    assert platforms, (
        "package %s is platform-dependent but its jiri manifest doesn't specify any "
        "platforms"
    ) % package

    return [
        package.replace(CIPD_PLATFORM_MAGIC_STRING, platform) for platform in platforms
    ]
| |
| |
def append_urls(packages, old_version, new_version):
    """Formats one line per package pairing its old and new CIPD instance URLs.

    Args:
        packages (seq of str): Package names to format.
        old_version (str): Version identifier for the pre-roll instances.
        new_version (str): Version identifier for the post-roll instances.

    Returns:
        A list of "<package> old:<url> new:<url>" strings, one per package.
    """
    return [
        "{package} old:{old} new:{new}".format(
            package=pkg,
            old=CIPD_URL.format(package=pkg, version=old_version),
            new=CIPD_URL.format(package=pkg, version=new_version),
        )
        for pkg in packages
    ]
| |
| |
def generate_message(
    builder_name, packages, old_version, version, build_id, multiply, divider, dry_run
):
    """Builds the full commit message for the roll CL.

    Args:
        builder_name (str): Name of the builder running this roll; the
            "-roller"/"-dryrun" suffixes are stripped to produce the short
            roller name shown in the commit title.
        packages (str): Pre-formatted, newline-joined package/URL lines.
        old_version (str): Version being rolled from.
        version (str): Version being rolled to.
        build_id (str): ID of this build (passed to the body template).
        multiply (str): Optional MULTIPLY footer text (may be empty).
        divider (str): Line dividing the commit header/body from footers.
        dry_run (bool): Whether this is a dry-run roll; dry runs get a
            "DO NOT SUBMIT" title prefix.

    Returns:
        The commit message (title + body) as a single string.
    """
    roller_string = builder_name.replace("-roller", "").replace("-dryrun", "")

    # Dry runs are labeled distinctly and marked so they can't be landed.
    if dry_run:
        title_fields = {
            "prepend": COMMIT_MESSAGE_DO_NOT_SUBMIT,
            "type": "dryrun",
            "type_descr": "Dry run",
        }
    else:
        title_fields = {"prepend": "", "type": "roll", "type_descr": "Roll"}

    message_title = COMMIT_MESSAGE_TITLE.format(
        roller=roller_string, version=version, **title_fields
    )

    message_body = COMMIT_MESSAGE.format(
        roller=roller_string,
        packages=packages,
        old_version=old_version,
        version=version,
        builder=builder_name,
        build_id=build_id,
        multiply=multiply,
        divider=divider,
    )

    return message_title + message_body
| |
| |
def RunSteps(
    api,
    project,
    checkout_manifest,
    remote,
    packages_by_manifest,
    packages_requiring_ref,
    debug_symbol_attribute,
    preroll_debug_symbol_gcs_buckets,
    postroll_debug_symbol_gcs_buckets,
    lockfiles,
    create_unique_change_id,
    dry_run,
    force_submit,
    commit_divider,
    tag,
    ref,
    owners,
    test_multipliers,
    cl_notify_option,
):
    """Checks out the manifest repo, pins all packages to a common version,
    and attempts to land the resulting manifest/lockfile changes as a roll CL.
    """
    if owners:
        # Surface the roller's owners in the build UI for easy triage.
        owners_step = api.step("owners", None)
        owners_step.presentation.step_text = ", ".join(owners)

    with api.step.nest("checkout"), api.context(infra_steps=True):
        api.jiri.init(use_lock_file=True)
        api.jiri.import_manifest(checkout_manifest, remote, project)
        api.jiri.update(run_hooks=False)
        with api.context(cwd=api.path["start_dir"]):
            api.jiri.run_hooks()

    project_dir = api.path["start_dir"].join(*project.split("/"))

    packages_requiring_ref = set(packages_requiring_ref)

    with api.step.nest("resolve package platforms"), api.context(cwd=project_dir):
        unresolved_packages_by_manifest = packages_by_manifest
        packages_by_manifest = {}

        for manifest, packages in unresolved_packages_by_manifest.items():
            manifest_resolved_packages = []
            for package in packages:
                resolved_packages = get_platform_specific_packages(
                    api, manifest, package
                )
                manifest_resolved_packages.extend(resolved_packages)
                if package in packages_requiring_ref:
                    # Replace the platform-generic name with its resolved
                    # platform-specific names so the subset check below works.
                    packages_requiring_ref.remove(package)
                    packages_requiring_ref.update(resolved_packages)

            packages_by_manifest[manifest] = manifest_resolved_packages

    all_packages = sorted(
        p for packages in packages_by_manifest.values() for p in packages
    )
    assert packages_requiring_ref.issubset(
        all_packages
    ), "`packages_requiring_ref` must be a subset of the specified packages"

    with api.step.nest("find common tags", status="last"):
        candidate_versions = find_common_tags(
            api, all_packages, packages_requiring_ref, tag, ref
        )
        version = sorted(candidate_versions)[-1]

    with api.step.nest("edit manifests") as presentation, api.context(cwd=project_dir):
        changed_packages = []
        # We have to use the non-platform-specific packages here because those
        # are the names that are in the manifests.
        # BUGFIX: was `iteritems()`, which is Python 2-only and raises
        # AttributeError on Python 3 (the rest of this file uses `items()`).
        for manifest, packages in unresolved_packages_by_manifest.items():
            if manifest_up_to_date(api, manifest, packages, candidate_versions):
                continue
            changes = api.jiri.edit_manifest(
                manifest,
                packages=[(package, version) for package in packages],
                name="jiri edit %s" % manifest,
            )
            changed_packages.extend(changes["packages"])

        if not changed_packages:
            presentation.step_text = "manifest up-to-date; nothing to roll"
            return api.auto_roller.nothing_to_roll()

        old_version = changed_packages[0]["old_version"]

        # Update the lockfiles.
        for lock_entry in lockfiles:
            fields = lock_entry.split("=")
            manifest = fields[0]
            lock = fields[1]
            api.jiri.resolve(
                local_manifest=True,
                output=lock,
                manifests=[manifest],
            )

    packages_with_urls = append_urls(sorted(all_packages), old_version, version)

    multiply = ""
    if test_multipliers:
        multiply = "\nMULTIPLY: `%s`\n" % api.json.dumps(test_multipliers, indent=2)

    message = generate_message(
        builder_name=api.buildbucket.builder_name,
        packages="\n".join(packages_with_urls),
        old_version=old_version,
        version=version,
        build_id=api.buildbucket_util.id,
        multiply=multiply,
        divider=commit_divider,
        dry_run=dry_run,
    )

    # BUGFIX: initialize so the postroll upload below can't hit a NameError
    # when postroll buckets are configured without preroll buckets (the help
    # text says preroll is required in that case, but nothing enforced it).
    build_id_dirs = []
    if preroll_debug_symbol_gcs_buckets:
        with api.step.nest("preroll fetch and upload debug symbols"), api.context(
            cwd=project_dir
        ):
            debug_symbol_packages = []
            # Determine which packages are debug symbol packages.
            # BUGFIX: was `iteritems()` (Python 2-only); see above.
            for manifest, packages in unresolved_packages_by_manifest.items():
                for package in packages:
                    package_def = api.jiri.read_manifest_element(
                        manifest=manifest,
                        element_type="package",
                        element_name=package,
                    )
                    attributes = package_def.get("attributes", "").split(",")
                    if debug_symbol_attribute in attributes:
                        debug_symbol_packages.append(package)
            # Attempt to populate preroll GCS buckets with debug symbols. This
            # step serves to check debug symbols for validity e.g. .debug_info
            # sections are present, and to assist symbolization of stack traces
            # from the packages under roll.
            build_id_dirs = api.debug_symbols.fetch_and_upload(
                packages=debug_symbol_packages,
                version=version,
                buckets=preroll_debug_symbol_gcs_buckets,
            )

    # Land the changes.
    change = api.auto_roller.attempt_roll(
        api.gerrit.host_from_remote_url(remote),
        gerrit_project=project,
        repo_dir=project_dir,
        commit_message=message,
        cl_notify_option=cl_notify_option,
        create_unique_id=create_unique_change_id,
        dry_run=dry_run,
        force_submit=force_submit,
    )
    rolled = change and change.success

    # If roll succeeded, upload any debug symbols that were rolled.
    if rolled and postroll_debug_symbol_gcs_buckets:
        with api.context(cwd=project_dir):
            api.debug_symbols.upload(
                step_name="postroll upload debug symbols",
                build_id_dirs=build_id_dirs,
                buckets=postroll_debug_symbol_gcs_buckets,
            )

    return api.auto_roller.raw_result(
        change, success_text=(None if dry_run else "Rolled to %s" % version)
    )
| |
| |
| def GenTests(api): |
| default_packages = ["pkgA", "pkgB", "pkgC"] |
| debug_symbol_packages = ["pkgX/debug/${platform}", "pkgY/debug"] |
| platform_packages = ["pkgM/${platform}", "pkgN/${platform}"] |
| default_lockfiles = ["integration/flower=integration/jiri.lock"] |
| |
| default_properties = api.properties( |
| project="integration", |
| checkout_manifest="minimal", |
| remote="https://fuchsia.googlesource.com", |
| packages_by_manifest={"chromium/chromium": default_packages}, |
| lockfiles=default_lockfiles, |
| owners=["nobody@google.com", "noreply@google.com"], |
| commit_divider="BEGIN_FOOTER", |
| ) |
| |
| multiple_manifest_properties = api.properties( |
| project="integration", |
| checkout_manifest="minimal", |
| remote="https://fuchsia.googlesource.com", |
| packages_by_manifest={ |
| "chromium/chromium": default_packages[:1], |
| "chromium/chromium-other": default_packages[1:], |
| }, |
| lockfiles=default_lockfiles, |
| owners=["nobody@google.com", "noreply@google.com"], |
| commit_divider="BEGIN_FOOTER", |
| ) |
| |
| debug_symbols_properties = api.properties( |
| project="integration", |
| checkout_manifest="minimal", |
| remote="https://fuchsia.googlesource.com", |
| packages_by_manifest={ |
| "chromium/chromium": default_packages + debug_symbol_packages |
| }, |
| preroll_debug_symbol_gcs_buckets=["foo-bucket", "bar-bucket"], |
| postroll_debug_symbol_gcs_buckets=["baz-bucket"], |
| lockfiles=default_lockfiles, |
| ) |
| |
| platform_pkg_properties = api.properties( |
| project="integration", |
| checkout_manifest="minimal", |
| remote="https://fuchsia.googlesource.com", |
| packages_by_manifest={"fuchsia/prebuilts": platform_packages}, |
| lockfiles=default_lockfiles, |
| tag="git_revision", |
| ) |
| |
| def cipd_describe( |
| pkg, instance_id, tags, older=False, backtracked=False, error=None |
| ): |
| """Mock a `cipd describe` call that fetches a package's tags. |
| |
| Args: |
| pkg (str): The name of the package. |
| instance_id (str): The mock instance ID to return. If None, the step |
| will be considered to have failed. |
| tags (seq(str)): The mocked tags to return (generally each tag starts |
| with "version:"). |
| older (bool): Whether we're describing an older instance of this |
| package, after finding out its `ref` version is ahead of the `ref` |
| versions of other packages. |
| backtracked (bool): Whether this describe attempt happens after we've |
| already hit an out-of-date package and backtracked. |
| error (str or None): If set, the mock data will be an error |
| response with this value in the "error" field. |
| """ |
| name = "find common tags.cipd describe {pkg}{suffix}".format( |
| pkg=pkg, suffix=" (2)" if backtracked or older else "" |
| ) |
| if error is not None: |
| return api.step_data(name, api.json.output({"error": error}), retcode=1) |
| return api.step_data( |
| name, |
| api.cipd.example_describe( |
| package_name=pkg, version=instance_id, test_data_tags=tags |
| ), |
| ) |
| |
| def failed_cipd_describe(pkg, error="no such ref", **kwargs): |
| return cipd_describe(pkg, instance_id=None, error=error, tags=[], **kwargs) |
| |
| def cipd_describe_tag(pkg, instance_id, tag): |
| return cipd_describe(pkg, instance_id, [tag], older=True, backtracked=True) |
| |
| def check_current_version(pkg, version): |
| return api.jiri.read_manifest_element( |
| element_name=pkg, |
| # read_manifest_element requires `manifest` and `element_type` |
| # arguments even though they're no-ops for test data. |
| manifest="", |
| element_type="package", |
| test_output={"version": version}, |
| step_name="edit manifests.current version of %s" % pkg, |
| ) |
| |
| def get_platforms(pkg, platforms): |
| return api.jiri.read_manifest_element( |
| element_name=pkg, |
| # read_manifest_element requires `manifest` and `element_type` |
| # arguments even though they're no-ops for test data. |
| manifest="", |
| element_type="package", |
| test_output={"name": pkg, "platforms": ",".join(platforms)}, |
| step_name="resolve package platforms.read manifest for %s" % pkg, |
| ) |
| |
| # Use this to assert that no commit is made, and thus that no roll CL is |
| # created. |
| def assert_no_roll(): |
| return api.post_process(DoesNotRunRE, r".*commit.*") |
| |
| yield ( |
| api.status_check.test("default_with_multipliers") |
| + default_properties |
| + api.properties(test_multipliers=[{"name": "test1", "total_runs": 5}]) |
| + cipd_describe("pkgA", instance_id="A2", tags=["version:2"]) |
| + cipd_describe("pkgB", instance_id="B2", tags=["version:2"]) |
| + cipd_describe( |
| # Cover presentation of large sets of tags. |
| "pkgC", |
| instance_id="C2", |
| tags=["version:%d" % i for i in range(1, 20)], |
| ) |
| + api.auto_roller.success() |
| + api.buildbucket.ci_build(builder="chromium-roller") |
| ) |
| |
| yield ( |
| api.status_check.test("one_manifest_up_to_date") |
| + multiple_manifest_properties |
| + api.properties(test_multipliers=[{"name": "test1", "total_runs": 5}]) |
| + cipd_describe("pkgA", instance_id="A2", tags=["version:2"]) |
| + cipd_describe("pkgB", instance_id="B2", tags=["version:2"]) |
| + cipd_describe("pkgC", instance_id="C2", tags=["version:2", "version:1"]) |
| # pkgA is already up-to-date and is the only package in its manifest, |
| # so that manifest need not be updated. |
| + check_current_version("pkgA", "version:2") |
| + api.auto_roller.success() |
| + api.buildbucket.ci_build(builder="chromium-roller") |
| ) |
| |
| yield ( |
| api.status_check.test("last_package_out_of_date") |
| + default_properties |
| # These two packages share a version tag; all good so far. |
| + cipd_describe("pkgA", instance_id="102", tags=["version:2"]) |
| + cipd_describe("pkgB", instance_id="202", tags=["version:2"]) |
| # But pkgC's latest instance doesn't share a tag with the other two |
| # packages' latest instances. |
| + cipd_describe("pkgC", instance_id="301", tags=["version:1"]) |
| # So we look search for versions of pkgC that *do* share a tag with the |
| # other two packages' latest instances (but there are none). |
| + failed_cipd_describe("pkgC", older=True) |
| # So instead, we'll go back and see if the previous packages have |
| # instances that correspond to the pkgC's package's latest version. |
| + cipd_describe_tag("pkgA", tag="version:1", instance_id="101") |
| + cipd_describe_tag("pkgB", tag="version:1", instance_id="201") |
| # We succeed in finding such instances, so there should be a roll. |
| + api.auto_roller.success() |
| + api.buildbucket.ci_build(builder="tools-roller") |
| ) |
| |
| yield ( |
| api.status_check.test("first_package_out_of_date") |
| + default_properties |
| + cipd_describe("pkgA", instance_id="A1", tags=["version:1"]) |
| # No shared tag with previous package. |
| # So we look search for versions of this package that *do* share a tag |
| # with the first package's latest instance, and we find one. |
| + cipd_describe("pkgB", instance_id="B2", tags=["version:2"]) |
| + cipd_describe_tag("pkgB", tag="version:1", instance_id="B1") |
| # No shared tag with previous chosen packages. |
| # So we look search for versions of the webrunner package that *do* share |
| # a tag with the other two package's latest instances, and find one. |
| + cipd_describe("pkgC", instance_id="C2", tags=["version:2"]) |
| + cipd_describe_tag("pkgC", tag="version:1", instance_id="C1") |
| + api.auto_roller.success() |
| + api.buildbucket.ci_build(builder="tools-roller") |
| ) |
| |
| yield ( |
| api.status_check.test("packages_requiring_ref") |
| + default_properties |
| + api.properties(packages_requiring_ref=["pkgA", "pkgB"]) |
| + cipd_describe("pkgA", instance_id="A2", tags=["version:1"]) |
| + cipd_describe("pkgB", instance_id="B2", tags=["version:1"]) |
| + cipd_describe("pkgC", instance_id="C1", tags=["version:1"]) |
| + api.auto_roller.success() |
| + api.buildbucket.ci_build(builder="tools-roller") |
| ) |
| |
| yield ( |
| # If there aren't any tags shared by all `ref` instances of the |
| # `packages_requiring_ref` packages, then there's no valid tag that |
| # we can pin the package to, so the build should fail. |
| api.status_check.test("inconsistent_packages_requiring_ref", status="failure") |
| + default_properties |
| + api.properties(packages_requiring_ref=["pkgA", "pkgB"]) |
| + cipd_describe("pkgA", instance_id="A2", tags=["version:1"]) |
| + cipd_describe("pkgB", instance_id="B2", tags=["version:2"]) |
| + assert_no_roll() |
| ) |
| |
| yield ( |
| # The packages requiring `ref` share a tag, but the other package doesn't |
| # have any instance with that tag. So we should abort rather than |
| # backtracking, since pkgA and pkgB MUST be pinned to the `ref` |
| # instances. |
| api.status_check.test("cant_use_required_ref", status="failure") |
| + assert_no_roll() |
| + default_properties |
| + api.properties(packages_requiring_ref=["pkgA", "pkgB"]) |
| + cipd_describe("pkgA", instance_id="A2", tags=["version:2"]) |
| + cipd_describe("pkgB", instance_id="B2", tags=["version:2"]) |
| + cipd_describe("pkgC", instance_id="301", tags=["version:1"]) |
| + failed_cipd_describe("pkgC", older=True, error="no such tag") |
| ) |
| |
| yield ( |
| # If only one flexible package is missing the ref, we can just pin it to |
| # the same tag as the other packages that do have the ref. |
| api.status_check.test("package_missing_ref") |
| + default_properties |
| + api.properties(packages_requiring_ref=["pkgA", "pkgB"]) |
| + cipd_describe("pkgA", instance_id="A2", tags=["version:1"]) |
| + cipd_describe("pkgB", instance_id="B2", tags=["version:1"]) |
| + failed_cipd_describe("pkgC") |
| + cipd_describe_tag("pkgC", tag="version:1", instance_id="C1") |
| + api.auto_roller.success() |
| + api.buildbucket.ci_build(builder="tools-roller") |
| ) |
| |
yield (
    # If no package has the specified ref, then we have no starting instance
    # to initialize our common tags.
    api.status_check.test("no_package_has_ref", status="failure")
    + default_properties
    # Every package fails its initial describe-by-ref, leaving no candidate
    # instance to seed the common-tag search, so the build fails.
    + failed_cipd_describe("pkgA")
    + failed_cipd_describe("pkgB")
    + failed_cipd_describe("pkgC")
    + assert_no_roll()
    + api.buildbucket.ci_build(builder="tools-roller")
)
| |
yield (
    # Packages whose latest instances don't all share a common version tag,
    # and no backtracking candidate exists either, should fail the roll.
    api.status_check.test("inconsistent_versions", status="failure")
    + assert_no_roll()
    + default_properties
    # These two packages share a version tag; all good so far.
    + cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
    + cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
    # But pkgC's latest instance doesn't share a tag with the other two
    # packages' latest instances.
    + cipd_describe("pkgC", instance_id="C1", tags=["version:1"])
    # So we search for a version of pkgC that *does* share a tag with
    # the other two packages' latest instances (but it doesn't exist).
    + failed_cipd_describe("pkgC", older=True, error="no such tag")
    # So we'll go back and see if the previous packages have instances that
    # correspond to the pkgC's latest version. But this package doesn't have
    # an instance that matches that version, which should trigger a failure.
    + failed_cipd_describe("pkgA", backtracked=True)
)
| |
yield (
    # If every package is already pinned at the common version, the manifest
    # edit produces no changes and the roller exits without uploading a CL.
    api.status_check.test("noop")
    + default_properties
    + cipd_describe("pkgA", instance_id="A1", tags=["version:1"])
    + cipd_describe("pkgB", instance_id="B1", tags=["version:1"])
    + cipd_describe("pkgC", instance_id="C1", tags=["version:1"])
    # Mock `jiri edit` reporting that no packages were actually modified.
    + api.step_data(
        "edit manifests.jiri edit chromium/chromium",
        api.json.output({"packages": []}),
    )
)
| |
yield (
    # Packages with a ${platform} placeholder are expanded to one concrete
    # package per platform; all expanded instances must share a common tag
    # (here git_revision:a) for the roll to proceed.
    api.status_check.test("default_with_platform")
    + platform_pkg_properties
    + get_platforms("pkgM/${platform}", ["mac-amd64", "linux-amd64"])
    + get_platforms("pkgN/${platform}", ["linux-amd64"])
    + cipd_describe(
        "pkgM/mac-amd64",
        instance_id="M1",
        tags=["git_revision:a", "git_revision:b"],
    )
    + cipd_describe("pkgM/linux-amd64", instance_id="M2", tags=["git_revision:a"])
    + cipd_describe("pkgN/linux-amd64", instance_id="N1", tags=["git_revision:a"])
    + api.auto_roller.success()
    + api.buildbucket.ci_build(builder="tools-roller")
)
| |
def fetch_debug_symbols(pkg, attributes=None):
    """Mock a `jiri.read_manifest_element` lookup for one package.

    Args:
        pkg (str): Package path whose manifest element is being read.
        attributes (str|None): Optional comma-separated Jiri attributes to
            include in the mocked element; omitted when falsy.

    Returns:
        Test step data for the nested manifest-element read step.
    """
    # Build the mocked manifest element in one expression; the "attributes"
    # key is only present when a truthy value was supplied.
    element = (
        {"path": pkg, "attributes": attributes} if attributes else {"path": pkg}
    )
    return api.jiri.read_manifest_element(
        "chromium/chromium",
        "package",
        pkg,
        test_output=element,
        nesting="preroll fetch and upload debug symbols",
    )
| |
yield (
    # Exercises the debug-symbol path: after a successful version resolution,
    # the roller reads each package's manifest element and uploads symbols
    # for packages carrying the debug-symbol attribute.
    api.status_check.test("with_debug_symbols")
    + debug_symbols_properties
    + get_platforms("pkgX/debug/${platform}", ["linux-amd64"])
    + cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
    + cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
    + cipd_describe("pkgC", instance_id="C2", tags=["version:2"])
    + cipd_describe("pkgX/debug/linux-amd64", instance_id="X2", tags=["version:2"])
    + cipd_describe("pkgY/debug", instance_id="Y2", tags=["version:2"])
    # Only the pkgX/pkgY elements carry debug-symbol attributes; the plain
    # packages are read too but contribute no symbols.
    + fetch_debug_symbols(
        "pkgX/debug/${platform}", attributes="debug-symbols,debug-symbols-amd64"
    )
    + fetch_debug_symbols(
        "pkgY/debug", attributes="debug-symbols,debug-symbols-amd64"
    )
    + fetch_debug_symbols("pkgA")
    + fetch_debug_symbols("pkgB")
    + fetch_debug_symbols("pkgC")
    + api.auto_roller.success()
    + api.buildbucket.ci_build(builder="chromium-roller")
)
| |
yield (
    # With dry_run=True the roller resolves versions normally but the
    # auto_roller CL is exercised in dry-run mode rather than being landed.
    api.status_check.test("dry_run")
    + default_properties
    + api.properties(dry_run=True)
    + cipd_describe("pkgA", instance_id="A2", tags=["version:2"])
    + cipd_describe("pkgB", instance_id="B2", tags=["version:2"])
    + cipd_describe("pkgC", instance_id="C2", tags=["version:2", "version:1"])
    + api.auto_roller.dry_run_success()
    + api.buildbucket.ci_build(builder="chromium-dryrun-roller")
)
| |
yield (
    # An unexpected CIPD error (as opposed to a missing tag/ref) on the very
    # first package should abort the roll without ever describing the rest.
    api.status_check.test("no_such_package", status="failure")
    + default_properties
    + failed_cipd_describe("pkgA", error="no such package")
    # The roller should exit immediately if it encounters an unexpected error.
    + api.post_process(DoesNotRunRE, "pkgB")
    + api.post_process(DoesNotRunRE, "pkgC")
)