| # Copyright 2019 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for rolling CIPD prebuilts into Fuchsia.""" |
| |
| import functools |
| import os |
| import re |
| |
| from recipe_engine.config import List |
| from recipe_engine.recipe_api import Property |
| |
| from RECIPE_MODULES.fuchsia.utils.api import memoize |
| |
# Recipe modules this recipe depends on; resolved by the recipe engine.
DEPS = [
    'fuchsia/auto_roller',
    'fuchsia/build',
    'fuchsia/buildbucket_util',
    'fuchsia/jiri',
    'fuchsia/status_check',
    'fuchsia/upload_debug_symbols',
    'recipe_engine/archive',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/step',
]
| |
# Input properties for this recipe; each entry documents itself via `help`.
# These become the keyword arguments of RunSteps.
PROPERTIES = {
    'project':
        Property(kind=str, help='Jiri remote manifest project', default=None),
    'manifest':
        Property(kind=str, help='Jiri manifest to use'),
    'remote':
        Property(kind=str, help='Remote manifest repository'),
    'import_in':
        Property(
            kind=str, help='Path to the manifest to edit relative to $project'),
    'packages':
        Property(
            kind=List(str),
            help='The list of CIPD packages to update in $import_in'),
    'debug_symbol_attribute':
        Property(
            kind=str,
            default='debug-symbols',
            help='Jiri attribute to match debug symbol packages'),
    'debug_symbol_gcs_buckets':
        Property(
            kind=List(str),
            default=(),
            help='GCS buckets to upload debug symbols upon successful roll'),
    'lockfiles':
        Property(
            kind=List(str),
            default=(),
            help='The list of lockfiles to update in "${manifest}=${lockfile}" format'
        ),
    'dry_run':
        Property(
            kind=bool,
            default=False,
            help='Whether to dry-run the auto-roller (CQ+1 and abandon the change)'
        ),
    'tag':
        Property(
            kind=str,
            default='version',
            help='A CIPD tag prefix common to all $packages where a common version can be extracted'
        ),
    'ref':
        Property(
            kind=str,
            default='latest',
            help='A common CIPD ref to resolve when rolling a set of packages'),
    'owners':
        Property(
            kind=List(str),
            default=(),
            help=('The owners responsible for watching this roller '
                  '(example: "username@google.com").')),
    'test_multipliers':
        Property(
            kind=List(dict),
            default=None,
            help=('A list of test multipliers to pass into the roll CL '
                  'MULTIPLY footer.')),
}
| |
# Template for the roll CL's commit message. The placeholders are filled in
# by RunSteps; the trailing footers are consumed by CQ tooling — NOTE(review):
# exact footer semantics are defined by the CQ/auto_roller infrastructure,
# not visible here.
COMMIT_MESSAGE = """[roll] Roll {roller} CIPD packages:

{packages}

From: {old_version}
To: {version}
{multiply}
Cq-Cl-Tag: roller-builder:{builder}
Cq-Cl-Tag: roller-bid:{build_id}
CQ-Do-Not-Cancel-Tryjobs: true"""

# Web UI URL template for a specific instance of a CIPD package.
CIPD_URL = 'https://chrome-infra-packages.appspot.com/p/{package}/+/{version}'
| |
| |
@memoize
def get_tags_from_cipd(api, cipd_pkg, tag_prefix, ref):
  """Fetches the tags of a CIPD package instance that match a prefix.

  Results are memoized, so repeated lookups of the same (package, ref)
  pair within a build do not re-run `cipd describe`.

  Args:
    api (RecipeApi): The recipe api to use.
    cipd_pkg (str): The name of the CIPD package.
    tag_prefix (str): The tag prefix to filter by.
    ref (str): The ref (or instance ID) of the desired instance.

  Returns:
    set(str): The instance's tags that start with `tag_prefix`.
  """
  description = api.cipd.describe(cipd_pkg, ref)
  return {t.tag for t in description.tags if t.tag.startswith(tag_prefix)}
| |
| |
def find_common_tag(api, packages, tag, ref):
  """Returns a common tag shared by some instance of all the packages.

  At least one (and generally most, if not all) of the chosen instances will
  have the given `ref`. The only time that some of the instances won't
  currently have that ref is when some of the packages have been updated to
  a later version and some haven't (e.g. if another roller that does the
  updating is running in parallel with this recipe). In that case, we'll use
  instances of the updated packages whose versions correspond to the latest
  instances of the non-updated packages.

  Args:
    api (RecipeApi): The recipe API to use.
    packages (seq(str)): The names of the packages to search.
    tag (str): The prefix to filter by when searching for a common tag. E.g.,
      if `tag` is "version", then the common tag returned will start with
      "version".
    ref (str): The ref that should be shared by all (and at least one) of the
      chosen packages instances, e.g. "latest".

  Returns:
    The name of a tag that's shared by some instance of all the specified
    packages.

  Raises:
    StepFailure: If no tag common to all the packages can be found.
  """
  # Fetch the instance of the first package that has the `ref` ref, and
  # consider it as the "representative" instance. We'll try to find a subset of
  # its tags that are shared by the `ref` instances of all the other packages.
  common_tags = get_tags_from_cipd(api, packages[0], tag, ref)

  found_outdated_package = False
  i = 1
  while i < len(packages):
    pkg = packages[i]
    i += 1
    tags = get_tags_from_cipd(api, pkg, tag, ref)
    intersection = common_tags.intersection(tags)
    if intersection:
      # This package's latest instance shares some tags with the previous
      # packages' latest instances, so we still have some common tag candidates
      # and can move on to the next package.
      common_tags = intersection
      continue

    # Else, the instance of `pkg` at `ref` does not share any tags with all the
    # previous packages' fetched instances. We'll see if `pkg` has any
    # (probably older) instance that has a shared tag with all the previous
    # packages' chosen instances.
    # Initialize `older_instances` explicitly: if `common_tags` is empty (e.g.
    # the representative instance had no tags matching the prefix), the loop
    # body never runs and the name would otherwise be unbound below.
    older_instances = None
    for common_tag in common_tags:
      older_instances = api.cipd.search(pkg, common_tag)
      if older_instances:
        break

    if older_instances:
      # The representative package instance has not yet been updated to this
      # package's latest version (or at least the representative hadn't been
      # updated at the time that we fetched its tags), but there are older
      # instances of this package that *do* match the version of the
      # representative instance, so we'll use one of those instead.
      # NOTE: There is a chance that `pkg` was actually out of date relative to
      # the representative package, but got updated to the same version as the
      # previous packages in the time between "cipd describe" and "cipd
      # search", in which case `older_instance` will actually be newer than the
      # original instance and we will end up with the same result as if `pkg`
      # had already been up-to-date when we first "cipd described" it.
      older_instance = older_instances[0].instance_id
      older_tags = get_tags_from_cipd(api, pkg, tag, older_instance)
      common_tags.intersection_update(older_tags)
      assert common_tags  # should contain at least `common_tag`
    elif not found_outdated_package:
      # This package has not yet been updated to the version of the
      # representative package instance. Go back and start over with *this*
      # package as the new representative.
      found_outdated_package = True
      common_tags = tags
      i = 0
    else:
      # We should only "backtrack" once (i.e., find a package whose latest
      # version tag doesn't correspond to a version of all the other packages).
      # If it happens twice, there's probably something weird going on and we
      # won't be able to reconcile the tags by continuing.
      raise api.step.StepFailure('unable to find common tag to roll')

  if not common_tags:
    # No candidates survived (e.g. no instance carried a tag with the given
    # prefix). Fail with a clear step failure rather than a bare KeyError
    # from the pop() below.
    raise api.step.StepFailure('unable to find common tag to roll')

  # Choose one of the common tags (it doesn't matter which) as the version to
  # pin all packages to.
  return common_tags.pop()
| |
| |
def fetch_and_upload_debug_symbols(api, project, import_in, remote, project_dir,
                                   packages, debug_symbol_attribute,
                                   debug_symbol_gcs_buckets):
  """
  Fetch debug symbol archives, unpack them, and upload debug symbols.

  Args:
    api (RecipeApi): The recipe API to use.
    project (str): Jiri remote manifest project.
    import_in (str): Path to the edited manifest relative to $project
      containing debug symbol packages.
    remote (str): Remote manifest repository.
    project_dir (Path): Project root path of $import_in.
    packages (seq(str)): The list of CIPD packages updated in $import_in.
    debug_symbol_attribute (str): Jiri attribute to match debug symbol packages.
    debug_symbol_gcs_buckets (seq(str)): GCS buckets to upload debug symbols to.
  """
  with api.context(infra_steps=True):
    # Re-init jiri with the debug-symbol attribute so that only debug symbol
    # packages are matched/fetched below.
    api.jiri.init(
        use_lock_file=True,
        attributes=(debug_symbol_attribute,),
    )
    # Fetch debug symbol packages using locally edited manifest.
    api.jiri.import_manifest(
        manifest=import_in,
        remote=remote,
        name=project,
    )
    api.jiri.fetch_packages(local_manifest=True)

  # Build just enough of Fuchsia to obtain the upload_debug_symbols tool.
  with api.step.nest('build'):
    gn_results = api.build.gen(
        checkout_root=api.path['start_dir'],
        fuchsia_build_dir=api.path['start_dir'].join('out', 'default'),
        target='x64',
        build_type='debug',
        product='products/bringup.gni',
        # //bundles:infratools is necessary to build upload_debug_symbols.
        packages=['//bundles:infratools'],
    )

    # Ninja expects a target path relative to the build dir, not absolute.
    upload_debug_symbols_target = os.path.relpath(
        str(gn_results.tool('upload_debug_symbols')),
        str(gn_results.fuchsia_build_dir),
    )
    api.build.ninja(
        gn_results=gn_results,
        build_zircon=False,
        targets=[upload_debug_symbols_target],
    )

  build_id_dirs = []
  for package in packages:
    # Find archives for each debug symbol package.
    with api.context(cwd=project_dir):
      package_def = api.jiri.read_manifest_element(
          manifest=import_in,
          element_type='package',
          element_name=package,
      )
    # Skip non debug symbol packages.
    if debug_symbol_attribute not in package_def.get('attributes', ''):
      continue

    package_path = api.path['start_dir'].join(package_def['path'])
    archives = api.file.glob_paths(
        name='find archives for %s' % package,
        source=package_path,
        pattern='**/*.tar.bz2',
        test_data=(package_path.join('symbols.tar.bz2'),),
    )

    # Unpack archives into .build-id dirs.
    for archive in archives:
      # Extract API requires a unique, non-existent directory.
      archive_basename = os.path.basename(api.path.abspath(archive))
      output_dir = api.path['start_dir'].join(package, archive_basename)
      api.archive.extract(
          step_name='extract %s' % archive,
          archive_file=archive,
          output=output_dir,
      )
      build_id_dirs.append(output_dir)

  # Upload every unpacked .build-id dir to each configured GCS bucket.
  for debug_symbol_gcs_bucket in debug_symbol_gcs_buckets:
    api.upload_debug_symbols(
        step_name='upload debug symbols',
        upload_debug_symbols_path=gn_results.tool('upload_debug_symbols'),
        bucket=debug_symbol_gcs_bucket,
        build_id_dirs=build_id_dirs,
    )
| |
| |
| def _get_platform_specific_packages(package, output): |
| platform_regex = '(?<=' + package.replace('${platform}', |
| r'\${platform=).*(?=})') |
| pattern = re.compile(platform_regex) |
| match = pattern.search(output) |
| if match: |
| platforms = match.group(0).split(',') |
| return [package.replace('${platform}', platform) for platform in platforms] |
| return [package] |
| |
| |
def _append_urls(packages, old_version, new_version):
  """Renders each package name with links to its old and new CIPD instances.

  Packages that still contain the '${platform}' placeholder have no single
  concrete CIPD instance page, so they are passed through unchanged.

  Args:
    packages (seq(str)): CIPD package names.
    old_version (str): The version being rolled away from.
    new_version (str): The version being rolled to.

  Returns:
    list(str): One line per package, with old/new instance URLs appended
    where applicable.
  """
  lines = []
  for pkg in packages:
    if '${platform}' in pkg:
      lines.append(pkg)
      continue
    old_url = CIPD_URL.format(package=pkg, version=old_version)
    new_url = CIPD_URL.format(package=pkg, version=new_version)
    lines.append('{package} old:{old} new:{new}'.format(
        package=pkg, old=old_url, new=new_url))
  return lines
| |
| |
def RunSteps(api, project, manifest, remote, import_in, packages,
             debug_symbol_attribute, debug_symbol_gcs_buckets, lockfiles,
             dry_run, tag, ref, owners, test_multipliers):
  """Rolls new versions of CIPD packages into the Fuchsia integration repo.

  Keyword arguments correspond one-to-one to the PROPERTIES declared above;
  see their `help` strings for documentation.
  """
  with api.context(infra_steps=True):
    if owners:
      # Surface the roller's owners in the build results for easy triage.
      owners_step = api.step('owners', None)
      owners_step.presentation.step_summary_text = ', '.join(owners)

    # Check out the integration project at the pinned manifest.
    api.jiri.init(use_lock_file=True)
    api.jiri.import_manifest(manifest, remote, project)
    api.jiri.update(run_hooks=False)
    api.jiri.run_hooks()

  project_dir = api.path['start_dir'].join(*project.split('/'))

  with api.step.nest('find common tag'):
    # A single version tag shared by (some instance of) every package.
    version = find_common_tag(api, packages, tag, ref)

  # Pin every package in the manifest to the chosen version.
  with api.context(cwd=project_dir):
    changes = api.jiri.edit_manifest(
        import_in, packages=[(package, version) for package in packages])

  if not changes['packages']:
    # Nothing changed, so there's nothing to roll; exit successfully.
    api.step('manifest up-to-date; nothing to roll', None)
    return

  old_version = changes['packages'][0]['old_version']
  exact_packages = set()

  # Test data for jiri.resolve
  package_test_data = [
      ('@Subdir prebuilt/tools/buildbucket\n' +
       package[:package.index('${platform}')] +
       '${platform=linux-amd64,mac-amd64} git_revision:aa2dae..')
      for package in packages
      if '${platform}' in package
  ]
  test_data = '\n'.join(package_test_data)

  # Update the lockfiles.
  for lock_entry in lockfiles:
    # Each entry has the form "${manifest}=${lockfile}".
    # NOTE: this rebinds the `manifest` parameter, which has already been
    # consumed by the import above.
    fields = lock_entry.split('=')
    manifest = fields[0]
    lock = fields[1]
    resolve_output = api.jiri.resolve(
        local_manifest=True,
        output=lock,
        manifests=[manifest],
        step_test_data=test_data).stdout
    # Expand ${platform} packages into the concrete per-platform names that
    # jiri resolved, so the commit message lists real packages.
    for p in packages:
      if '${platform}' in p:
        platform_pkgs = _get_platform_specific_packages(p, resolve_output)
        exact_packages = exact_packages.union(platform_pkgs)
      else:
        exact_packages.add(p)

  exact_packages = sorted(exact_packages)
  packages_with_urls = _append_urls(exact_packages, old_version, version)

  # Optional MULTIPLY footer requesting extra test runs on the roll CL.
  multiply = ''
  if test_multipliers:
    multiply = '\nMULTIPLY: `%s`\n' % api.json.dumps(
        test_multipliers, indent=2)

  message = COMMIT_MESSAGE.format(
      roller=api.buildbucket.builder_name.replace('-roller', ''),
      packages='\n'.join(packages_with_urls),
      old_version=old_version,
      version=version,
      builder=api.buildbucket.builder_name,
      build_id=api.buildbucket_util.id,
      multiply=multiply,
  )

  # Land the changes.
  rolled = api.auto_roller.attempt_roll(
      gerrit_project=project,
      repo_dir=project_dir,
      commit_message=message,
      dry_run=dry_run,
  )

  # If roll succeeded, upload any debug symbols that were rolled.
  # TODO(fxb/37432): Upload debug symbols with artifactory.
  if rolled and debug_symbol_gcs_buckets:
    with api.step.nest('fetch and upload debug symbols'):
      fetch_and_upload_debug_symbols(
          api=api,
          project=project,
          import_in=import_in,
          remote=remote,
          project_dir=project_dir,
          packages=packages,
          debug_symbol_attribute=debug_symbol_attribute,
          debug_symbol_gcs_buckets=debug_symbol_gcs_buckets,
      )
| |
| |
def GenTests(api):
  """Yields simulation test cases for this recipe.

  Each test injects fake `cipd describe`/`cipd search` step data to drive
  find_common_tag down a specific path, then (usually) fakes a successful
  auto-roller attempt.
  """
  default_packages = ['pkgA', 'pkgB', 'pkgC']
  debug_symbol_packages = ['pkgX/debug', 'pkgY/debug']
  platform_packages = ['pkgM/${platform}', 'pkgN/${platform}']
  default_lockfiles = ['integration/flower=integration/jiri.lock']

  default_properties = api.properties(
      project='integration',
      manifest='minimal',
      remote='https://fuchsia.googlesource.com',
      import_in='chromium/chromium',
      packages=default_packages,
      lockfiles=default_lockfiles,
      owners=['nobody@google.com', 'noreply@google.com'],
  )

  debug_symbols_properties = api.properties(
      project='integration',
      manifest='minimal',
      remote='https://fuchsia.googlesource.com',
      import_in='chromium/chromium',
      packages=default_packages + debug_symbol_packages,
      debug_symbol_gcs_buckets=('foo-bucket', 'bar-bucket'),
      lockfiles=default_lockfiles,
  )

  platform_pkg_properties = api.properties(
      project='integration',
      manifest='minimal',
      remote='https://fuchsia.googlesource.com',
      import_in='fuchsia/prebuilts',
      packages=platform_packages,
      lockfiles=default_lockfiles,
      tag='git_revision',
  )

  def cipd_describe(pkg, version, tags, second=False):
    # Fakes the output of a "cipd describe" step under "find common tag".
    # `second` targets the second describe of the same package (recipe
    # engine suffixes repeated step names with " (2)").
    suffix = ' (2)' if second else ''
    return api.step_data(
        'find common tag.cipd describe %s%s' % (pkg, suffix),
        api.cipd.example_describe(
            package_name=pkg, version=version, test_data_tags=tags))

  def cipd_search(pkg, tag, instances=None):
    # Fakes the output of a "cipd search" step under "find common tag".
    return api.step_data(
        'find common tag.cipd search %s %s' % (pkg, tag),
        api.cipd.example_search(package_name=pkg, instances=instances),
    )

  def cipd_search_and_describe(pkg, version, tag):
    # Fakes the search-then-describe sequence used when backtracking to an
    # older instance of `pkg`.
    return (cipd_search(pkg, tag, instances=[version]) +
            cipd_describe(pkg, version, [tag], second=True))

  # yapf: disable
  yield (
      api.status_check.test('default_with_multipliers')
      + default_properties
      + api.properties(test_multipliers=[
          {'name': 'test1', 'total_runs': 5},
      ])
      + cipd_describe('pkgA', version='A2', tags=['version:2'])
      + cipd_describe('pkgB', version='B2', tags=['version:2'])
      + cipd_describe('pkgC', version='C2', tags=['version:2', 'version:1'])
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='chromium-roller')
  )

  yield (
      api.status_check.test('last_package_out_of_date')
      + default_properties

      # These two packages share a version tag; all good so far.
      + cipd_describe('pkgA', version='102', tags=['version:2'])
      + cipd_describe('pkgB', version='202', tags=['version:2'])

      # But pkgC's latest instance doesn't share a tag with the other two
      # packages' latest instances.
      + cipd_describe('pkgC', version='301', tags=['version:1'])

      # So we look search for versions of the webrunner package that *do* share
      # a tag with the other two package's latest instances (but there are
      # none).
      + cipd_search('pkgC', tag='version:2', instances=[])

      # So we'll go back and see if the previous packages have instances that
      # correspond to the pkgC's package's latest version.
      + cipd_search_and_describe('pkgA', tag='version:1', version='101')
      + cipd_search_and_describe('pkgB', tag='version:1', version='201')

      # We succeed in finding such instances, so there should be a roll.
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )

  yield (
      api.status_check.test('first_package_out_of_date')
      + default_properties
      + cipd_describe('pkgA', version='A1', tags=['version:1'])

      # No shared tag with previous package.
      # So we look search for versions of this package that *do* share a tag
      # with the first package's latest instance, and we find one.
      + cipd_describe('pkgB', version='B2', tags=['version:2'])
      + cipd_search_and_describe('pkgB', tag='version:1', version='B1')

      # No shared tag with previous chosen packages.
      # So we look search for versions of the webrunner package that *do* share
      # a tag with the other two package's latest instances, and find one.
      + cipd_describe('pkgC', version='C2', tags=['version:2'])
      + cipd_search_and_describe('pkgC', tag='version:1', version='C1')

      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )

  yield (
      api.status_check.test('inconsistent_versions', status='failure')
      + default_properties

      # These two packages share a version tag; all good so far.
      + cipd_describe('pkgA', version='A2', tags=['version:2'])
      + cipd_describe('pkgB', version='B2', tags=['version:2'])

      # But pkgC's latest instance doesn't share a tag with the other two
      # packages' latest instances.
      + cipd_describe('pkgC', version='C1', tags=['version:1'])

      # So we look search for versions of the webrunner package that *do* share
      # a tag with the other two package's latest instances (but there are
      # none).
      + cipd_search('pkgC', tag='version:2', instances=[])

      # So we'll go back and see if the previous packages have instances that
      # correspond to the pkgC's latest version. But this package
      # doesn't have an instance that matches that version, which should
      # trigger a failure.
      + cipd_search('pkgA', tag='version:1', instances=[])
  )

  yield (
      api.status_check.test('noop')
      + default_properties
      + cipd_describe('pkgA', version='A1', tags=['version:1'])
      + cipd_describe('pkgB', version='B1', tags=['version:1'])
      + cipd_describe('pkgC', version='C1', tags=['version:1'])
      + api.step_data('jiri edit', api.json.output({'packages': []}))
  )

  yield (
      api.status_check.test('default_with_platform')
      + platform_pkg_properties
      + cipd_describe('pkgM/${platform}', version='M1', tags=['git_revision:a'])
      + cipd_describe('pkgN/${platform}', version='N1', tags=['git_revision:a'])
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )

  yield (
      api.status_check.test('default_platform_not_resolved')
      + platform_pkg_properties
      + cipd_describe('pkgM/${platform}', version='M1', tags=['git_revision:a'])
      + cipd_describe('pkgN/${platform}', version='N1', tags=['git_revision:a'])
      + api.step_data('jiri resolve', api.raw_io.stream_output(''))
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='tools-roller')
  )

  def fetch_debug_symbols(pkg, attributes=None):
    # Fakes the "jiri read_manifest_element" step data for `pkg` under the
    # "fetch and upload debug symbols" nesting.
    test_output = {'path': pkg}
    if attributes:
      test_output['attributes'] = attributes
    return api.jiri.read_manifest_element(
        api,
        'chromium/chromium',
        'package',
        pkg,
        test_output=test_output,
        nesting='fetch and upload debug symbols',
    )

  yield (
      api.status_check.test('with_debug_symbols')
      + debug_symbols_properties
      + cipd_describe('pkgA', version='A2', tags=['version:2'])
      + cipd_describe('pkgB', version='B2', tags=['version:2'])
      + cipd_describe('pkgC', version='C2', tags=['version:2'])
      + cipd_describe('pkgX/debug', version='X2', tags=['version:2'])
      + cipd_describe('pkgY/debug', version='Y2', tags=['version:2'])
      + fetch_debug_symbols(
          'pkgX/debug', attributes='debug-symbols,debug-symbols-amd64')
      + fetch_debug_symbols(
          'pkgY/debug', attributes='debug-symbols,debug-symbols-amd64')
      + fetch_debug_symbols('pkgA')
      + fetch_debug_symbols('pkgB')
      + fetch_debug_symbols('pkgC')
      + api.auto_roller.success_step_data()
      + api.buildbucket.ci_build(builder='chromium-roller')
  )
  # yapf: enable