| # Copyright 2018 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| import functools |
| |
| from recipe_engine import recipe_api |
| from urlparse import urlparse |
| |
| from . import patch |
| from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2 |
| |
# Set as an output property and consumed by other recipes code and the results
# uploader code in google3.
# It's a monotonic integer that corresponds to integration revisions so we
# can stuff our results into systems that expect Piper changelist numbers.
# (Also re-exported below as CheckoutApi.REVISION_COUNT_PROPERTY.)
REVISION_COUNT_PROPERTY = "integration-revision-count"

# Set as an output property to be used in the coverage recipe to remap source
# file paths to a new checkout.
# (Also re-exported below as CheckoutApi.ROOT_DIR_PROPERTY.)
ROOT_DIR_PROPERTY = "checkout_root"

# By default, skip patching GerritChanges which map to these projects. They are
# not valid projects to patch into standard Fuchsia checkouts.
SKIP_PATCH_PROJECTS = ("infra/recipes",)
| |
| |
class _CheckoutResults(object):
    """Represents a Fuchsia source checkout.

    Wraps a completed jiri checkout and provides helpers for uploading
    checkout metadata (source manifest, snapshot) to GCS and for computing
    the files touched by the triggering Gerrit change.
    """

    def __init__(self, api, root_dir, snapshot_file, release_version, source_info):
        """
        Args:
            api: Recipe module API handle used to reach other modules
                (jiri, git, upload, file, path, step, context, raw_io).
            root_dir (Path): Root directory of the jiri checkout.
            snapshot_file (Path): Path to the jiri snapshot file.
            release_version (str or None): Release version of the checkout,
                or None if this is not a release version.
            source_info: Source info for the checkout (see
                CheckoutApi.with_options).
        """
        self._api = api
        self._root_dir = root_dir
        self._snapshot_file = snapshot_file
        self._release_version = release_version
        self.source_info = source_info
        # Single-entry cache for changed_files(): a (cache_key, paths) tuple,
        # or None before the first call.
        self._changed_files_cache = None

    @property
    def root_dir(self):
        """The path to the root directory of the jiri checkout."""
        return self._root_dir

    @property
    def snapshot_file(self):
        """The path to the jiri snapshot file."""
        return self._snapshot_file

    @property
    def release_version(self):
        """Release version of checkout if applicable, otherwise None."""
        return self._release_version

    def upload_source_manifest(self, gcs_bucket, namespace=None):
        """Upload the jiri source manifest to GCS.

        Args:
            gcs_bucket (str): Destination GCS bucket; must be non-empty.
            namespace: Passed through to `api.upload.file_to_gcs`.
        """
        assert gcs_bucket
        with self._api.context(cwd=self._root_dir):
            source_manifest = self._api.jiri.source_manifest()
        with self._api.step.nest("upload source manifest"):
            # Write the manifest to a temp file so it can be uploaded as a
            # regular file.
            local_manifest_path = self._api.path.mkstemp()
            self._api.file.write_json(
                "source_manifest", local_manifest_path, source_manifest
            )
            self._api.upload.file_to_gcs(
                source=local_manifest_path,
                bucket=gcs_bucket,
                subpath="source_manifest.json",
                namespace=namespace,
            )

    def upload_results(self, gcs_bucket, namespace=None):
        """Upload snapshot to a given GCS bucket.

        Also sets the REVISION_COUNT_PROPERTY output property to the number
        of commits reachable from HEAD in the integration repo (a monotonic
        integer; see the comment on REVISION_COUNT_PROPERTY).

        Args:
            gcs_bucket (str): Destination GCS bucket; must be non-empty.
            namespace: Passed through to `api.upload.file_to_gcs`.
        """
        assert gcs_bucket
        with self._api.step.nest("upload checkout results") as presentation:
            self._api.upload.file_to_gcs(
                source=self.snapshot_file,
                bucket=gcs_bucket,
                subpath=self._api.path.basename(self.snapshot_file),
                namespace=namespace,
            )
            # `git rev-list --count HEAD` in the integration repo yields the
            # monotonically increasing revision count.
            presentation.properties[REVISION_COUNT_PROPERTY] = int(
                self._api.git(
                    "set %s property" % REVISION_COUNT_PROPERTY,
                    "-C",
                    self._root_dir.join("integration"),
                    "rev-list",
                    "--count",
                    "HEAD",
                    step_test_data=lambda: self._api.raw_io.test_api.stream_output("1"),
                    stdout=self._api.raw_io.output(),
                ).stdout.strip()
            )

    def changed_files(
        self, buildbucket_input, test_data=("foo.cc", "bar.cc"), **kwargs
    ):
        """Returns a list of absolute paths that were changed.

        Checks the git repo specified in buildbucket_input.gerrit_changes[0].

        Args:
            buildbucket_input (build_pb2.Input): The buildbucket input from
                which to retrieve CL info.
            test_data (seq of str): Mock list of changed files.
            **kwargs (dict): Passed through to `api.git.get_changed_files()`.

        Returns:
            None if buildbucket_input.gerrit_changes is empty.
            list of paths otherwise.
        """
        if not buildbucket_input.gerrit_changes:
            return None

        change = buildbucket_input.gerrit_changes[0]
        # Key uniquely identifying the change+patchset, used to decide
        # whether the cached result is still valid.
        cache_key = (change.host, change.project, change.change, change.patchset)
        if not self._changed_files_cache or cache_key != self._changed_files_cache[0]:
            with self._api.step.nest("get changed files"):
                project = change.project
                with self._api.context(cwd=self._root_dir):
                    # Mock output for the `jiri project` step in recipe tests.
                    project_test_data = [
                        {
                            "name": project,
                            "path": self._api.path.abspath(self._root_dir)
                            if project == "project"
                            else self._api.path.abspath(self._root_dir.join(project)),
                        }
                    ]
                    # Resolve the jiri project name to its checkout path.
                    repo_path = self._api.jiri.project(
                        projects=[project], test_data=project_test_data
                    ).json.output[0]["path"]
                with self._api.context(cwd=self._api.path.abs_to_path(repo_path)):
                    changed_files = self._api.git.get_changed_files(
                        test_data=test_data, **kwargs
                    )
                # Convert repo-relative paths to absolute paths.
                changed_files = [
                    self._api.path.join(repo_path, changed) for changed in changed_files
                ]
                # We only expect this function to be called with one key per build, so
                # keeping a cache of one element should be sufficient, while still
                # being correct in case it is called with different keys.
                self._changed_files_cache = (cache_key, changed_files)
        return self._changed_files_cache[1]
| |
| |
| def _nest(func): |
| """Nest function call within "checkout" step. |
| |
| Check whether already inside a "checkout" step since some public |
| methods in CheckoutApi call other public methods. |
| """ |
| |
| @functools.wraps(func) |
| def wrapper(self, *args, **kwargs): |
| if not self._nested: |
| with self.m.step.nest("checkout"): |
| self._nested = True |
| ret = func(self, *args, **kwargs) |
| self._nested = False |
| return ret |
| else: |
| return func(self, *args, **kwargs) |
| |
| return wrapper |
| |
| |
class CheckoutApi(recipe_api.RecipeApi):
    """An abstraction over how Jiri checkouts are created during Fuchsia CI/CQ
    builds."""

    # Re-export the module-level property-name constants on the API class.
    REVISION_COUNT_PROPERTY = REVISION_COUNT_PROPERTY
    ROOT_DIR_PROPERTY = ROOT_DIR_PROPERTY

    class PatchFileValidationError(recipe_api.StepFailure):
        """Raised when a patches.json file fails validation (a user error,
        not an infra failure)."""

        pass

    def __init__(self, buildset, *args, **kwargs):
        """
        Args:
            buildset (str or None): If set, a string of the form
                "commit/gitiles/<host>/<project>/+/<revision>" used to
                override the build input's gitiles commit (see the comment
                below).
        """
        super(CheckoutApi, self).__init__(*args, **kwargs)
        # True while execution is inside a "checkout" nest step; managed by
        # the _nest decorator.
        self._nested = False
        self._gitiles_commit = None
        # If buildset != None, it will be of the format
        # commit/gitiles/host/project/+/revision.
        # If provided, it will override the gitiles_commit from the build input used
        # in the checkout. This would happen in the case that the build input commit
        # is tracking a different repo than the one we want to checkout (as in the
        # fuchsia builds launched by the toolchain recipes).
        if buildset:
            buildset = buildset.replace("commit/gitiles/", "")
            # After stripping the prefix the parts are:
            # [host, <project segments...>, "+", revision].
            buildset_parts = buildset.split("/")
            host = buildset_parts[0]
            project = "/".join(buildset_parts[1:-2])
            revision = buildset_parts[-1]
            self._gitiles_commit = common_pb2.GitilesCommit(
                host=host, project=project, id=revision,
            )

    def CheckoutResults(self, root_dir, snapshot_file, release_version, source_info):
        """Return a CheckoutResults object.

        Outside this module, should only be used in testing example recipes.
        """
        return _CheckoutResults(
            self.m, root_dir, snapshot_file, release_version, source_info
        )

    @_nest
    def with_options(
        self,
        path,
        manifest,
        remote,
        project=None,
        patch_project=None,
        attributes=(),
        build_input=None,
        rebase_revision=None,
        run_hooks=True,
        skip_patch_projects=SKIP_PATCH_PROJECTS,
        timeout_secs=None,
    ):
        """Initializes and populates a jiri checkout from a remote manifest.

        Emits a source info for the build.

        Args:
            path (Path): The Fuchsia checkout root.
            manifest (str): Relative path to the manifest in the remote repository.
            remote (str): URL to the remote repository.
            project (str): The name that jiri should assign to the project.
            patch_project (str): The name of the jiri project to patch if different
                from the gerrit_change project.
            rebase_revision (str): The base revision to patch on top of, if the
                build input is a Gerrit Change. TODO(kjharland): This is hacky, find a
                better way to carry this information through to `from_patchset`.
            attributes (seq(str)): A list of jiri manifest attributes; projects or
                packages with matching attributes - otherwise regarded as optional -
                will be downloaded.
            build_input (buildbucket.build_pb2.Build.Input): The input to a buildbucket
                build.
            run_hooks (bool): Whether or not to run the hooks.
            skip_patch_projects (seq(str)): Do not attempt to patch these
                projects.
            timeout_secs (int): A timeout for jiri update in seconds.

        Returns:
            A Python dictionary representing the source info.
        """

        if build_input and build_input.gerrit_changes:
            # CQ path: check out and patch the triggering Gerrit change.
            gerrit_change = build_input.gerrit_changes[0]
            rest = build_input.gerrit_changes[1:]
            # Only single-change build inputs are supported.
            assert rest == [], "build information contains more than one patchset"
            self.m.jiri.init(
                directory=path,
                attributes=attributes,
                # The Gitiles host is the Gerrit host minus "-review".
                skip_partial=[
                    "https://%s/%s"
                    % (
                        gerrit_change.host.replace("-review", ""),
                        patch_project or gerrit_change.project,
                    )
                ],
            )
            self.from_patchset(
                path=path,
                manifest=manifest,
                remote=remote,
                project=project,
                patch_project=patch_project,
                rebase_revision=rebase_revision,
                run_hooks=run_hooks,
                timeout_secs=timeout_secs,
                gerrit_change=gerrit_change,
                skip_patch_projects=skip_patch_projects,
            )
        else:
            # CI path: check out at a gitiles commit (or HEAD if none).
            self.m.jiri.init(directory=path, attributes=attributes)
            commit = None
            if build_input and build_input.gitiles_commit.id:
                commit = build_input.gitiles_commit

            if self._gitiles_commit:
                # Use gitiles_commit from properties instead of build input.
                commit = self._gitiles_commit

            self.from_commit(
                path=path,
                manifest=manifest,
                remote=remote,
                commit=commit,
                project=project,
                run_hooks=run_hooks,
                timeout_secs=timeout_secs,
            )

        with self.m.context(cwd=path):
            # Emit the source info for the build.
            return self.m.jiri.project(name="source-info").json.output

    @_nest
    def from_spec(self, checkout_spec):
        """Initialize a Fuchsia checkout according to a checkout spec.

        If a gitiles_commit was provided through the buildset property, this will
        set the buildbucket.build.input's gitiles_commit to the gitiles_commit from
        the property.

        Args:
            checkout_spec: infra.fuchsia.Fuchsia.Checkout protocol buffer message.
        """
        build = self.m.buildbucket.build
        if self._gitiles_commit:
            # Override build input gitiles_commit with gitiles_commit from
            # properties.
            build.input.gitiles_commit.CopyFrom(self._gitiles_commit)
        checkout_root = self.m.path["start_dir"].join("fuchsia")
        assert checkout_spec.manifest
        assert checkout_spec.remote
        checkout = self.fuchsia_with_options(
            path=checkout_root,
            build=build,
            manifest=checkout_spec.manifest,
            remote=checkout_spec.remote,
            attributes=checkout_spec.attributes,
            is_release_version=checkout_spec.is_release_version,
        )

        return checkout

    @_nest
    def from_patchset(
        self,
        path,
        manifest,
        remote,
        project,
        patch_project,
        run_hooks,
        timeout_secs,
        gerrit_change,
        rebase_revision=None,
        skip_patch_projects=SKIP_PATCH_PROJECTS,
    ):
        """Initializes and populates a Jiri checkout from a remote manifest and Gerrit
        change.

        Args:
            path (Path): The Fuchsia checkout root.
            manifest (str): Relative path to the manifest in the remote repository.
            remote (str): URL to the remote repository.
            project (str): The name that jiri should assign to the project.
            patch_project (str): The name of the jiri project to patch if different
                from the gerrit_change project.
            run_hooks (bool): Whether or not to run the hooks.
            timeout_secs (int): A timeout for jiri update in seconds.
            gerrit_change: The GerritChange message from the BuildBucket build input.
            rebase_revision (str): The revision to rebase gerrit_change on top of.
            skip_patch_projects (seq(str)): Do not attempt to patch these
                projects.
        """
        # True when the change targets the manifest project itself.
        is_integration_patch = project == gerrit_change.project

        # Fetch the project and update.
        details = self._get_change_details(gerrit_change)
        with self.m.context(cwd=path):
            # Import the manifest at HEAD of the change's destination branch.
            self.m.jiri.import_manifest(
                manifest,
                remote,
                name=project,
                revision="HEAD",
                remote_branch=details["branch"],
            )

            self.m.jiri.update(
                run_hooks=False, fetch_packages=False, timeout=timeout_secs
            )

            # Patch the current Gerrit change.
            current_revision = self._get_current_revision(gerrit_change, details)
            patch_ref = current_revision["ref"]

            patch_project = patch_project or gerrit_change.project

            # Failures in pulling down patches and rebasing are likely not
            # infra-related. If we got here, we're already able to talk to Gerrit
            # successfully, so any errors are likely merge conflicts.
            with self.m.context(infra_steps=False):
                if patch_project not in skip_patch_projects:
                    self._apply_patch(
                        path,
                        patch_ref,
                        patch_project,
                        gerrit_change,
                        rebase_revision,
                        is_integration_patch,
                        timeout_secs,
                    )

            if run_hooks:
                self.m.jiri.run_hooks(local_manifest=True)
            self.m.jiri.fetch_packages(local_manifest=True)

    @_nest
    def from_commit(
        self, path, manifest, remote, commit, project, run_hooks, timeout_secs
    ):
        """Populates a Jiri checkout from a remote manifest and Gitiles commit.

        Args:
            path (Path): The Fuchsia checkout root.
            manifest (str): Relative path to the manifest in the remote repository.
            remote (str): URL to the remote manifest repository.
            commit: Commit information derived from
                buildbucket.build_pb2.Build.Input.gitiles_commit.
            project (str): The name that jiri should assign to the project.
            run_hooks (bool): Whether or not to run the hooks.
            timeout_secs (int): A timeout for jiri update in seconds.
        """
        # Default: check out the manifest at HEAD with no project override.
        revision = "HEAD"
        override = False

        if commit:
            revision = commit.id
            # If the commit project != manifest project, then we get the manifest at
            # HEAD and override only the commit's project.
            override = commit.project != project

        with self.m.context(cwd=path):
            if override:
                self.m.jiri.import_manifest(
                    manifest, remote, name=project, revision="HEAD"
                )

                # In order to identify a project to override, jiri keys on
                # both the project name and the remote source repository (not to be
                # confused with `remote`, the manifest repository).
                # Doing this correctly would require finding the commit's remote in the
                # transitive imports of the jiri manifest. But those transitive imports
                # aren't available until we run "jiri update", and doing that twice is
                # slow, so we rely on:
                # 1. The convention that the name of the jiri project
                # is the same as commit.project.
                # 2. The hope that the URL scheme of the commit remote is the same as that
                # of the manifest remote.
                manifest_remote_url = urlparse(remote)
                host = commit.host
                # When using sso we only specify the lowest subdomain, by convention.
                if manifest_remote_url.scheme == "sso":
                    host = host.split(".")[0]
                commit_remote = "%s://%s/%s" % (
                    manifest_remote_url.scheme,
                    host,
                    commit.project,
                )
                self.m.jiri.override(
                    project=commit.project,  # See 1. above
                    remote=commit_remote,
                    new_revision=revision,
                )
            else:
                self.m.jiri.import_manifest(
                    manifest, remote, name=project, revision=revision
                )

            self.m.jiri.update(run_hooks=False, timeout=timeout_secs)
            if run_hooks:
                self.m.jiri.run_hooks()

    @_nest
    def fuchsia_with_options(
        self,
        path,
        build,
        manifest,
        remote,
        rebase_revision=None,
        attributes=(),
        is_release_version=False,
        timeout_secs=30 * 60,
        skip_patch_projects=SKIP_PATCH_PROJECTS,
    ):
        """Uses Jiri to check out a Fuchsia project.

        The root of the checkout is returned via _CheckoutResults.root_dir.

        Args:
            path (Path): The Fuchsia checkout root.
            build (buildbucket.build_pb2.Build): A buildbucket build.
            manifest (str): A path to the manifest in the remote (e.g. manifest/minimal)
            remote (str): A URL to the remote repository which Jiri will be pointed at
            rebase_revision (str): The base revision to patch on top of, if the
                build input is a Gerrit Change. TODO(kjharland): This is hacky, find a
                better way to carry this information through to `from_patchset`.
            attributes (seq(str)): A list of jiri manifest attributes; projects or
                packages with matching attributes - otherwise regarded as optional -
                will be downloaded.
            is_release_version (bool): Whether the checkout is a release version.
            timeout_secs (int): How long to wait for the checkout to complete
                before failing
            skip_patch_projects (seq(str)): Do not attempt to patch these
                projects.

        Returns:
            A _CheckoutResults containing details of the checkout.
        """
        try:
            source_info = self.with_options(
                path,
                manifest,
                remote,
                # Currently all valid fuchsia checkouts use a manifest from a repo named 'integration'
                project="integration",
                rebase_revision=rebase_revision,
                attributes=attributes,
                build_input=build.input,
                timeout_secs=timeout_secs,
                skip_patch_projects=skip_patch_projects,
            )
        except self.m.jiri.RebaseError:
            # A failure to rebase is closer to user error than an infra failure.
            # It can be fixed by the user by rebasing their change and then
            # retrying the patch. The infrastructure is working properly.
            raise
        except self.PatchFileValidationError:
            # An invalid patch file represents a user error not an infra
            # failure. A user should correct their change.
            raise
        except recipe_api.StepFailure as e:
            # All other failures are the infra's fault.
            raise recipe_api.InfraFailure(e.name or e.reason, result=e.result)

        with self.m.context(infra_steps=True, cwd=path):
            # Snapshot the checkout so builds are reproducible after the fact.
            snapshot_file = self.m.path["cleanup"].join("jiri.snapshot")
            self.m.jiri.snapshot(snapshot_file)
            return self.finalize_checkout(
                path=path,
                snapshot_file=snapshot_file,
                is_release_version=is_release_version,
                source_info=source_info,
            )

    def _apply_patch(
        self,
        path,
        patch_ref,
        patch_project,
        gerrit_change,
        rebase_revision,
        is_integration_patch,
        timeout_secs,
    ):
        """Patches the Gerrit change into the checkout, rebasing it.

        Args:
            path (Path): The Fuchsia checkout root.
            patch_ref (str): The Gerrit ref of the patchset to apply.
            patch_project (str): The name of the jiri project to patch.
            gerrit_change: The GerritChange message from the build input.
            rebase_revision (str): The revision to rebase the change onto.
            is_integration_patch (bool): Whether the change targets the
                manifest project; if so, a local-manifest jiri update is run
                after patching.
            timeout_secs (int): A timeout for jiri update in seconds.
        """
        self.m.jiri.patch(
            patch_ref,
            host="https://%s" % gerrit_change.host,
            project=patch_project,
            rebase=True,
            rebase_revision=rebase_revision,
        )

        # Handle patches.json, if present.
        self._apply_patchfile(path, gerrit_change)

        if is_integration_patch:
            # Pick up any manifest changes from the patched integration repo.
            self.m.jiri.update(
                gc=True,
                rebase_tracked=True,
                local_manifest=True,
                run_hooks=False,
                fetch_packages=False,
                timeout=timeout_secs,
            )

        # It's difficult to figure out what commit the tryjob rebased a CL on
        # top of. So we simply log the last few commits here. (It's not
        # sufficient to log just the parent commit, because checking out a CL
        # at the top of a stack of open CLs will also check out and rebase all
        # the parent CLs on top of master).
        project_dir = self.m.jiri.project([patch_project]).json.output[0]["path"]
        self.m.git.log(directory=project_dir, depth=10)

    def _apply_patchfile(self, path, gerrit_change):
        """Parses and applies the PatchFile for the given gerrit change."""
        # Verify patches.json exists.
        # TODO: This is a fragile assumption that relies on integration.git
        # being checked out at //integration. Find a better way to derive path
        # to patches.json.
        patchfile_path = path.join(gerrit_change.project, "patches.json")
        if not self.m.path.exists(patchfile_path):
            return

        patch_file = self._parse_patchfile(patchfile_path)

        # Ensure patchfile is valid.
        validation_err = patch_file.validate(gerrit_change)
        if validation_err is not None:
            raise self.PatchFileValidationError(str(validation_err))

        for patch_input in patch_file.inputs:
            # If the patch pulls in a project that's not in the workspace already, the patch
            # would not affect this build / test run. Skip this patch.
            project_exists = (
                len(self.m.jiri.project([patch_input.project]).json.output) > 0
            )
            if not project_exists:
                warning = (
                    "Warning: skipping patch for %s which is not in the checkout"
                    % patch_input.project
                )
                # Surface the warning as a no-op step so it appears in the UI.
                self.m.step(warning, [])
                continue

            # Strip protocol if present.
            host = patch_input.host
            host_url = urlparse(host)
            if host_url.scheme:
                host = host_url.hostname

            # Patch in the change
            self.m.jiri.patch(
                ref=patch_input.ref,
                host="https://%s" % host,
                project=patch_input.project,
                rebase=True,
            )

    def _get_change_details(self, gerrit_change):
        """Fetches the details of a Gerrit change.

        Returns a dict with (at least) "branch" and "revisions" keys; the
        test_data below mirrors that shape for recipe tests.
        """
        return self.m.gerrit.change_details(
            name="get change details",
            change_id="%s~%s" % (gerrit_change.project, gerrit_change.change,),
            host=gerrit_change.host,
            query_params=["ALL_REVISIONS"],
            test_data=self.m.json.test_api.output(
                {
                    "branch": "master",
                    "revisions": {
                        "d4e5f6": {"_number": 3, "ref": "refs/changes/00/100/3"},
                        "a1b2c3": {"_number": 7, "ref": "refs/changes/00/100/7"},
                        "g7h8i9": {"_number": 9, "ref": "refs/changes/00/100/9"},
                    },
                }
            ),
        ).json.output

    def _get_current_revision(self, gerrit_change, change_details):
        """Returns the revision info dict for the change's current patchset.

        Args:
            gerrit_change: The GerritChange message from the build input.
            change_details (dict): Output of _get_change_details.

        Returns:
            The single entry of change_details["revisions"] whose "_number"
            equals gerrit_change.patchset.
        """
        current_patchsets = [
            rev
            for rev in change_details["revisions"].itervalues()
            if rev["_number"] == gerrit_change.patchset
        ]
        # Exactly one revision must correspond to the input patchset.
        assert len(current_patchsets) == 1
        return current_patchsets[0]

    def _parse_patchfile(self, patchfile_path):
        """Parses a PatchFile from the given path."""
        js = self.m.json.read("read patches-json", patchfile_path,).json.output
        return patch.PatchFile.from_json(js)

    def _get_release_version(self, path):
        """Get release version of checkout."""
        release_version = self.m.release.ref_to_release_version(
            ref="HEAD", repo_path=path.join("integration")
        )
        # Strip the "releases/" ref prefix to get the bare version string.
        release_version_str = str(release_version).replace("releases/", "")
        self.m.step.active_result.presentation.logs["release_version"] = [
            release_version_str
        ]
        return release_version_str

    def finalize_checkout(self, path, snapshot_file, is_release_version, source_info):
        """Finalizes a checkout; constructs a _CheckoutResults object.

        Args:
            path (Path): The Fuchsia checkout root.
            snapshot_file (Path): Path to the snapshot file.
            is_release_version (bool): Whether this checkout is a release version.
            source_info (dict): A Python dictionary representing the source info.

        Returns:
            A _CheckoutResults containing details of the checkout.
        """
        # Always log snapshot contents (even if uploading to GCS) to help debug
        # things like tryjob failures during roller-commits.
        self.m.file.read_text("read snapshot", snapshot_file)

        # If using a release version, get release version string.
        release_version = (
            self._get_release_version(path) if is_release_version else None
        )

        return self.CheckoutResults(
            root_dir=path,
            snapshot_file=snapshot_file,
            release_version=release_version,
            source_info=source_info,
        )