# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import functools
from urllib.parse import urlparse
from recipe_engine import recipe_api
from . import patch
# Set as an output property; consumed by other recipes and by the results
# uploader code in google3.
# It's a monotonic integer that corresponds to integration revisions so we
# can stuff our results into systems that expect Piper changelist numbers.
REVISION_COUNT_PROPERTY = "integration-revision-count"
# Set as an output property and consumed by the
# go/cq-incremental-builder-monitor_dev dashboard.
CHECKOUT_FAILED_PROPERTY = "checkout_failed"
# Set as an output property and consumed by the
# go/cq-incremental-builder-monitor dashboard as well as other incremental
# build dashboards.
CACHED_REVISION_PROPERTY = "cached_revision"
# By default, skip patching GerritChanges which map to these projects. They are
# not valid projects to patch into standard Fuchsia checkouts.
SKIP_PATCH_PROJECTS = ("infra/recipes",)
class _CheckoutResults:
"""Represents a Fuchsia source checkout."""
def __init__(
self,
api,
root_dir,
snapshot_file,
release_branch,
release_version,
source_info,
):
self._api = api
self._root_dir = root_dir
self._snapshot_file = snapshot_file
self._release_branch = release_branch
self._release_version = release_version
self.source_info = source_info
self._changed_files_cache = None
@property
def root_dir(self):
"""The path to the root directory of the jiri checkout."""
return self._root_dir
@property
def snapshot_file(self):
"""The path to the jiri snapshot file."""
return self._snapshot_file
@property
def release_branch(self):
"""Release branch corresponding to checkout if applicable, otherwise None."""
return self._release_branch
@property
def release_version(self):
"""Release version of checkout if applicable, otherwise None.
Returns a release.ReleaseVersion object.
"""
return self._release_version
def project(self, project_name, **kwargs):
return self._api.checkout.project(
project_name, checkout_root=self.root_dir, **kwargs
)
@property
def integration_revision(self):
# If triggered by integration CQ, then recipe_bootstrap will ensure the
# input gitiles_commit is what we want.
if self.contains_integration_patch:
return self._api.buildbucket.build.input.gitiles_commit.id
# Otherwise just use the revision that we actually checked out.
return [repo for repo in self.source_info if repo["name"] == "integration"][0][
"revision"
]
@property
def contains_integration_patch(self):
"""Returns whether we're testing an integration change."""
changes = self._api.buildbucket.build.input.gerrit_changes
return changes and changes[0].project == "integration"
def _upload_source_manifest(self, gcs_bucket, namespace=None):
"""Upload the jiri source manifest to GCS."""
assert gcs_bucket
with self._api.context(cwd=self._root_dir):
source_manifest = self._api.jiri.source_manifest()
with self._api.step.nest("upload source manifest"):
self._api.gsutil.upload_namespaced_file(
source=self._api.json.input(source_manifest),
bucket=gcs_bucket,
subpath="source_manifest.json",
namespace=namespace,
)
def upload_results(self, gcs_bucket, namespace=None):
"""Upload snapshot to a given GCS bucket."""
assert gcs_bucket
with self._api.step.nest("upload checkout results") as presentation:
self._api.gsutil.upload_namespaced_file(
source=self.snapshot_file,
bucket=gcs_bucket,
subpath=self._api.path.basename(self.snapshot_file),
namespace=namespace,
)
with self._api.context(cwd=self._root_dir.join("integration")):
presentation.properties[
REVISION_COUNT_PROPERTY
] = self._api.git.rev_list_count(
"HEAD",
step_name="set %s property" % REVISION_COUNT_PROPERTY,
test_data="1",
)
if not self._api.platform.is_mac:
self._upload_source_manifest(gcs_bucket, namespace=namespace)
def list_files(self, **kwargs):
"""Returns a list of paths across the checkout.
Args:
**kwargs (dict): Passed through to `api.git.ls_files()`.
Returns:
List of Paths.
"""
all_paths = []
with self._api.step.nest("list files"):
projects = self._api.jiri.project(
test_data=[
{
"name": "project",
"path": self._api.path.abspath(self._root_dir),
}
],
).json.output
for project in projects:
project_path = self._api.path.abs_to_path(project["path"])
with self._api.context(cwd=project_path):
result = self._api.git.ls_files(step_name=project["name"], **kwargs)
files = result.stdout.strip("\n").split("\n")
all_paths += [project_path.join(f) for f in files]
return all_paths
def changed_files(self, test_data=("foo.cc", "bar.cc"), **kwargs):
"""Returns a list of absolute paths that were changed.
Checks the git repo specified in buildbucket_input.gerrit_changes[0].
Args:
test_data (seq(str)): Mock list of changed files.
**kwargs (dict): Passed through to `api.git.get_changed_files()`.
Returns:
Empty list if input gerrit_changes is empty or if the build is
triggered by a change to a repo that's not included in the
checkout. List of Paths otherwise.
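Example (illustrative; `checkout` is the _CheckoutResults returned by
api.checkout.fuchsia_with_options()):
    changed = checkout.changed_files()
    if not changed:
        # Either no triggering Gerrit change, or its repo is not in
        # the checkout.
        ...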
"""
bb_input = self._api.buildbucket.build.input
if not bb_input.gerrit_changes:
return []
cache_key = tuple(kwargs.items())
if not self._changed_files_cache or cache_key != self._changed_files_cache[0]:
with self._api.step.nest("get changed files"):
change = bb_input.gerrit_changes[0]
project = change.project
with self._api.context(cwd=self._root_dir):
project_test_data = [
{
"name": project,
"path": self._api.path.abspath(self._root_dir)
if project == "project"
else self._api.path.abspath(self._root_dir.join(project)),
}
]
try:
repo_path = self.project(project, test_data=project_test_data)[
"path"
]
except self._api.jiri.NoSuchProjectError:
return []
with self._api.context(cwd=self._api.path.abs_to_path(repo_path)):
changed_files = self._api.git.get_changed_files(
test_data=test_data, **kwargs
)
changed_files = [
self._api.path.join(repo_path, changed) for changed in changed_files
]
# We only expect this function to be called with one key per build, so
# keeping a cache of one element should be sufficient, while still
# being correct in case it is called with different keys.
self._changed_files_cache = (cache_key, changed_files)
return self._changed_files_cache[1]
def check_clean(self):
with self._api.context(cwd=self._root_dir):
self._api.jiri.check_clean()
def _nest(func):
"""Nest function call within "checkout" step.
Check whether already inside a "checkout" step since some public
methods in CheckoutApi call other public methods.
"""
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
if self._presentation:
# Nesting was already set up by a parent function call, no need to
# do anything else.
return func(self, *args, **kwargs)
# The various checkout methods should internally categorize each
# internal step correctly as infra or non-infra. Wrapping the entire
# call with infra_steps=True would cause each step to be an infra step
# by default rather than a non-infra step, leading to steps being
# incorrectly marked as infra failures.
assert not self.m.context.infra_step, (
"api.checkout.%s() must not be wrapped with api.context(infra_steps=True)"
% func.__name__
)
with self.m.step.nest("checkout") as pres:
self._presentation = pres
try:
ret = func(self, *args, **kwargs)
except:
pres.properties[CHECKOUT_FAILED_PROPERTY] = True
raise
self._presentation = None
return ret
return wrapper
class CheckoutApi(recipe_api.RecipeApi):
"""An abstraction over how Jiri checkouts are created during Fuchsia CI/CQ
builds."""
CHECKOUT_INFO_PROPERTY = "checkout_info"
GOT_REVISION_PROPERTY = "got_revision"
REVISION_COUNT_PROPERTY = REVISION_COUNT_PROPERTY
CACHED_REVISION_PROPERTY = CACHED_REVISION_PROPERTY
# An invalid patch file represents a user error, not an infra
# failure. A user should correct their change.
class PatchFileValidationError(recipe_api.StepFailure):
pass
def __init__(self, props, *args, **kwargs):
super().__init__(*args, **kwargs)
self._presentation = None
self._gitiles_commit = None
if props.gitiles_commit.host:
self._gitiles_commit = props.gitiles_commit
self._respect_gitiles_commit_with_gerrit_change = (
props.respect_gitiles_commit_with_gerrit_change
)
self._cherry_pick_patches = props.cherry_pick_patches
def CheckoutResults(self, *args, **kwargs):
"""Return a CheckoutResults object.
Outside this module, this should only be used when testing example recipes.
"""
return _CheckoutResults(self.m, *args, **kwargs)
@_nest
def from_spec(self, checkout_spec, **kwargs):
"""Initialize a Fuchsia checkout according to a checkout spec.
Args:
checkout_spec (infra.fuchsia.Fuchsia.Checkout): Checkout spec
protobuf.
**kwargs (dict): Passed through to fuchsia_with_options().
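Example (illustrative; assumes `spec` is a Fuchsia properties message
that carries a `checkout` field of this type):
    checkout = api.checkout.from_spec(spec.checkout)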
"""
return self.fuchsia_with_options(
manifest=checkout_spec.manifest,
remote=checkout_spec.remote,
attributes=checkout_spec.attributes,
is_release_version=checkout_spec.is_release_version,
min_rebase_distance=checkout_spec.min_rebase_distance,
allow_skipping_patch=checkout_spec.allow_skipping_patch,
enable_submodules=checkout_spec.enable_submodules,
**kwargs,
)
@_nest
def fuchsia_with_options(
self,
manifest,
remote,
path=None,
project="integration",
is_release_version=False,
timeout_secs=45 * 60,
use_incremental_cache=False,
enable_submodules=False,
**kwargs,
):
"""Uses Jiri to check out a Fuchsia project.
The root of the checkout is returned via _CheckoutResults.root_dir.
Args:
manifest (str): A path to the manifest in the remote (e.g.
manifest/minimal).
remote (str): A URL to the remote repository which Jiri will be
pointed at.
project (str): The name that jiri should assign to the project.
is_release_version (bool): Whether the checkout is a release
version.
timeout_secs (int): A timeout to assign to each Jiri operation.
use_incremental_cache (bool): Whether to reuse a checkout from the
local checkout cache, necessary when the checkout will be used
for an incremental build.
enable_submodules (bool): Whether to enable submodules.
**kwargs (dict): Passed through to with_options().
Returns:
A _CheckoutResults containing details of the checkout.
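Example (illustrative; the manifest, remote, and bucket values are
placeholders):
    checkout = api.checkout.fuchsia_with_options(
        manifest="flower",
        remote="https://fuchsia.googlesource.com/integration",
    )
    checkout.upload_results("example-gcs-bucket")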
"""
if path is None:
path = self.m.path["start_dir"].join("fuchsia")
if use_incremental_cache:
path = self.m.path["cache"].join("incremental")
# Checkout caches are necessary for supporting incremental builds
# because doing a fresh checkout would reset all files' mtimes, which
# would result in a full build because it would appear to the build
# system that all files had been modified since the last build.
#
# Incremental builds are specific to the Fuchsia project and are not
# relevant in the general case of checking out a Jiri project, which is
# why this logic is in this function and not in with_options().
if use_incremental_cache:
cache_ctx = self.m.cache.guard("incremental")
else:
cache_ctx = contextlib.nullcontext()
with cache_ctx:
self.with_options(
manifest=manifest,
remote=remote,
path=path,
project=project,
timeout_secs=timeout_secs,
use_incremental_cache=use_incremental_cache,
enable_submodules=enable_submodules,
**kwargs,
)
with self.m.context(infra_steps=True, cwd=path):
source_info = self.m.jiri.project(name="source-info").json.output
snapshot_file = self.m.path["cleanup"].join("jiri_snapshot.xml")
self.m.jiri.snapshot(snapshot_file)
# Always log snapshot contents (even if uploading to GCS) to help debug
# things like tryjob failures during roller-commits.
self.m.file.read_text("read snapshot", snapshot_file)
# If using a release version, resolve release version and release
# branches.
release_version = release_branch = None
if is_release_version:
release_version = self._get_release_version(path)
release_branch = self._get_release_branch()
checkout_result = self.CheckoutResults(
root_dir=path,
snapshot_file=snapshot_file,
release_branch=release_branch,
release_version=release_version,
source_info=source_info,
)
# Verify the checkout starts in a clean state.
checkout_result.check_clean()
return checkout_result
@_nest
def with_options(self, remote, path=None, **kwargs):
"""Wrapper to avoid deeply nesting the function body.
Only context manager-related logic should go in this function. All
other logic should go in _with_options().
Args: see _with_options().
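Example (illustrative; the manifest and remote values are placeholders):
    root = api.checkout.with_options(
        manifest="manifest/minimal",
        remote="https://fuchsia.googlesource.com/some-project",
        fetch_packages=False,
    )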
"""
if path is None:
path = self.m.path["start_dir"].join(self.m.git.remote_alias(remote))
self.m.file.ensure_directory("ensure checkout dir", path)
with self.m.context(infra_steps=True, cwd=path):
return self._with_options(remote=remote, path=path, **kwargs)
def _with_options(
self,
manifest,
remote,
path=None,
project=None,
attributes=(),
build_input=None,
fetch_packages=True,
enable_submodules=False,
use_lock_file=False,
skip_patch_projects=SKIP_PATCH_PROJECTS,
timeout_secs=None,
use_incremental_cache=False,
min_rebase_distance=0,
allow_skipping_patch=False,
):
"""Initializes and populates a jiri checkout from a remote manifest.
If a gitiles_commit was provided through the "gitiles_commit" property,
this will set the buildbucket.build.input's gitiles_commit to the
gitiles_commit from the property.
Args:
manifest (str): Relative path to the manifest in the remote repository.
remote (str): URL to the remote repository.
path (Path): The Fuchsia checkout root. If unset, a dedicated
checkout directory will be created and returned.
project (str): The name that jiri should assign to the project.
attributes (seq(str)): A list of jiri manifest attributes; projects or
packages with matching attributes - otherwise regarded as optional -
will be downloaded.
build_input (buildbucket.build_pb2.Build.Input): The input to a buildbucket
build.
fetch_packages (bool): Whether or not to fetch CIPD packages (and
run jiri hooks). Running hooks could theoretically be a separate
parameter but in practice there are no use cases for fetching
packages without running hooks. And when we want to disable
both, we generally care more about disabling fetching packages
since it normally takes the longest time.
enable_submodules (bool): Whether to enable submodules in jiri config.
use_lock_file (bool): Whether to enforce lock files in the jiri
root.
skip_patch_projects (seq(str)): Do not attempt to patch these
projects.
timeout_secs (int): A timeout to assign to each Jiri operation.
use_incremental_cache (bool): Whether or not this checkout will be
used for an incremental build.
min_rebase_distance (int): The minimum number of revisions needed
between the cached checkout revision and HEAD to trigger a
rebase to HEAD. Ignored if use_incremental_cache is False.
allow_skipping_patch (bool): Whether to ignore patch failures
resulting from the affected project not existing in the
checkout.
Returns: A Path to the checkout root.
"""
assert manifest, "'manifest' must be set"
assert remote, "'remote' must be set"
if path == self.m.path["start_dir"]:
# start_dir tends to be a dumping ground, so to keep the checkout
# clean it's best to use a dedicated directory.
raise ValueError("checkout must not be rooted at start_dir")
build_input = build_input or self.m.buildbucket.build.input
if self._gitiles_commit:
# Override build input gitiles_commit with gitiles_commit from
# properties.
build_input.gitiles_commit.CopyFrom(self._gitiles_commit)
# The revision of the manifest repository to import. We'll do any
# patches and overrides on top of the checkout determined by this
# version of the manifest repository.
base_manifest_revision = None
commit = None
gerrit_change = None
patches = [] # Details of projects to patch (used in CQ).
overrides = [] # Details of projects to override (used in local CI).
if build_input.gerrit_changes:
assert (
len(build_input.gerrit_changes) == 1
), "build input contains more than one gerrit_change"
gerrit_change = build_input.gerrit_changes[0]
change_details = self._get_change_details(gerrit_change)
# Re-resolve HEAD rather than using the base commit resolved by
# recipe_bootstrap, even for integration patches, because we want to
# wait as late as possible to choose a base commit for the checkout.
# This will make it more likely that we'll catch merge conflicts and
# other types of collisions between changes.
#
# The most notable risk of doing this is that the properties already
# resolved by recipe_bootstrap might come from a different version
# of the integration repo than the version included in this
# checkout, which could cause issues if the properties are not
# forwards-compatible with newer versions of the integration repo
# (e.g. they reference a jiri project that has been deleted from the
# manifests). However, this is extremely rare in practice because we
# rarely make changes that simultaneously affect infra and
# non-infra parts of integration like jiri manifests.
#
# Only skip this resolution if we are specifically allowing a
# Gitiles commit to be combined with a Gerrit change.
if not (
build_input.gitiles_commit.id
and self._respect_gitiles_commit_with_gerrit_change
):
base_manifest_revision = self._resolve_branch_head(
remote, branch=change_details["branch"]
)
current_revision = self._get_current_revision(gerrit_change, change_details)
if gerrit_change.project not in skip_patch_projects:
patches.append(
{
"host": "https://%s" % gerrit_change.host,
"project": gerrit_change.project,
"ref": current_revision["ref"],
}
)
# TODO(olivernewman): Also load patches.json into the "patches" list
# (which must happen after patching in the gerrit change so we have
# patches.json locally) so we can expose all patches in the checkout
# info output property.
# Only use the Gitiles commit to resolve the base manifest revision if
# it wasn't already resolved in the previous section.
if not base_manifest_revision and build_input.gitiles_commit.id:
commit = build_input.gitiles_commit
is_manifest_commit = commit.project == project
if is_manifest_commit:
base_manifest_revision = commit.id
else:
base_manifest_revision = self._resolve_branch_head(remote, "main")
# In order to identify a project to override, jiri keys on both
# the project name and the remote source repository (not to be
# confused with `remote`, the manifest repository). Doing this
# correctly would require finding the commit's remote in the
# transitive imports of the jiri manifest. But those transitive
# imports aren't available until we run "jiri update", and doing
# that twice is slow, so we rely on:
# 1. The convention that the name of the jiri project
# is the same as commit.project.
# 2. The hope that the URL scheme of the commit remote is the
# same as that of the manifest remote.
manifest_remote_url = urlparse(remote)
host = commit.host
# When using sso we only specify the lowest subdomain, by convention.
if manifest_remote_url.scheme == "sso":
host = host.split(".")[0]
commit_remote = "{}://{}/{}".format(
manifest_remote_url.scheme, host, commit.project
)
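# For example (illustrative values): with remote
# "sso://fuchsia/integration", commit.host
# "fuchsia.googlesource.com", and commit.project "build", this
# yields commit_remote "sso://fuchsia/build".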
overrides.append(
{
"project": commit.project, # See 1. above
"remote": commit_remote,
"new_revision": commit.id,
}
)
if not base_manifest_revision:
# If we haven't resolved the base manifest revision by this point,
# we have neither a triggering commit nor a triggering Gerrit
# change, so just checkout the manifest repository at HEAD.
base_manifest_revision = self._resolve_branch_head(remote, "main")
# Compute the distance (in revisions) between the cached checkout's
# manifest revision and the base manifest revision. cached_revision is
# None and cache_distance stays infinite if there is no cached
# checkout, so the manifest is always (re)imported in that case.
cache_distance = float("inf")
cached_revision = None
if use_incremental_cache:
cached_revision = self._resolve_cached_revision(project)
if min_rebase_distance and cached_revision:
with self.m.context(cwd=path.join(project)):
cache_distance = self.m.git.rev_list_count(
cached_revision, base_manifest_revision, test_data="5"
)
# Only reimport the manifest if:
# 1. There is no cache.
# 2. The cache is "too old", as specified by min_rebase_distance.
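# For example (illustrative numbers): with min_rebase_distance=100,
# a cached checkout that is 5 revisions behind the base manifest
# revision is reused as-is, while one that is 150 revisions behind
# is re-imported and rebased.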
if cache_distance > min_rebase_distance:
self.m.jiri.init(
directory=path,
attributes=attributes,
enable_submodules=enable_submodules,
use_lock_file=use_lock_file,
)
self.m.jiri.import_manifest(
manifest,
remote,
name=project,
revision=base_manifest_revision,
overwrite=use_incremental_cache,
)
for override in overrides:
self.m.jiri.override(**override)
# Resets the checkout to make sure that nothing remains in the cache
# from a previous build.
if use_incremental_cache:
with self.m.context(cwd=path):
self.m.jiri.reset()
# We must clone all projects prior to applying any patches. But we need
# not run hooks or fetch packages until after all patches are applied,
# since applying a patch to the manifest repository could update the
# versions of the packages we need to fetch.
# We only run this update if we don't have a cache or if the cache is
# too old, as specified by the min_rebase_distance.
if cache_distance > min_rebase_distance:
self.m.jiri.update(
run_hooks=False,
fetch_packages=False,
timeout=timeout_secs,
gc=True,
)
successful_patches = []
for p in patches:
is_manifest_patch = project == p["project"]
# Failures in pulling down patches and rebasing are likely not
# infra-related. If we got here, we're already able to talk to Gerrit
# successfully, so any errors are likely merge conflicts.
with self.m.context(infra_steps=False):
try:
patch_base_revision = self._apply_patch(
path,
p["ref"],
p["project"],
gerrit_change,
change_details,
is_manifest_patch,
timeout_secs,
)
except self.m.jiri.NoSuchProjectError as e:
# Always allow skipping integration patches to support
# running tryjobs on infra config changes even when the
# tryjob's checkout doesn't include the integration repo.
if allow_skipping_patch or p["project"] == "integration":
self.m.step.empty("skipping patch", step_text=str(e))
continue
# Aside from the integration repo, there isn't a reason to
# run tryjobs on repos that aren't included in the checkout.
# So we should fail the build to notify the user that the
# tryjob will not actually test their change.
raise
# Emit the revision that we rebased the patch change on top of,
# which will always be the head of the patch's target branch at this
# moment in time.
p["base_revision"] = patch_base_revision
successful_patches.append(p)
# We should only emit metadata for projects that we successfully
# patched.
patches = successful_patches
# Run hooks and fetch CIPD packages separately from `jiri update` to get
# timing information. We want to fetch packages only *after* any gerrit
# changes have been patched in. If we fetched packages *before* patching
# in a change to the manifest repository, then we'd need to run hooks
# again afterward to honor the contents of the patch, and might end up
# overwriting old versions of CIPD packages that were downloaded prior
# to patching. So it's more efficient to only fetch packages once at the
# very end.
if fetch_packages:
# Handle package_overrides.json, if present.
packages_to_skip = self._apply_package_overrides(path)
self.m.jiri.run_hooks(
# We must use the local manifest from integration.git rather
# than the contents of .jiri_manifest in cases where we might
# have patched in a change to the manifest repository. The only
# time we *cannot* use the local manifest is when there are
# overrides, in which case .jiri_manifest will be the only
# correct source of truth.
local_manifest=bool(gerrit_change),
fetch_packages=True,
# Jiri sets the fetch-packages timeout to 5x the hook timeout.
hook_timeout_secs=timeout_secs / 5 if timeout_secs else None,
# Skip downloading any packages that were already replaced by
# package_overrides.json.
packages_to_skip=packages_to_skip,
)
# This information is consumed by `fx sync-to` to reproduce infra
# checkouts locally.
# TODO(olivernewman): Also expose overrides, and patches specified by
# patches.json. Neither overrides (used only in local CI) nor
# patches.json (used only in occasional manual experimental builds) is
# used very frequently, but they're necessary to include here for
# complete correctness.
self._presentation.properties[self.CHECKOUT_INFO_PROPERTY] = {
"manifest_remote": remote,
"manifest": manifest,
"base_manifest_revision": base_manifest_revision,
"patches": patches,
}
# This information is consumed by a variety of incremental builder
# dashboards.
self._presentation.properties[self.CACHED_REVISION_PROPERTY] = cached_revision
return path
def project(self, project_name, checkout_root=None, **kwargs):
"""Returns metadata for a project in the checkout.
Raises NoSuchProjectError if the project is not among the repos in
the checkout.
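Example (illustrative; "build" is a placeholder project name and
`checkout` is a _CheckoutResults):
    build_dir = api.path.abs_to_path(
        api.checkout.project("build", checkout_root=checkout.root_dir)["path"]
    )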
"""
if not checkout_root:
checkout_root = self.m.context.cwd
with self.m.context(cwd=checkout_root, infra_steps=True):
output = self.m.jiri.project(projects=[project_name], **kwargs).json.output
if not output:
raise self.m.jiri.NoSuchProjectError(
"project %r is not present in the checkout" % project_name
)
return output[0]
def _resolve_cached_revision(self, project):
"""Return the revision of the project in the cache.
Args:
project (str): The name of the project to look in the cache for.
Returns:
A string containing a git revision, or None if there is no cached checkout.
"""
return self.m.jiri.get_import_revision(
project,
name="get cached revision",
test_data="foobar",
# The command will fail if a jiri root does not exist (i.e. if
# we haven't started a checkout yet), which is fine - we'll just
# return None.
ok_ret="any",
)
def _resolve_branch_head(self, remote, branch):
"""Return the hash of the commit currently at the tip of a branch.
Args:
remote (str): The URL of the repository containing the branch.
branch (str): Name of the branch to resolve (e.g. "main").
"""
return self.m.git.get_remote_branch_head(
url=self.m.sso.sso_to_https(remote),
branch=branch,
step_name="resolve head of %r branch" % branch,
)
def _apply_patch(
self,
path,
patch_ref,
patch_project,
gerrit_change,
change_details,
is_manifest_patch,
timeout_secs,
):
target_branch = change_details["branch"]
project_dir = self.m.path.abs_to_path(self.project(patch_project)["path"])
if self._cherry_pick_patches:
with self.m.context(cwd=project_dir):
url, ref = self.m.gerrit.resolve_change(gerrit_change)
self.m.git.fetch(url, ref)
self.m.git.cherry_pick("FETCH_HEAD")
else:
self.m.jiri.patch(
patch_ref,
host="https://%s" % gerrit_change.host,
project=patch_project,
rebase=True,
rebase_branch=target_branch,
)
with self.m.context(cwd=project_dir):
# Resolve the revision that we patched on top of, which is the tip
# of the target branch at this moment in time.
patch_base_revision = self.m.git.rev_parse(
"origin/%s" % target_branch,
step_test_data=lambda: self.m.raw_io.test_api.stream_output_text(
"abc123"
),
)
# Handle patches.json, if present.
self._apply_patchfile(path, gerrit_change, target_branch)
if is_manifest_patch:
self.m.jiri.update(
gc=True,
rebase_tracked=True,
local_manifest=True,
run_hooks=False,
fetch_packages=False,
timeout=timeout_secs,
)
# It's difficult to figure out what commit the tryjob rebased a CL on
# top of. So we simply log the last few commits here. (It's not
# sufficient to log just the parent commit, because checking out a CL
# at the top of a stack of open CLs will also check out and rebase all
# the parent CLs on top of main).
with self.m.context(cwd=project_dir):
self.m.git.log(depth=10)
return patch_base_revision
def _apply_patchfile(self, path, gerrit_change, target_branch):
"""Parses and applies the PatchFile for the given gerrit change."""
# TODO: This is a fragile assumption that relies on integration.git
# being checked out at //integration. Find a better way to derive path
# to patches.json.
patchfile_path = path.join(gerrit_change.project, "patches.json")
try:
# Note that in recipe unit testing mode, `read_json` returns a falsy
# value if the file has not been mocked, rather than raising an
# exception.
contents = self.m.file.read_json("read patches.json", patchfile_path)
except self.m.file.Error as e:
if e.errno_name != "ENOENT": # pragma: no cover
raise
self.m.step.active_result.presentation.status = self.m.step.SUCCESS
contents = None
# The change doesn't include a patch file, so no need to do any
# patching.
if not contents:
self.m.step.active_result.presentation.step_text = "no patch file found"
return
patch_file = patch.PatchFile.from_json(contents)
# Ensure patchfile is valid.
validation_err = patch_file.validate(gerrit_change)
if validation_err is not None:
raise self.PatchFileValidationError(str(validation_err))
for patch_input in patch_file.inputs:
# If the patch pulls in a project that's not in the workspace already, the patch
# would not affect this build / test run. Skip this patch.
try:
self.project(
patch_input.project, name="jiri project %s" % patch_input.project
)
except self.m.jiri.NoSuchProjectError:
warning = (
"warning: skipping patch for %s which is not in the checkout"
% patch_input.project
)
self.m.step.empty(warning)
continue
# Strip protocol if present.
host = patch_input.host
host_url = urlparse(host)
if host_url.scheme:
host = host_url.hostname
# Patch in the change
self.m.jiri.patch(
ref=patch_input.ref,
host="https://%s" % host,
project=patch_input.project,
rebase=True,
rebase_branch=target_branch,
)
def _apply_package_overrides(self, path):
"""Apply one or more package overrides if package_overrides.json is
present. package_overrides.json is a map of package to CAS digest pairs.
Args:
path (Path): Checkout root.
Returns:
list(str): List of packages in package_overrides.json.
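Example package_overrides.json contents (illustrative package name;
the value is a CAS digest):
    {"fuchsia/tools/example-tool/linux-x64": "<cas digest>"}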
"""
package_overrides_path = path.join("package_overrides.json")
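# mock_add_paths is a no-op outside recipe tests; in tests it marks
# the overrides file as existing so the read below is exercised.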
self.m.path.mock_add_paths(package_overrides_path)
if not self.m.path.exists(package_overrides_path): # pragma: no cover
return []
package_overrides = (
self.m.file.read_json(
"read package overrides",
package_overrides_path,
)
or {}
)
for package, cas_digest in package_overrides.items():
package_entries = self.m.jiri.package([package]).json.output
# The package may appear more than once across the checkout.
for package_entry in package_entries:
# Download the CAS tree at the path that the package would be
# downloaded to.
self.m.cas_util.download(
cas_digest,
package_entry["path"],
)
return package_overrides.keys()
def _get_change_details(self, gerrit_change):
"""Fetches the details of a Gerrit change."""
return self.m.gerrit.change_details(
name="get change details",
change_id="%s~%s" % (gerrit_change.project, gerrit_change.change),
host=gerrit_change.host,
query_params=["ALL_REVISIONS"],
test_data=self.m.json.test_api.output(
{
"branch": "main",
"revisions": {
"d4e5f6": {"_number": 3, "ref": "refs/changes/00/100/3"},
"a1b2c3": {"_number": 7, "ref": "refs/changes/00/100/7"},
"g7h8i9": {"_number": 9, "ref": "refs/changes/00/100/9"},
},
}
),
).json.output
def _get_current_revision(self, gerrit_change, change_details):
current_patchsets = [
rev
for rev in change_details["revisions"].values()
if rev["_number"] == gerrit_change.patchset
]
assert len(current_patchsets) == 1
return current_patchsets[0]
def _get_release_version(self, path):
"""Get release version corresponding to HEAD."""
with self.m.step.nest("resolve release version") as presentation:
release_version = self.m.release.ref_to_release_version(
ref="HEAD",
repo_path=path.join("integration"),
)
# Fuchsia's buildmon service depends on this property being set.
presentation.properties["release_version"] = str(release_version)
return release_version
def _get_release_branch(self):
with self.m.step.nest("resolve release branch") as presentation:
ref = self.m.buildbucket.build.input.gitiles_commit.ref.replace(
"refs/heads/", ""
)
if not self.m.release.validate_branch(ref):
return None
presentation.properties["release_branch"] = ref
return ref