# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import functools
import re
from recipe_engine import recipe_api
from urlparse import urlparse
from . import patch
from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2
# Set as an output property and consumed by other recipe code and by the
# results uploader code in google3.
# It's a monotonically increasing integer that corresponds to integration
# revisions, so we can feed our results into systems that expect Piper
# changelist numbers.
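# The value is computed with `git rev-list --count HEAD` in the integration
# repo (see CheckoutResults.upload_results below).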
REVISION_COUNT_PROPERTY = 'integration-revision-count'
class CheckoutResults(object):
"""Represents a Fuchsia source checkout."""
def __init__(self, api, root_dir, snapshot_file, is_from_snapshot,
release_version, source_manifest):
self._api = api
self._root_dir = root_dir
self._snapshot_file = snapshot_file
self._is_from_snapshot = is_from_snapshot
self._release_version = release_version
self.source_manifest = source_manifest
@property
def root_dir(self):
"""The path to the root directory of the jiri checkout."""
return self._root_dir
@property
def snapshot_file(self):
"""The path to the jiri snapshot file."""
return self._snapshot_file
@property
def checkout_snapshot(self):
"""Whether this checkout result is a result of checkout from snapshot"""
return self._is_from_snapshot
@property
def release_version(self):
"""Release version of checkout if applicable, otherwise None."""
return self._release_version
def upload_results(self, gcs_bucket, namespace=None):
"""Upload snapshot to a given GCS bucket."""
assert gcs_bucket
with self._api.m.step.nest('upload checkout results') as presentation:
self._api.m.upload.file_to_gcs(
source=self.snapshot_file,
bucket=gcs_bucket,
subpath=self._api.m.path.basename(self.snapshot_file),
namespace=namespace,
)
presentation.properties[REVISION_COUNT_PROPERTY] = int(
self._api.m.git(
'-C',
self._root_dir.join('integration'),
'rev-list',
'--count',
'HEAD',
step_test_data=lambda: self._api.m.raw_io.test_api.stream_output(
'1'),
stdout=self._api.m.raw_io.output()).stdout.strip())
def _nest(func):
  """Nests a function call within a "checkout" step.
  Tracks whether we are already inside a "checkout" step, since some public
  methods in CheckoutApi call other public methods and the nesting should
  only happen once.
  """
  @functools.wraps(func)
  def wrapper(self, *args, **kwargs):
    if not self._nested:
      with self.m.step.nest('checkout'):
        self._nested = True
        try:
          return func(self, *args, **kwargs)
        finally:
          # Reset even if the wrapped call raises so later calls still nest.
          self._nested = False
    else:
      return func(self, *args, **kwargs)
  return wrapper
class CheckoutApi(recipe_api.RecipeApi):
"""An abstraction over how Jiri checkouts are created during Fuchsia CI/CQ builds."""
CheckoutResults = CheckoutResults
REVISION_COUNT_PROPERTY = REVISION_COUNT_PROPERTY
def __init__(self, buildset, *args, **kwargs):
super(CheckoutApi, self).__init__(*args, **kwargs)
self._nested = False
self._gitiles_commit = None
    # If buildset is set, it is of the form
    # commit/gitiles/<host>/<project>/+/<revision>.
    # When provided, it overrides the gitiles_commit from the build input used
    # in the checkout. This happens when the build input commit tracks a
    # different repo than the one we want to check out (as in the Fuchsia
    # builds launched by the toolchain recipes).
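    # Example (hypothetical values): for buildset
    # 'commit/gitiles/fuchsia.googlesource.com/fuchsia/+/deadbeef', the parsed
    # host is 'fuchsia.googlesource.com', project is 'fuchsia', and revision
    # is 'deadbeef'.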
if buildset:
buildset = buildset.replace('commit/gitiles/', '')
buildset_parts = buildset.split('/')
host = buildset_parts[0]
project = '/'.join(buildset_parts[1:-2])
revision = buildset_parts[-1]
self._gitiles_commit = common_pb2.GitilesCommit(
host=host,
project=project,
id=revision,
)
@_nest
def with_options(self,
path,
manifest,
remote,
project=None,
patch_project=None,
attributes=(),
build_input=None,
timeout_secs=None,
rebase_revision=None,
run_hooks=True):
"""Initializes and populates a jiri checkout from a remote manifest.
Emits a source manifest for the build.
Args:
path (Path): The Fuchsia checkout root.
manifest (str): Relative path to the manifest in the remote repository.
remote (str): URL to the remote repository.
project (str): The name that jiri should assign to the project.
patch_project (str): The name of the jiri project to patch if different
from the gerrit_change project.
rebase_revision (str): The base revision to patch on top of, if the
build input is a Gerrit Change. TODO(kjharland): This is hacky, find a
better way to carry this information through to `from_patchset`.
attributes (seq(str)): A list of jiri manifest attributes; projects or
packages with matching attributes - otherwise regarded as optional -
will be downloaded.
build_input (buildbucket.build_pb2.Build.Input): The input to a buildbucket
build.
timeout_secs (int): A timeout for jiri update in seconds.
run_hooks (bool): Whether or not to run the hooks.
Returns:
A Python dictionary representing the source manifest.
"""
self.m.jiri.init(directory=path, attributes=attributes)
if build_input and build_input.gerrit_changes:
gerrit_change = build_input.gerrit_changes[0]
      rest = build_input.gerrit_changes[1:]
      assert not rest, 'build input contains more than one gerrit change'
self.from_patchset(
path=path,
manifest=manifest,
remote=remote,
project=project,
patch_project=patch_project,
rebase_revision=rebase_revision,
run_hooks=run_hooks,
timeout_secs=timeout_secs,
gerrit_change=gerrit_change)
else:
commit = None
if build_input and build_input.gitiles_commit.id:
commit = build_input.gitiles_commit
if self._gitiles_commit:
# Use gitiles_commit from properties instead of build input.
commit = self._gitiles_commit
self.from_commit(
path=path,
manifest=manifest,
remote=remote,
commit=commit,
project=project,
run_hooks=run_hooks,
timeout_secs=timeout_secs)
with self.m.context(cwd=path):
return self.m.jiri.emit_source_manifest()
@_nest
def from_spec(self, checkout_spec):
"""Initialize a Fuchsia checkout according to a checkout spec.
If a gitiles_commit was provided through the buildset property, this will
set the buildbucket.build.input's gitiles_commit to the gitiles_commit from
the property.
Args:
checkout_spec: infra.fuchsia.Fuchsia.Checkout protocol buffer message.
"""
build = self.m.buildbucket.build
if self._gitiles_commit:
# Override build input gitiles_commit with gitiles_commit from
# properties.
build.input.gitiles_commit.CopyFrom(self._gitiles_commit)
checkout_root = self.m.path['start_dir'].join('fuchsia')
if checkout_spec.use_snapshot:
if self.m.buildbucket_util.is_tryjob:
assert len(build.input.gerrit_changes) == 1
checkout = self.from_patchset_snapshot(
path=checkout_root, gerrit_change=build.input.gerrit_changes[0])
else:
checkout = self.from_commit_snapshot(
path=checkout_root, gitiles_commit=build.input.gitiles_commit)
else:
assert checkout_spec.manifest
assert checkout_spec.remote
checkout = self.fuchsia_with_options(
path=checkout_root,
build=build,
manifest=checkout_spec.manifest,
remote=checkout_spec.remote,
attributes=checkout_spec.attributes,
is_release_version=checkout_spec.is_release_version,
)
return checkout
@_nest
def from_patchset(self,
path,
manifest,
remote,
project,
patch_project,
run_hooks,
timeout_secs,
gerrit_change,
rebase_revision=None):
"""Initializes and populates a Jiri checkout from a remote manifest and Gerrit change.
Args:
path (Path): The Fuchsia checkout root.
manifest (str): Relative path to the manifest in the remote repository.
remote (str): URL to the remote repository.
project (str): The name that jiri should assign to the project.
patch_project (str): The name of the jiri project to patch if different
from the gerrit_change project.
timeout_secs (int): A timeout for jiri update in seconds.
gerrit_change: The GerritChange message from the BuildBucket build input.
rebase_revision: The revision to rebase gerrit_change on top of.
"""
is_integration_patch = (project == gerrit_change.project)
# Fetch the project and update.
details = self._get_change_details(gerrit_change)
with self.m.context(cwd=path):
self.m.jiri.import_manifest(
manifest,
remote,
name=project,
revision='HEAD',
remote_branch=details['branch'])
self.m.jiri.update(
run_hooks=False, fetch_packages=False, timeout=timeout_secs)
# Patch the current Gerrit change.
current_revision = self._get_current_revision(gerrit_change, details)
patch_ref = current_revision['ref']
# Failures in pulling down patches and rebasing are likely not
# infra-related. If we got here, we're already able to talk to Gerrit
# successfully, so any errors are likely merge conflicts.
with self.m.context(infra_steps=False):
self.m.jiri.patch(
patch_ref,
host='https://%s' % gerrit_change.host,
project=patch_project or gerrit_change.project,
rebase=True,
rebase_revision=rebase_revision,
)
# Handle patches.json, if present.
self._apply_patchfile(path, gerrit_change)
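      # If the patched project is the integration (manifest) repo itself,
      # update again against the patched local manifest so its changes take
      # effect in the rest of the checkout.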
if is_integration_patch:
self.m.jiri.update(
gc=True,
rebase_tracked=True,
local_manifest=True,
run_hooks=False,
fetch_packages=False,
timeout=timeout_secs)
if run_hooks:
self.m.jiri.run_hooks(local_manifest=True)
self.m.jiri.fetch_packages(local_manifest=True)
@_nest
def from_commit(self, path, manifest, remote, commit, project, run_hooks,
timeout_secs):
"""Populates a Jiri checkout from a remote manifest and Gitiles commmit.
Args:
path (Path): The Fuchsia checkout root.
manifest (str): Relative path to the manifest in the remote repository.
remote (str): URL to the remote repository.
project (str): The name that jiri should assign to the project.
remote (str): The remote git repository.
commit: Commit information derived from
buildbucket.build_pb2.Build.Input.gitiles_commit.
timeout_secs (int): A timeout for jiri update in seconds.
run_hooks (bool): Whether or not to run the hooks.
override (bool): Whether to override the imported manifest with a commit's
given revision.
"""
revision = 'HEAD'
override = False
if commit:
revision = commit.id
# If the commit project != manifest project, then we get the manifest at
# HEAD and override only the commit's project.
override = commit.project != project
with self.m.context(cwd=path):
if override:
self.m.jiri.import_manifest(
manifest, remote, name=project, revision='HEAD')
# In order to identify a project to override, jiri keys on
# both the project name and the remote source repository (not to be
# confused with `remote`, the manifest repository).
# Doing this correctly would require finding the commit's remote in the
# transitive imports of the jiri manifest. But those transitive imports
# aren't available until we run "jiri update", and doing that twice is
# slow, so we rely on:
# 1. The convention that the name of the jiri project
# is the same as commit.project.
# 2. The hope that the URL scheme of the commit remote is the same as that
# of the manifest remote.
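        # For example (hypothetical values): with a manifest remote of
        # 'sso://fuchsia/integration' and commit.host
        # 'fuchsia.googlesource.com', the override remote below becomes
        # 'sso://fuchsia/<commit.project>'.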
manifest_remote_url = urlparse(remote)
host = commit.host
# When using sso we only specify the lowest subdomain, by convention.
if manifest_remote_url.scheme == 'sso':
host = host.split('.')[0]
commit_remote = '%s://%s/%s' % (
manifest_remote_url.scheme,
host,
commit.project,
)
self.m.jiri.override(
project=commit.project, # See 1. above
remote=commit_remote,
new_revision=revision)
else:
self.m.jiri.import_manifest(
manifest, remote, name=project, revision=revision)
self.m.jiri.update(run_hooks=False, timeout=timeout_secs)
if run_hooks:
self.m.jiri.run_hooks()
@_nest
def fuchsia_with_options(self,
path,
build,
manifest,
remote,
rebase_revision=None,
attributes=(),
is_release_version=False,
timeout_secs=30 * 60):
"""Uses Jiri to check out a Fuchsia project.
The root of the checkout is returned via CheckoutResults.root_dir.
Args:
path (Path): The Fuchsia checkout root.
build (buildbucket.build_pb2.Build): A buildbucket build.
manifest (str): A path to the manifest in the remote (e.g. manifest/minimal)
      remote (str): A URL to the remote repository which Jiri will be pointed
        at.
rebase_revision (str): The base revision to patch on top of, if the
build input is a Gerrit Change. TODO(kjharland): This is hacky, find a
better way to carry this information through to `from_patchset`.
attributes (seq(str)): A list of jiri manifest attributes; projects or
packages with matching attributes - otherwise regarded as optional -
will be downloaded.
is_release_version (bool): Whether the checkout is a release version.
      timeout_secs (int): How long to wait for the checkout to complete
        before failing.
Returns:
A CheckoutResults containing details of the checkout.
"""
try:
source_manifest = self.with_options(
path,
manifest,
remote,
          # Currently all valid Fuchsia checkouts use a manifest from a repo
          # named 'integration'.
project='integration',
rebase_revision=rebase_revision,
attributes=attributes,
build_input=build.input,
timeout_secs=timeout_secs,
)
except self.m.jiri.RebaseError:
# A failure to rebase is closer to user error than an infra failure.
# It can be fixed by the user by rebasing their change and then
# retrying the patch. The infrastructure is working properly.
raise
except recipe_api.StepFailure as e:
# All other failures are the infra's fault.
raise recipe_api.InfraFailure(e.name or e.reason, result=e.result)
with self.m.context(infra_steps=True, cwd=path):
snapshot_file = self.m.path['cleanup'].join('jiri.snapshot')
self.m.jiri.snapshot(snapshot_file)
return self.finalize_checkout(
path=path,
snapshot_file=snapshot_file,
is_from_snapshot=False,
is_release_version=is_release_version,
source_manifest=source_manifest,
)
@_nest
def from_commit_snapshot(self, path, gitiles_commit, attributes=()):
"""Uses Jiri to check out Fuchsia from a Jiri snapshot and Gitiles commit.
The root of the checkout is returned via CheckoutResults.root_dir.
Args:
path (Path): The Fuchsia checkout root.
gitiles_commit (buildbucket.common_pb2.GitilesCommit): A Gitiles commit.
attributes (seq(str)): A list of jiri manifest attributes; projects or
packages with matching attributes - otherwise regarded as optional -
will be downloaded.
Returns:
A CheckoutResults containing details of the checkout.
"""
repository = 'https://%s/%s' % (
gitiles_commit.host,
gitiles_commit.project,
)
revision = gitiles_commit.id
with self.m.context(infra_steps=True):
snapshot_repo_dir = self.m.path['cleanup'].join('snapshot_repo')
# Without any patch information, we just want to fetch whatever we're
# told via repository and revision.
self.m.git.checkout(
url=repository,
ref=revision,
path=snapshot_repo_dir,
cache=False,
)
return self._checkout_snapshot(
path=path, snapshot_repo_dir=snapshot_repo_dir, attributes=attributes)
@_nest
def from_patchset_snapshot(self, path, gerrit_change, attributes=()):
"""Uses Jiri to check out Fuchsia from a Jiri snapshot from a Gerrit patch.
The root of the checkout is returned via CheckoutResults.root_dir.
Args:
path (Path): The Fuchsia checkout root.
gerrit_change (buildbucket.common_pb2.GerritChange): A Gerrit change.
attributes (seq(str)): A list of jiri manifest attributes; projects or
packages with matching attributes - otherwise regarded as optional -
will be downloaded.
Returns:
A CheckoutResults containing details of the checkout.
"""
with self.m.context(infra_steps=True):
snapshot_repo_dir = self.m.path['cleanup'].join('snapshot_repo')
# 1) Check out the patch from Gerrit (initializing the repo also).
# 2) Learn the destination branch for the Gerrit change.
# 3) Fetch and rebase the patch against the destination branch.
#
# Firstly, we want to rebase on top of the upstream branch because when
# we're testing, we want to test the rebased change to make the tryjob as
# accurate as possible before submitting.
#
# Secondly, we need to fetch the destination branch for a Gerrit change
# via the Gerrit recipe module because CQ does not provide this
# information. This is the canonical way in which other Chrome Infra
# tryjob recipes are able to rebase onto the destination branch.
details = self._get_change_details(gerrit_change)
current_revision = self._get_current_revision(gerrit_change, details)
self.m.git.checkout(
url='https://%s/%s' % (gerrit_change.host, gerrit_change.project),
ref=current_revision['ref'],
path=snapshot_repo_dir,
cache=False,
)
with self.m.context(cwd=snapshot_repo_dir):
git_host = gerrit_change.host
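        # Convert the Gerrit review host to the corresponding git host, e.g.
        # 'fuchsia-review.googlesource.com' -> 'fuchsia.googlesource.com'.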
gs_suffix = '-review.googlesource.com'
assert git_host.endswith(gs_suffix)
git_host = '%s.googlesource.com' % git_host[:-len(gs_suffix)]
self.m.git(
'fetch',
'https://%s/%s' % (git_host, gerrit_change.project),
details['branch'],
)
self.m.git('rebase', 'FETCH_HEAD')
return self._checkout_snapshot(
path=path, snapshot_repo_dir=snapshot_repo_dir, attributes=attributes)
def _checkout_snapshot(self, path, snapshot_repo_dir, attributes=()):
    # Locate the snapshot file; finalize_checkout reads it and logs its
    # contents in the step presentation.
snapshot_file = snapshot_repo_dir.join('snapshot')
cherrypick_file = snapshot_repo_dir.join('cherrypick.json')
# Create a checkout from the snapshot.
self.m.jiri.init(attributes=attributes)
    # Hooks must be run during the update from a snapshot; jiri doesn't record
    # hooks anywhere when it updates from a snapshot, so the only way to run
    # them later would be to re-run the update, which is redundant.
self.m.jiri.update(
run_hooks=True,
snapshot=snapshot_file,
timeout=None,
# If attributes are set, override snapshot's current configuration.
override_optional=bool(attributes))
source_manifest = self.m.jiri.emit_source_manifest()
# Perform cherrypicks if there is a cherrypick file.
if self.m.path.exists(cherrypick_file):
# Get the cherrypick json
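      # The file maps jiri project names to lists of commits to cherry-pick,
      # e.g. {"some_project": ["<commit hash>"]} (hypothetical example).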
cherrypick_json = self.m.file.read_raw('read cherrypick file',
cherrypick_file,
'{\"topaz\":[\"test\"]}')
cherrypick_dict = self.m.json.loads(cherrypick_json)
for project, cherrypicks in cherrypick_dict.items():
        # Get the project's checkout path relative to the root and join it to
        # `path`. The path attribute always exists in the snapshot file, so we
        # don't need to check for its existence.
manifest_element = self.m.jiri.read_manifest_element(
manifest=snapshot_file,
element_type='project',
element_name=project)
if manifest_element['path'] == '.':
project_path = path
else:
project_path = path.join(*manifest_element['path'].split('/'))
for cherrypick in cherrypicks:
self.m.git('-C', project_path, 'cherry-pick', cherrypick,
'--keep-redundant-commits')
return self.finalize_checkout(
path=path,
snapshot_file=snapshot_file,
is_from_snapshot=True,
is_release_version=False,
source_manifest=source_manifest,
)
def _apply_patchfile(self, path, gerrit_change):
"""Parses and applies the PatchFile for the given gerrit change."""
# Verify patches.json exists.
patchfile_path = path.join(gerrit_change.project, 'patches.json')
if not self.m.path.exists(patchfile_path):
return
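    # patches.json describes additional Gerrit changes to pull into the
    # checkout; each entry provides at least a host, project, and ref (see the
    # local patch module for the schema).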
patch_file = self._parse_patchfile(patchfile_path)
# Ensure patchfile is valid.
validation_err = patch_file.validate(gerrit_change)
if validation_err is not None:
raise self.m.step.StepFailure(str(validation_err))
for patch_input in patch_file.inputs:
# If the patch pulls in a project that's not in the workspace already, the patch
# would not affect this build / test run. Skip this patch.
project_exists = len(
self.m.jiri.project([patch_input.project]).json.output) > 0
if not project_exists:
warning = 'Warning: skipping patch for %s which is not in the checkout' % patch_input.project
self.m.step(warning, [])
continue
# Strip protocol if present.
host = patch_input.host
host_url = urlparse(host)
if host_url.scheme:
host = host_url.hostname
# Patch in the change
self.m.jiri.patch(
ref=patch_input.ref,
host='https://%s' % host,
project=patch_input.project,
rebase=True)
def _get_change_details(self, gerrit_change):
"""Fetches the details of a Gerrit change"""
return self.m.gerrit.change_details(
name='get change details',
change_id='%s~%s' % (
gerrit_change.project,
gerrit_change.change,
),
gerrit_host='https://%s' % gerrit_change.host,
query_params=['ALL_REVISIONS'],
test_data=self.m.json.test_api.output({
'branch': 'master',
'revisions': {
'd4e5f6': {
'_number': 3,
'ref': 'refs/changes/00/100/3'
},
'a1b2c3': {
'_number': 7,
'ref': 'refs/changes/00/100/7'
},
'g7h8i9': {
'_number': 9,
'ref': 'refs/changes/00/100/9'
},
},
}),
)
def _get_current_revision(self, gerrit_change, change_details):
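    # Each entry in the change details' 'revisions' map is one patchset; pick
    # the one whose '_number' matches the patchset from the buildbucket input.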
current_patchsets = [
rev for rev in change_details['revisions'].itervalues()
if rev['_number'] == gerrit_change.patchset
]
assert len(current_patchsets) == 1
return current_patchsets[0]
def _parse_patchfile(self, patchfile_path):
"""Parses a PatchFile from the given path"""
js = self.m.json.read(
'read patches-json',
patchfile_path,
).json.output
return patch.PatchFile.from_json(js)
def _get_release_version(self, path):
"""Get release version of checkout."""
release_version = self.m.release.ref_to_release_version(
ref='HEAD', repo_path=path.join('integration'))
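    # Strip the 'releases/' prefix, e.g. a hypothetical 'releases/0.1.2'
    # becomes '0.1.2'.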
release_version_str = str(release_version).replace('releases/', '')
self.m.step.active_result.presentation.logs['release_version'] = [
release_version_str
]
return release_version_str
def finalize_checkout(self, path, snapshot_file, is_from_snapshot,
is_release_version, source_manifest):
"""Finalizes a checkout; constructs a CheckoutResults object.
Args:
path (Path): The Fuchsia checkout root.
snapshot_file (Path): Path to the snapshot file.
is_from_snapshot (bool): Whether this checkout is from snapshot.
is_release_version (bool): Whether this checkout is a release version.
source_manifest (dict): A Python dictionary representing the source manifest.
Returns:
A CheckoutResults containing details of the checkout.
"""
snapshot_contents = self.m.file.read_text('read snapshot', snapshot_file)
# Always log snapshot contents (even if uploading to GCS) to help debug
# things like tryjob failures during roller-commits.
snapshot_step_logs = self.m.step.active_result.presentation.logs
snapshot_step_logs['snapshot_contents'] = snapshot_contents.split('\n')
# If using a release version, get release version string.
release_version = (
self._get_release_version(path) if is_release_version else None)
return CheckoutResults(
api=self,
root_dir=path,
snapshot_file=snapshot_file,
is_from_snapshot=is_from_snapshot,
release_version=release_version,
source_manifest=source_manifest,
)