# Copyright 2020 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json

from recipe_engine import recipe_api
from RECIPE_MODULES.fuchsia.utils import pluralize
from PB.go.chromium.org.luci.buildbucket.proto import (
builder_common as builder_common_pb2,
builds_service as builds_service_pb2,
common as common_pb2,
)
from PB.recipe_modules.fuchsia.presubmit_util.options import Options


class PresubmitUtilApi(recipe_api.RecipeApi):
"""APIs for running presubmit tests in external projects."""
Options = Options

    def orchestrate(
self,
options,
cl_subject=None,
file_edits=None,
package_overrides=None,
gclient_variables=None,
):
"""Orchestrate a presubmit run in an external project.
The external project must support one of:
1. File-edit-based overrides.
2. Overrides via gclient properties.
Args:
options (presubmit_util.Options): Presubmit options.
cl_subject (str): Subject of the CL for informational purposes.
file_edits (list): List of (file, file_contents) pairs to edit.
package_overrides (dict(str,str)): Mapping from package to CAS
digest to populate package_overrides.json.
gclient_variables (dict): Variables to pass to gclient.
"""
change_num = None
patchset_num = None
file_edits = file_edits or []
if package_overrides:
file_edits += [
(
"package_overrides.json",
json.dumps(package_overrides, indent=2, sort_keys=True),
),
]
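            # The generated file is a sorted, pretty-printed JSON object, e.g.
            # (package name and digest are hypothetical):
            #
            #   {
            #     "example-package": "<cas-digest>"
            #   }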
        # Some downstream projects support setting gclient variables via build
        # properties to modify the checkout contents. For those projects we
        # don't need to create a CL, so only create one when no gclient
        # variables are provided.
        if not gclient_variables:
change_info = self._create_cl(
options,
subject=cl_subject,
file_edits=file_edits,
)
change_num = change_info["number"]
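            # The patchset number is nested under the current revision's entry
            # in the change info's "revisions" map.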
patchset_num = change_info["revisions"][change_info["current_revision"]][
"number"
]
try:
self._run_downstream_builders(
options, gclient_variables, change_num, patchset_num
)
finally:
if change_num:
self._abandon_cl(options, change_num=change_num)

    def _run_downstream_builders(
        self, options, gclient_variables, change_num, patchset_num
    ):
        """Dry-run the downstream project's builders.

        The builders run either against the specified Gerrit change, or with
        the specified gclient variables set via build properties to modify the
        checkout contents.
        """
if not options.tryjobs:
# If specific tryjobs are not listed, it's sufficient to trigger CQ
# and wait for CQ to finish rather than waiting on the individual
# builders.
assert (
not options.trigger_tryjobs
), "'tryjobs' must be set if 'trigger_tryjobs' is set"
self._trigger_cq(options, change_num=change_num)
self._wait_for_cq(options, change_num=change_num)
return
if options.trigger_tryjobs:
# Trigger tryjobs directly, skipping CQ.
properties = {}
if gclient_variables:
properties["gclient_variables"] = gclient_variables
build_ids = self._trigger_tryjobs(
options,
change_num=change_num,
patchset_num=patchset_num,
properties=properties,
)
else:
# Trigger tryjobs via CQ.
self._trigger_cq(options, change_num=change_num)
# Give tryjobs time to start after applying the CQ label. There's
# some latency between the label being applied and CQ triggering the
# builds.
self.m.time.sleep(options.tryjobs_wait_secs)
build_ids = self._resolve_build_ids(options, change_num, patchset_num)
self._collect_tryjobs(options, change_num=change_num, build_ids=build_ids)

    def _resolve_build_ids(self, options, change_num, patchset_num):
"""Resolve IDs of builds triggered by CQ on a change."""
with self.m.step.nest("resolve triggered build IDs") as presentation:
assert (
change_num and patchset_num
), "cannot search builders without a patchset"
predicates = []
for builder in options.tryjobs:
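                # Each tryjob is specified as "project/bucket/builder"; for
                # example, a hypothetical "fuchsia/try/foo-x64" splits into
                # project "fuchsia", bucket "try", and builder "foo-x64".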
project, bucket, builder_name = builder.split("/")
predicate = builds_service_pb2.BuildPredicate(
builder=builder_common_pb2.BuilderID(
project=project,
bucket=bucket,
builder=builder_name,
),
gerrit_changes=[
common_pb2.GerritChange(
host=options.tryjobs_gerrit_host or options.gerrit_host,
project=options.gerrit_project,
change=change_num,
patchset=patchset_num,
),
],
)
predicates.append(predicate)
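            # The search should return every build matching any of the
            # predicates, i.e. at least one build per requested tryjob.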
build_ids = [
b.id
for b in self.m.buildbucket.search_with_multiple_predicates(
step_name="search",
predicate=predicates,
)
]
if len(build_ids) < len(options.tryjobs):
presentation.status = self.m.step.EXCEPTION
                raise self.m.step.InfraFailure(
                    f"expected search results to contain {len(options.tryjobs)} "
                    f"builds, got {len(build_ids)}; builds may have failed to "
                    f"be scheduled, or the queried builders may not exist"
                )
return build_ids

    def _create_cl(self, options, subject, file_edits, presentation=None):
        """Create a CL.

        Args:
            options (presubmit_util.Options): Presubmit options.
            subject (str): Commit message subject of the CL.
            file_edits (seq((str, str))): A sequence of file edits, where each
                file edit is a pair of strings: (filepath, contents).
            presentation (StepPresentation): Add links to this presentation, if
                specified. Otherwise, add links to the step generated by this
                call.

        Returns:
            gerritpb.ChangeInfo: The CL's change info.
        """
args = [
"-subject",
subject,
"-json-output",
self.m.json.output(),
]
for filepath, contents in file_edits:
args += ["-file-edit", f"{filepath}:{contents}"]
if options.ref:
args += ["-ref", options.ref]
step = self._run("create CL", options, "create-cl", args, infra_step=True)
change_info = step.json.output
presentation = presentation or step.presentation
presentation.links["gerrit_link"] = (
f"https://{options.gerrit_host}/c/{options.gerrit_project}/+/{int(change_info['number'])}"
)
return change_info

    def _trigger_cq(self, options, change_num):
        """Trigger a CQ dry run on a CL.

        Args:
            options (presubmit_util.Options): Presubmit options.
            change_num (int): Gerrit change number.
        """
args = [
"-change-num",
change_num,
"-dryrun",
]
self._run("trigger CQ+1", options, "trigger-cq", args)

    def _wait_for_cq(self, options, change_num):
        """Wait for a CQ attempt to complete and check its status.

        Args:
            options (presubmit_util.Options): Presubmit options.
            change_num (int): Gerrit change number.
        """
args = [
"-change-num",
change_num,
"-timeout",
f"{int(options.timeout_secs)}s",
"-json-output",
self.m.json.output(),
]
if options.presubmit_status_gerrit_label:
args.extend(["-gerrit-label", options.presubmit_status_gerrit_label])
step = self._run("wait for CQ", options, "wait-for-cq", args)
passed = step.json.output
if not passed:
gerrit_link = f"https://{options.gerrit_host}/c/{options.gerrit_project}/+/{change_num}"
self.m.step.empty(
"CQ attempt failed",
status="FAILURE",
step_text=f"CQ attempt failed. See [gerrit UI]({gerrit_link})",
)

    def _abandon_cl(self, options, change_num):
        """Abandon a CL.

        Args:
            options (presubmit_util.Options): Presubmit options.
            change_num (int): Gerrit change number.
        """
args = [
"-change-num",
change_num,
]
self._run("abandon CL", options, "abandon-cl", args, infra_step=True)

    def _trigger_tryjobs(
self,
options,
change_num=None,
patchset_num=None,
properties=None,
):
"""Trigger tryjobs on a Buildbucket-based presubmit.
Args:
options (presubmit_util.Options): Presubmit options.
change_num (int or None): Gerrit change number.
patchset_num (int or None): Gerrit change's patchset number.
properties (dict or None): Input properties for the tryjobs.
"""
reqs = []
for builder in options.tryjobs:
project, bucket, builder_name = builder.split("/")
req = self.m.buildbucket.schedule_request(
builder=builder_name,
project=project,
bucket=bucket,
# Emulate "Choose Tryjobs" plugin.
gerrit_changes=(
[
common_pb2.GerritChange(
host=options.tryjobs_gerrit_host or options.gerrit_host,
project=options.gerrit_project,
change=change_num,
patchset=patchset_num,
),
]
if change_num
else []
),
properties=properties,
                # Use the server-defined settings rather than inheriting the
                # parent build's settings, which won't necessarily match the
                # tryjobs' settings, e.g. when crossing infrastructures.
exe_cipd_version=None,
experimental=None,
inherit_buildsets=False,
gitiles_commit=None,
priority=None,
)
reqs.append(req)
with self.m.step.nest("trigger tryjobs"):
builds = self.m.buildbucket.schedule(
reqs,
step_name="schedule",
# Don't propagate ResultDB data from the tryjobs. It may be
# useful, but it can result in a huge amount of ResultDB data if
# we're running many tryjobs. We also don't want to pollute our
# ResultDB data with data from other projects.
include_sub_invs=False,
)
return [b.id for b in builds]

    def _collect_tryjobs(
self,
options,
change_num=None,
build_ids=None,
raise_on_failure=True,
):
"""Collect tryjobs on a Buildbucket-based presubmit.
Args:
options (presubmit_util.Options): Presubmit options.
change_num (int or None): Gerrit change number.
build_ids (seq(str)): A sequence of build ids to collect. Cannot be
be combined with options.tryjobs.
raise_on_failure (bool): Whether to raise on tryjob failure(s).
Raises:
InfraFailure: One or more tryjobs completed with INFRA_FAILURE status.
StepFailure: One or more tryjobs completed with FAILURE status.
"""
with self.m.step.nest("collect tryjobs"):
builds = self.m.buildbucket.collect_builds(
step_name="collect",
build_ids=build_ids,
timeout=options.timeout_secs,
)
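            # collect_builds returns a mapping of build ID to Build message, so
            # only the values are needed for display.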
try:
self.m.buildbucket_util.display_builds(
step_name="display",
builds=builds.values(),
raise_on_failure=raise_on_failure,
)
except self.m.step.StepFailure:
failed_tryjobs = [
b.builder.builder
for b in builds.values()
if b.status != common_pb2.SUCCESS
]
gerrit_link = None
if change_num:
gerrit_link = f"https://{options.gerrit_host}/c/{options.gerrit_project}/+/{change_num}"
raise self.m.step.StepFailure(
"%s failed:%s\n\n%s"
% (
pluralize("external tryjob", len(failed_tryjobs)),
f" see [gerrit UI]({gerrit_link}):" if gerrit_link else "",
"\n".join([f"- {t}" for t in failed_tryjobs]),
)
)
return builds

    def _run(self, step_name, options, subcmd_name, args, infra_step=False):
assert options.gerrit_host
assert options.gerrit_project
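        # Every invocation has the shape (binary name assumed from the CIPD
        # package):
        #   cl-util <subcommand> -gerrit-host <host> -gerrit-project <project> <args...>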
cmd = [
self._presubmit_util_tool,
subcmd_name,
"-gerrit-host",
options.gerrit_host,
"-gerrit-project",
options.gerrit_project,
] + args
return self.m.step(step_name, cmd, infra_step=infra_step)

    @property
def _presubmit_util_tool(self):
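        # Resolve the cl-util tool from CIPD using the pinned version in this
        # module's cipd.ensure resource file (assumption: cipd_ensure returns
        # the path to the installed binary).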
return self.m.cipd_ensure(
self.resource("cipd.ensure"),
"fuchsia/infra/cl-util/${platform}",
)