# Copyright 2020 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
from recipe_engine import recipe_api
from RECIPE_MODULES.fuchsia.utils import pluralize
from PB.go.chromium.org.luci.buildbucket.proto import (
builder_common as builder_common_pb2,
builds_service as builds_service_pb2,
common as common_pb2,
)
from PB.recipe_modules.fuchsia.presubmit_util.options import Options
class PresubmitUtilApi(recipe_api.RecipeApi):
"""APIs for running presubmit tests in external projects."""
Options = Options
def orchestrate(
self,
options,
cl_subject=None,
file_edits=None,
package_overrides=None,
gclient_variables=None,
):
"""Orchestrate a presubmit run in an external project.
The external project must support one of:
1. File-edit-based overrides.
2. Overrides via gclient variables passed as build input properties.
Args:
options (presubmit_util.Options): Presubmit options.
cl_subject (str): Subject of the CL for informational purposes.
file_edits (list): List of (filepath, contents) pairs to apply as file edits.
package_overrides (dict(str,str)): Mapping from package name to CAS
digest, used to populate package_overrides.json.
gclient_variables (dict): Variables to pass to gclient.
"""
change_num = None
patchset_num = None
file_edits = file_edits or []
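# Package overrides are serialized to package_overrides.json and applied
# to the CL as just another file edit.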
if package_overrides:
file_edits += [
(
"package_overrides.json",
json.dumps(package_overrides, indent=2, sort_keys=True),
),
]
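# When overrides are passed via gclient variables there is nothing to
# commit, so no CL is created; otherwise the edits are staged on a new CL.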
if not gclient_variables:
change_info = self._create_cl(
"create CL",
options,
subject=cl_subject,
file_edits=file_edits,
)
change_num = change_info["number"]
patchset_num = change_info["revisions"][change_info["current_revision"]][
"number"
]
try:
# If we aren't explicitly triggering tryjobs, build IDs are resolved
# during collection.
build_ids = None
# If we are, build IDs are provided by the trigger output.
if options.trigger_tryjobs:
properties = None
if gclient_variables:
properties = {
"gclient_variables": gclient_variables,
}
build_ids = [
b.id
for b in self._trigger_tryjobs(
"trigger tryjobs",
options,
change_num=change_num,
patchset_num=patchset_num,
properties=properties,
)
]
else:
self._trigger_cq(
"trigger CQ+1",
options,
change_num=change_num,
)
if not options.tryjobs:
self._wait_for_cq(
"wait for CQ",
options,
change_num=change_num,
)
else:
# Give tryjobs time to start after applying the CQ label. This
# is only required when CQ triggered the tryjobs, not when we
# triggered them explicitly.
if not options.trigger_tryjobs:
self.m.time.sleep(options.tryjobs_wait_secs)
self._collect_tryjobs(
"collect tryjobs",
options,
change_num=change_num,
patchset_num=patchset_num,
build_ids=build_ids,
)
finally:
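# Always abandon the CL we created, even if triggering or collection failed.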
if change_num:
self._abandon_cl("abandon CL", options, change_num=change_num)
def _create_cl(self, step_name, options, subject, file_edits, presentation=None):
"""Create a CL.
Args:
step_name (str): Name of the step.
options (presubmit_util.Options): Presubmit options.
subject (str): Commit message subject of CL.
file_edits (seq((str, str))): A sequence of file edits, where each
file edit is a pair of strings: (filepath, contents).
presentation (StepPresentation): Presentation to attach links to. If
not specified, links are added to the step generated by this call.
Returns:
gerritpb.ChangeInfo: The CL's change info.
"""
args = [
"-subject",
subject,
"-json-output",
self.m.json.output(),
]
for filepath, contents in file_edits:
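# Each edit is passed to the tool as a single "filepath:contents" argument.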
args += ["-file-edit", "%s:%s" % (filepath, contents)]
if options.ref:
args += ["-ref", options.ref]
step = self._run(step_name, options, "create-cl", args, infra_step=True)
change_info = step.json.output
presentation = presentation or step.presentation
presentation.links["gerrit_link"] = "https://%s/c/%s/+/%d" % (
options.gerrit_host,
options.gerrit_project,
change_info["number"],
)
return change_info
def _trigger_cq(
self,
step_name,
options,
change_num,
):
"""Trigger dryrun on a CL.
Args:
step_name (str): Name of the step.
options (presubmit_util.Options): Presubmit options.
change_num (int): Gerrit change number.
"""
args = [
"-change-num",
change_num,
"-dryrun",
]
self._run(step_name, options, "trigger-cq", args)
def _wait_for_cq(
self,
step_name,
options,
change_num,
):
"""Wait on a CQ attempt for completion and get its status.
Args:
step_name (str): Name of the step.
options (presubmit_util.Options): Presubmit options.
change_num (int): Gerrit change number.
"""
args = [
"-change-num",
change_num,
"-timeout",
"%ds" % options.timeout_secs,
"-json-output",
self.m.json.output(),
]
step = self._run(step_name, options, "wait-for-cq", args)
passed = step.json.output
if not passed:
gerrit_link = "https://%s/c/%s/+/%d" % (
options.gerrit_host,
options.gerrit_project,
change_num,
)
self.m.step.empty(
"CQ attempt failed",
status="FAILURE",
step_text="CQ attempt failed. See [gerrit UI](%s)" % gerrit_link,
)
def _abandon_cl(self, step_name, options, change_num):
"""Abandon a CL.
Args:
step_name (str): Name of the step.
options (presubmit_util.Options): Presubmit options.
change_num (int): Gerrit change number.
"""
args = [
"-change-num",
change_num,
]
self._run(step_name, options, "abandon-cl", args, infra_step=True)
def _trigger_tryjobs(
self,
step_name,
options,
change_num=None,
patchset_num=None,
properties=None,
):
"""Trigger tryjobs on a Buildbucket-based presubmit.
Args:
step_name (str): Name of the step.
options (presubmit_util.Options): Presubmit options.
change_num (int or None): Gerrit change number.
patchset_num (int or None): Gerrit change's patchset number.
properties (dict or None): Input properties for the tryjobs.
"""
reqs = []
for builder in options.tryjobs:
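# Tryjob builders are specified as "project/bucket/builder".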
project, bucket, builder_name = builder.split("/")
req = self.m.buildbucket.schedule_request(
builder=builder_name,
project=project,
bucket=bucket,
# Emulate "Choose Tryjobs" plugin.
gerrit_changes=[
common_pb2.GerritChange(
host=options.tryjobs_gerrit_host or options.gerrit_host,
project=options.gerrit_project,
change=change_num,
patchset=patchset_num,
),
]
if change_num
else [],
properties=properties,
# Use the server-defined settings rather than inheriting the
# parent build's settings, which won't necessarily match the
# tryjobs' settings, e.g. when crossing infrastructures.
exe_cipd_version=None,
experimental=None,
inherit_buildsets=False,
gitiles_commit=None,
priority=None,
)
reqs.append(req)
with self.m.step.nest(step_name):
return self.m.buildbucket.schedule(
reqs,
step_name="schedule",
# Don't propagate ResultDB data from the tryjobs. It may be
# useful, but it can result in a huge amount of ResultDB data if
# we're running many tryjobs. We also don't want to pollute our
# ResultDB data with data from other projects.
include_sub_invs=False,
)
def _collect_tryjobs(
self,
step_name,
options,
change_num=None,
patchset_num=None,
build_ids=None,
raise_on_failure=True,
):
"""Collect tryjobs on a Buildbucket-based presubmit.
Args:
step_name (str): Name of the step.
options (presubmit_util.Options): Presubmit options.
change_num (int or None): Gerrit change number.
patchset_num (int or None): Gerrit change's patchset number.
build_ids (seq(str)): A sequence of build IDs to collect. Cannot be
combined with options.tryjobs.
raise_on_failure (bool): Whether to raise on tryjob failure(s).
Raises:
InfraFailure: One or more tryjobs completed with INFRA_FAILURE status.
StepFailure: One or more tryjobs completed with FAILURE status.
"""
with self.m.step.nest(step_name) as presentation:
tryjobs = options.tryjobs if not build_ids else None
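# If the tryjobs weren't triggered explicitly (no build IDs were
# provided), find them by searching Buildbucket for builds on this
# change and patchset.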
if tryjobs:
assert (
change_num and patchset_num
), "cannot search builders without a patchset"
predicates = []
for builder in tryjobs:
project, bucket, builder_name = builder.split("/")
predicate = builds_service_pb2.BuildPredicate(
builder=builder_common_pb2.BuilderID(
project=project,
bucket=bucket,
builder=builder_name,
),
gerrit_changes=[
common_pb2.GerritChange(
host=options.tryjobs_gerrit_host or options.gerrit_host,
project=options.gerrit_project,
change=change_num,
patchset=patchset_num,
),
],
)
predicates.append(predicate)
build_ids = [
b.id
for b in self.m.buildbucket.search(
step_name="search",
predicate=predicates,
)
]
if len(build_ids) < len(tryjobs):
presentation.status = self.m.step.EXCEPTION
raise self.m.step.InfraFailure(
"expected search results to contain %d builds, got %d; "
"builds may have failed to have been scheduled, or the "
"queried builders may not exist"
% (len(tryjobs), len(build_ids)),
)
builds = self.m.buildbucket.collect_builds(
step_name="collect",
build_ids=build_ids,
timeout=options.timeout_secs,
)
try:
self.m.buildbucket_util.display_builds(
step_name="display",
builds=builds.values(),
raise_on_failure=raise_on_failure,
)
except self.m.step.StepFailure:
failed_tryjobs = [
b.builder.builder
for b in builds.values()
if b.status != common_pb2.SUCCESS
]
gerrit_link = None
if change_num:
gerrit_link = "https://%s/c/%s/+/%d" % (
options.gerrit_host,
options.gerrit_project,
change_num,
)
raise self.m.step.StepFailure(
"%s failed:%s\n\n%s"
% (
pluralize("external tryjob", len(failed_tryjobs)),
" see [gerrit UI](%s):" % gerrit_link if gerrit_link else "",
"\n".join(["- %s" % t for t in failed_tryjobs]),
)
)
return builds
def _run(self, step_name, options, subcmd_name, args, infra_step=False):
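"""Run a cl-util subcommand with the common Gerrit host/project flags."""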
assert options.gerrit_host
assert options.gerrit_project
cmd = [
self._presubmit_util_tool,
subcmd_name,
"-gerrit-host",
options.gerrit_host,
"-gerrit-project",
options.gerrit_project,
] + args
return self.m.step(step_name, cmd, infra_step=infra_step)
@property
def _presubmit_util_tool(self):
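"""The cl-util binary, made available by ensure_tool from the bundled tool manifest."""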
return self.m.ensure_tool("cl-util", self.resource("tool_manifest.json"))