blob: 28a2b8ed8df60133748bd0a1a7773159cb2a6e8a [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia and isolating build artifacts."""
import re
from PB.infra.fuchsia import Fuchsia
from recipe_engine import post_process
from recipe_engine.recipe_api import Property
TEST_MULTIPLIER_KEY = "MULTIPLY"
# These represent the location of the isolated hash in the output of this
# recipe when building SDK archives. Must be kept in sync with sdk.py.
ISOLATE_STEP_NAME = "isolate artifacts"
ISOLATED_OUTPUT_KEY = "isolated_output_hash"
# Recipe modules this recipe depends on. Grouped by repo; keep each
# group in alphabetical order (fixed: "step" sorts before "swarming").
DEPS = [
    "fuchsia/artifacts",
    "fuchsia/build",
    "fuchsia/build_input_resolver",
    "fuchsia/buildbucket_util",
    "fuchsia/checkout",
    "fuchsia/fuchsia",
    "fuchsia/gerrit",
    "fuchsia/git",
    "fuchsia/gitiles",
    "fuchsia/jiri",
    "fuchsia/jsonutil",
    "fuchsia/spec",
    "fuchsia/status_check",
    "fuchsia/testing_requests",
    "fuchsia/testsharder",
    "recipe_engine/buildbucket",
    "recipe_engine/cipd",
    "recipe_engine/context",
    "recipe_engine/file",
    "recipe_engine/isolated",
    "recipe_engine/json",
    "recipe_engine/path",
    "recipe_engine/properties",
    "recipe_engine/python",
    "recipe_engine/step",
    "recipe_engine/swarming",
]
# Input properties accepted by this recipe; values may be set per-builder
# or supplied at build-scheduling time.
PROPERTIES = {
    "parent_id": Property(
        # This is a string because led IDs are not integers.
        kind=str,
        help="Parent build's buildbucket or led id",
        default=None,
    ),
    "spec_remote": Property(
        kind=str,
        help="URL of the specs git repository",
        default="https://fuchsia.googlesource.com/integration",
    ),
    "spec_revision": Property(
        kind=str, help="The revision of spec_remote to fetch", default=None
    ),
    "perfcompare": Property(
        kind=bool,
        help=(
            'Enable perfcompare mode: Build the "without CL" revision, '
            'not just the "with CL" revision.'
        ),
        default=False,
    ),
    "comment_led": Property(
        kind=bool,
        help=("Whether to add comments for multipliers on led tasks."),
        default=False,
    ),
}
def RunSteps(api, parent_id, spec_remote, spec_revision, perfcompare, comment_led):
    """Checks out and builds Fuchsia, then isolates the build artifacts.

    Args:
        parent_id (str): Buildbucket or led ID of the parent (orchestrator)
            build; None/empty when there is no parent.
        spec_remote (str): URL of the specs git repository.
        spec_revision (str): Revision of spec_remote to fetch (None for tip).
        perfcompare (bool): If True, additionally build the "without CL"
            revision for performance comparison.
        comment_led (bool): Whether to add Gerrit comments for multipliers
            on led-launched tasks.
    """
    spec, spec_revision = api.fuchsia.setup_with_spec(spec_remote, spec_revision)
    bb_input = api.buildbucket.build.input
    with api.step.nest("got_revision") as presentation:
        # Read by the binary-size Gerrit plugin
        presentation.properties["got_revision"] = bb_input.gitiles_commit.id
    # TODO(fxb/39958): Retrying all failed builds in Gerrit launches subbuilds as
    # well, which fail because they're not triggered by a parent build. Once
    # that's resolved we can get rid of this check.
    if spec.build.run_tests and not parent_id:
        raise api.python.infra_failing_step(
            "no parent_id", "subbuilds can only be triggered by parent builds"
        )
    checkout = api.checkout.from_spec(spec.checkout)
    # Load test multipliers before building so we can fail fast if the JSON is
    # malformed, which avoids wasting a build.
    multipliers = []
    if bb_input.gerrit_changes and spec.test.test_in_shards:
        with api.step.nest("test multipliers") as presentation:
            multipliers = read_multipliers(api, bb_input, checkout)
            # Surface the parsed multipliers on the step for debugging.
            presentation.logs["multipliers"] = api.json.dumps(
                [m.render_to_jsonish() for m in multipliers], indent=2
            )
    # Give SDK subbuilds their own namespaces for upload, so they do not clobber
    # in the case of multiple subbuilds.
    if not parent_id or spec.build.sdk_subbuild:
        upload_namespace = api.buildbucket_util.id
    else:
        upload_namespace = parent_id
    if spec.checkout.upload_results:
        assert spec.gcs_bucket, "gcs_bucket must be set if checkout.upload_results is"
        checkout.upload_results(spec.gcs_bucket, namespace=upload_namespace)
    if spec.artifact_gcs_bucket:
        checkout.upload_source_manifest(
            spec.artifact_gcs_bucket, namespace=upload_namespace
        )
    # Only resolved (and needed) in perfcompare mode, to know which repo to
    # roll back for the "without CL" build.
    repo_path = None
    if perfcompare:
        # Do sanity checks to catch problems before doing the build.
        assert bb_input.gerrit_changes, (
            "perfcompare mode is for CQ builds only," " not CI: no Gerrit changes found"
        )
        project = bb_input.gerrit_changes[0].project
        with api.context(cwd=checkout.root_dir):
            repo_path = api.jiri.project(projects=[project]).json.output[0]["path"]
        assert repo_path, (
            "Unable to find the path of the Git repo that"
            " the CL applies to for perfcompare mode"
        )
    # Build infratools.
    spec.build.universe_packages.append("//bundles:infratools")
    spec.build.ninja_targets.append("bundles:infratools")
    run_build_steps(
        api,
        spec,
        parent_id,
        upload_namespace,
        checkout,
        bb_input,
        multipliers,
        without_cl=False,
        comment_led=comment_led,
    )
    if perfcompare:
        with api.step.nest("build without CL"):
            # Unapply the topmost Git commit that was applied from Gerrit. If
            # the CQ is testing a stack of multiple CLs from Gerrit, the other
            # CLs are left applied.
            # TODO(mseaborn): This does not handle cases where the CL changed
            # Jiri manifest files or contained a patches.json file.
            api.git(
                "-C",
                repo_path,
                "checkout",
                "HEAD^",
                name='git checkout of "without CL" revision',
            )
            run_build_steps(
                api,
                spec,
                parent_id,
                upload_namespace,
                checkout,
                bb_input,
                multipliers,
                without_cl=True,
                comment_led=comment_led,
            )
def run_build_steps(
    api,
    spec,
    parent_id,
    upload_namespace,
    checkout,
    bb_input,
    multipliers,
    without_cl,
    comment_led,
):
    """Runs one full Fuchsia build and isolates test orchestration inputs.

    Args:
        api: Recipe API object.
        spec: Fuchsia spec proto controlling checkout/build/test behavior.
        parent_id (str): Buildbucket or led ID of the parent build (may be
            None/empty when the build has no parent).
        upload_namespace (str): Namespace under which results are uploaded
            to GCS, so concurrent builds do not clobber each other.
        checkout: Checkout object returned by api.checkout.from_spec().
        bb_input: Buildbucket build input message.
        multipliers (list): Test multipliers parsed from the triggering CL.
        without_cl (bool): True for the perfcompare "without CL" build pass.
        comment_led (bool): Whether to comment on Gerrit for led tasks.

    Raises:
        api.step.StepFailure: If the size checks fail.
    """
    collect_build_metrics = False
    if spec.build.upload_results:
        # NOTE: message fixed to reference build.upload_results; the
        # checkout.upload_results case is asserted separately in RunSteps.
        assert spec.gcs_bucket, "gcs_bucket must be set if build.upload_results is"
        collect_build_metrics = True
    # If SDK subbuild, set SDK ID to parent ID.
    sdk_id = parent_id if spec.build.sdk_subbuild else None
    build = api.build.from_spec(
        spec.build,
        checkout,
        collect_build_metrics,
        pave=spec.test.pave,
        sdk_id=sdk_id,
        gcs_bucket=spec.gcs_bucket,
    )
    check_sizes_result = api.step(
        "check sizes",
        [
            build.tool("size_checker"),
            "--build-dir",
            build.fuchsia_build_dir,
            "--sizes-json-out",
            api.json.output(),
        ],
        step_test_data=lambda: api.json.test_api.output({"some-file": 123}),
        # If the size checks fail, we still want to write the output to
        # the output property below.
        ok_ret="any",
    )
    # This property is read by the binary-size Gerrit plugin.
    if check_sizes_result.json.output:
        check_sizes_result.presentation.properties[
            "binary_sizes"
        ] = check_sizes_result.json.output
    # Fail the build only after the sizes property has been written.
    if check_sizes_result.exc_result.retcode:
        raise api.step.StepFailure("size checks failed")
    # In SDK subbuild mode, isolate SDK archive and ninja targets.
    if spec.build.sdk_subbuild:
        sdk_archive_path = build.fuchsia_build_dir.join("sdk", "archive")
        isolated = api.isolated.isolated(sdk_archive_path)
        sdk_ninja_targets = [
            target
            for target in spec.build.ninja_targets
            if target.startswith("sdk/archive")
        ]
        for ninja_target in sdk_ninja_targets:
            isolated.add_file(
                path=build.fuchsia_build_dir.join(*ninja_target.split("/"))
            )
        # The resulting hash is surfaced as a property read by sdk.py
        # (see ISOLATE_STEP_NAME / ISOLATED_OUTPUT_KEY above).
        sdk_archive_isolated_hash = isolated.archive(ISOLATE_STEP_NAME)
        api.step.active_result.presentation.properties[
            ISOLATED_OUTPUT_KEY
        ] = sdk_archive_isolated_hash
    # TODO(garymm): assert spec.gcs_bucket set if upload_results set.
    if spec.gcs_bucket and spec.build.upload_results:
        build.upload_results(
            gcs_bucket=spec.gcs_bucket,
            is_release_version=spec.checkout.is_release_version,
            namespace=upload_namespace,
        )
    if spec.build.enforce_size_limits:
        build.check_filesystem_sizes()
    # Must be set before testing_requests.task_requests() is called.
    api.artifacts.gcs_bucket = spec.artifact_gcs_bucket
    api.artifacts.uuid = upload_namespace
    if without_cl:
        # Keep the two perfcompare passes in distinct artifact namespaces.
        api.artifacts.uuid += "_without_cl"
    send_comment = True
    if spec.build.run_tests:
        # parent_id is guaranteed non-empty here (RunSteps raises otherwise);
        # a numeric ID means a real Buildbucket parent, anything else is led.
        if parent_id.isdigit():
            # Use parent build so that testing task requests refer to
            # that build, which actually orchestrates testing.
            buildbucket_build = api.buildbucket.get(int(parent_id))
            # If it's a try build, the parent build will not have its gitiles_commit
            # populated (it's populated at runtime by `build_input_resolver`, but
            # that doesn't change the input values stored in Buildbucket). So we need
            # to populate it with the same commit that `build_input_resolver`
            # resolved for the subbuild.
            buildbucket_build.input.gitiles_commit.CopyFrom(bb_input.gitiles_commit)
            build_url = "https://ci.chromium.org/b/%s" % buildbucket_build.id
        else:
            # When the parent was launched by led, it's not possible to retrieve
            # the parent build, so we fall back to using our own build.
            # This is technically incorrect and any tests that rely on having
            # correct buildbucket metadata may fail when run via led. Ideally
            # we wouldn't have any tests that knew about buildbucket, but
            # for now this is OK since none of those tests run in recipes CQ,
            # which uses led to test recipes changes.
            buildbucket_build = api.buildbucket.build
            build_url = "https://ci.chromium.org/swarming/task/%s?server=%s" % (
                api.swarming.task_id,
                api.buildbucket.build.infra.swarming.hostname,
            )
            if not comment_led:
                send_comment = False
        if spec.test.test_in_shards:
            shards = api.testsharder.execute(
                "create test shards",
                testsharder_path=build.tool("testsharder"),
                build_dir=build.fuchsia_build_dir,
                max_shard_size=spec.test.max_shard_size,
                target_duration_secs=spec.test.target_shard_duration_secs,
                max_shards_per_env=spec.test.max_shards_per_env,
                multipliers=multipliers,
                tags=spec.build.environment_tags,
            )
            if bb_input.gerrit_changes and send_comment:
                gerrit_change = bb_input.gerrit_changes[0]
                report_multipliers(api, shards, gerrit_change, build_url)
            task_requests = api.testing_requests.task_requests(
                build,
                buildbucket_build,
                spec.test.per_test_timeout_secs,
                spec.test.pool,
                shards,
                spec.test.swarming_expiration_timeout_secs,
                spec.test.swarming_io_timeout_secs,
                spec.test.use_runtests,
                spec.test.timeout_secs,
                default_service_account=spec.test.default_service_account,
                pave=spec.test.pave,
                targets_serial=spec.test.targets_serial,
                catapult_dashboard_master=spec.test.catapult_dashboard_master,
                catapult_dashboard_bot=spec.test.catapult_dashboard_bot,
                release_version=checkout.release_version,
                zircon_args=spec.test.zircon_args,
            )
        else:
            task_requests = api.testing_requests.deprecated_task_requests(
                build,
                buildbucket_build,
                api.testing_requests.deprecated_test_cmds(spec.test),
                spec.test.device_type,
                spec.test.pool,
                spec.test.timeout_secs,
                spec.test.pave,
                swarming_expiration_timeout_secs=spec.test.swarming_expiration_timeout_secs,
                swarming_io_timeout_secs=spec.test.swarming_io_timeout_secs,
                default_service_account=spec.test.default_service_account,
                zircon_args=spec.test.zircon_args,
            )
        orchestration_inputs = api.build.TestOrchestrationInputs.from_build_results(
            build,
            task_requests,
            include_generated_sources=("profile" in spec.build.variants),
        )
        orchestration_inputs_hash = orchestration_inputs.isolate(api)
        step_result = api.step("logging orchestration_inputs_hash", cmd=None)
        dest_property = orchestration_inputs.property_name(without_cl)
        step_result.presentation.properties[dest_property] = orchestration_inputs_hash
        # The checkout root is needed for the coverage recipe to remap file
        # paths for source files from this checkout to the checkout created in
        # the coverage recipe where we call covargs.
        step_result.presentation.properties[
            api.checkout.ROOT_DIR_PROPERTY
        ] = api.path.abspath(build.checkout_root)
    # Must be done after testing_requests.task_requests() is called, because that
    # modifies the filesystem images. TODO(garymm,joshuaseaton): once legacy_qemu
    # code paths are removed, remove this comment as it will become false.
    if spec.artifact_gcs_bucket:
        api.artifacts.upload(
            "upload artifacts", build, sign_artifacts=spec.build.sign_artifacts
        )
def read_multipliers(api, bb_input, checkout):
    """Parses test multipliers out of the triggering CL's commit message.

    Uses the checkout's source manifest to locate the local directory that
    corresponds to the Gerrit change's repository, then extracts multipliers
    from the HEAD commit message in that directory.
    """
    change = bb_input.gerrit_changes[0]
    # Strip the "-review" suffix from the Gerrit host to reconstruct the
    # canonical repo URL recorded in the source manifest.
    repo_url = "https://%s/%s" % (
        change.host.replace("-review", ""),
        change.project,
    )
    # Default to the checkout root; switch to a subdirectory if the manifest
    # maps this repo somewhere other than ".".
    project_dir = checkout.root_dir
    for rel_path, entry in checkout.source_manifest["directories"].items():
        if entry["git_checkout"]["repo_url"] == repo_url:
            if rel_path != ".":
                project_dir = checkout.root_dir.join(rel_path)
            break
    with api.context(cwd=project_dir):
        commit_msg = api.git.get_commit_message(name="get commit msg")
        return api.testsharder.extract_multipliers(commit_msg)
def report_multipliers(api, shards, gerrit_change, build_url):
    """Comments on the Gerrit change if any multiplier shards were created."""
    # A multiplier shard's name starts with "multiplied:".
    # TODO(fxb/51896): Remove dependency on shard name.
    if not any(s.name.startswith("multiplied:") for s in shards):
        return
    try:
        api.gerrit.set_review(
            "report multiplier shards",
            str(gerrit_change.change),
            message="A builder created multiplier shards. Click the following link for more details: %s"
            % build_url,
            test_data=api.json.test_api.output({}),
        )
    except api.step.StepFailure:
        # Comment failures shouldn't fail the build.
        pass
def GenTests(api):
    """Generates recipe simulation tests covering the main config paths."""

    def spec_data(
        build_type="debug",
        ninja_targets=(),
        sdk_subbuild=False,
        variants=(),
        device_type="QEMU",
        enforce_size_limits=False,
        run_tests=True,
        test_in_shards=True,
        gcs_bucket=None,
        pave=True,
        catapult_dashboard_master=None,
        catapult_dashboard_bot=None,
    ):
        # Builds a Fuchsia spec proto with the given knobs and wraps it in
        # step data simulating a successful spec load.
        test_spec = None
        if run_tests:
            test_spec = Fuchsia.Test(
                device_type=device_type,
                max_shard_size=0,
                target_shard_duration_secs=10 * 60,
                max_shards_per_env=8,
                timeout_secs=30 * 60,
                pool="fuchsia.tests",
                test_in_shards=test_in_shards,
                swarming_expiration_timeout_secs=10 * 60,
                swarming_io_timeout_secs=5 * 60,
                default_service_account="service_account",
                targets_serial=True,
                pave=pave,
                catapult_dashboard_master=catapult_dashboard_master,
                catapult_dashboard_bot=catapult_dashboard_bot,
            )
        spec = Fuchsia(
            checkout=Fuchsia.Checkout(
                manifest="minimal",
                project="integration",
                remote="https://fuchsia.googlesource.com/manifest",
                # Uploading is exercised whenever a bucket is provided.
                upload_results=bool(gcs_bucket),
            ),
            build=Fuchsia.Build(
                variants=variants,
                build_type=build_type,
                run_tests=run_tests,
                ninja_targets=ninja_targets,
                sdk_subbuild=sdk_subbuild,
                board="boards/x64.gni",
                product="products/core.gni",
                target="x64",
                enforce_size_limits=enforce_size_limits,
                upload_results=bool(gcs_bucket),
            ),
            test=test_spec,
            gcs_bucket=gcs_bucket,
            artifact_gcs_bucket="fuchsia-infra-artifacts",
        )
        return api.spec.spec_loaded_ok(step_name="load spec.build_init", message=spec)

    # Shared step data for resolving gitiles refs in tryjob-style tests.
    default_gitiles_refs_steps = api.gitiles.refs(
        "refs", ["refs/heads/master", "deadbeef",]
    )
    spec_remote = "https://fuchsia.googlesource.com/integration"
    properties = {
        # We rely on the buildbucket test API using this same
        # ID for ci_build_message and the builds returned by get().
        "parent_id": str(api.buildbucket.ci_build_message().id),
        "spec_remote": spec_remote,
    }
    yield (
        api.checkout.test("default", tryjob=False)
        + api.build.test("default")
        + spec_data(gcs_bucket="fuchsia-infra", run_tests=True, variants=["profile"])
        + api.properties(**properties)
    )
    # Exercises the led fallback path (parent_id is not a Buildbucket int).
    yield (
        api.checkout.test("non_numeric_parent_id", tryjob=False)
        + api.build.test("default")
        + spec_data(gcs_bucket="fuchsia-infra", run_tests=True)
        + api.properties(parent_id="not-a-number")
    )
    # A subbuild with tests but no parent must infra-fail early.
    yield (
        api.status_check.test("subbuild_no_parent_id", status="infra_failure")
        + spec_data(run_tests=True)
        + api.buildbucket.try_build()
        + api.build_input_resolver.set_gerrit_branch("master")
        + default_gitiles_refs_steps
        + api.properties(parent_id="")
    )
    # Test the case where the test spec includes fields that enable uploading
    # to the Catapult performance dashboard.
    yield (
        api.checkout.test("catapult_dashboard_upload_enabled", tryjob=False)
        + api.build.test("default")
        + spec_data(
            gcs_bucket="fuchsia-infra",
            run_tests=True,
            catapult_dashboard_master="example.fuchsia.global.ci",
            catapult_dashboard_bot="example-fuchsia-x64-nuc",
        )
        + api.properties(**properties)
    )
    yield (
        api.checkout.test("default_cq", tryjob=True)
        + api.build.test("default_cq", tryjob=True)
        + spec_data(run_tests=True)
        + api.buildbucket.try_build(
            # Values chosen to match the test data in jiri/test_api.py
            # example_source_manifest() so that we trigger the test multipliers
            # code path.
            project="manifest",
            git_repo="https://fuchsia.googlesource.com/manifest",
        )
        + api.build_input_resolver.set_gerrit_branch("master")
        + default_gitiles_refs_steps
        + api.properties(**properties)
    )
    # Multiplier shards present; the Gerrit comment step fails (retcode=1)
    # but the build must still succeed and isolate orchestration inputs.
    yield (
        api.checkout.test("default_multipliers", tryjob=True)
        + api.build.test("default_multipliers", tryjob=True, with_multipliers=True)
        + spec_data(run_tests=True)
        + api.buildbucket.try_build()
        + api.build_input_resolver.set_gerrit_branch("master")
        + default_gitiles_refs_steps
        + api.properties(**properties)
        + api.step_data("report multiplier shards", retcode=1)
        + api.post_process(post_process.MustRun, "isolate test orchestration inputs")
    )
    yield (
        api.checkout.test("build_type_release_not_run_tests", tryjob=False)
        + api.build.test("default", create_shards=False)
        + spec_data(
            build_type="release",
            gcs_bucket="fuchsia-infra",
            run_tests=False,
            enforce_size_limits=True,
        )
        + api.properties(**properties)
    )
    # SDK subbuild: isolates the sdk/archive ninja targets.
    yield (
        api.checkout.test("sdk", tryjob=False)
        + api.build.test("sdk", tryjob=False, create_shards=False)
        + spec_data(
            run_tests=False,
            ninja_targets=[
                "sdk/archive/core.tar.gz",
                "sdk/archive/fuchsia_dart.tar.gz",
            ],
            sdk_subbuild=True,
        )
        + api.properties(parent_id="sdk-id")
    )
    yield (
        api.checkout.test("not_test_in_shards")
        + api.build.test("", create_shards=False)
        + spec_data(run_tests=True, test_in_shards=False, pave=True)
        + api.properties(**properties)
    )
    # Size-check failure must fail the build after writing binary_sizes.
    yield (
        api.checkout.test("check_sizes_fails", tryjob=False, status="failure")
        + api.build.test("check_sizes_fails", create_shards=False, status="failure")
        + spec_data(gcs_bucket="fuchsia-infra")
        + api.properties(**properties)
        + api.override_step_data("check sizes", retcode=1)
    )
    test = (
        api.checkout.test("cq_perfcompare", tryjob=True)
        + api.build.test("default", tryjob=True)
        + spec_data(run_tests=True)
        + api.buildbucket.try_build(
            # Use an example repo that is different from integration.git
            # (which is still used for spec files).
            git_repo="https://fuchsia.googlesource.com/third_party/example_repo"
        )
        + api.build_input_resolver.set_gerrit_branch("master")
        + default_gitiles_refs_steps
        + api.properties(perfcompare=True, **properties)
    )
    # The "without CL" pass re-runs sharding; reuse the same shard step data
    # under the nested step name.
    test.step_data["build without CL.create test shards"] = test.step_data[
        "create test shards"
    ]
    yield test