# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia and isolating build artifacts."""
from PB.infra.fuchsia import Fuchsia
from recipe_engine import post_process
from recipe_engine.recipe_api import Property
TEST_MULTIPLIER_KEY = "MULTIPLY"
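# Multipliers are parsed from the triggering CL's commit message. As an
# illustration (the exact syntax is owned by the testsharder module, so treat
# this as an assumed example), a commit message footer might look like:
#
#   MULTIPLY: [{"name": "foo_tests", "total_runs": 30}]
#
# which requests that foo_tests be run 30 times.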
# These constants identify where the isolated hash appears in this recipe's
# output when building SDK archives. They must be kept in sync with sdk.py.
ISOLATE_STEP_NAME = "isolate artifacts"
ISOLATED_OUTPUT_KEY = "isolated_output_hash"
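# A consumer such as sdk.py can then recover the hash from the subbuild's
# output properties. A minimal sketch (illustrative only; not the actual
# sdk.py code):
#
#   sdk_hash = subbuild_output_properties[ISOLATED_OUTPUT_KEY]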
DEPS = [
"fuchsia/artifacts",
"fuchsia/build",
"fuchsia/build_input_resolver",
"fuchsia/buildbucket_util",
"fuchsia/checkout",
"fuchsia/fuchsia",
"fuchsia/gerrit",
"fuchsia/git",
"fuchsia/gitiles",
"fuchsia/jiri",
"fuchsia/recipe_testing",
"fuchsia/spec",
"fuchsia/sso",
"fuchsia/status_check",
"fuchsia/swarming_retry",
"fuchsia/testing_requests",
"fuchsia/testsharder",
"recipe_engine/buildbucket",
"recipe_engine/context",
"recipe_engine/file",
"recipe_engine/isolated",
"recipe_engine/json",
"recipe_engine/path",
"recipe_engine/platform",
"recipe_engine/properties",
"recipe_engine/python",
"recipe_engine/raw_io",
"recipe_engine/step",
"recipe_engine/swarming",
]
PROPERTIES = {
"parent_id": Property(
# This is a string because led IDs are not integers.
kind=str,
help="Parent build's buildbucket or led id",
default=None,
),
"spec_remote": Property(
kind=str,
help="URL of the specs git repository",
default="https://fuchsia.googlesource.com/integration",
),
"spec_revision": Property(
kind=str, help="The revision of spec_remote to fetch", default=None
),
"perfcompare": Property(
kind=bool,
help=(
'Enable perfcompare mode: Build the "without CL" revision, '
'not just the "with CL" revision.'
),
default=False,
),
"comment_led": Property(
kind=bool,
        help="Whether to report multiplier shards via a Gerrit comment when launched by led.",
default=False,
),
}
def RunSteps(
api, parent_id, spec_remote, spec_revision, perfcompare, comment_led,
):
spec, spec_revision = api.fuchsia.setup_with_spec(spec_remote, spec_revision)
bb_input = api.buildbucket.build.input
# TODO(fxb/39958): Retrying all failed builds in Gerrit launches subbuilds as
# well, which fail because they're not triggered by a parent build. Once
# that's resolved we can get rid of this check.
if spec.build.run_tests and not parent_id:
raise api.python.infra_failing_step(
"no parent_id", "subbuilds can only be triggered by parent builds"
)
checkout = api.checkout.from_spec(spec.checkout)
with api.step.nest("got_revision") as presentation:
# Set got_revision to the baseline integration revision for the binary-size
# Gerrit plugin.
# If triggered by integration CQ, then build_input_resolver will ensure the
# input gitiles_commit is what we want.
if (
bb_input.gerrit_changes
and bb_input.gerrit_changes[0].project == "integration"
):
got_revision = bb_input.gitiles_commit.id
# Otherwise just use the revision that we actually checked out.
else:
got_revision = [
repo for repo in checkout.source_info if repo["name"] == "integration"
][0]["revision"]
presentation.properties["got_revision"] = got_revision
# Load test multipliers before building so we can fail fast if the JSON is
# malformed, which avoids wasting a build.
multipliers = []
if bb_input.gerrit_changes and spec.test.test_in_shards:
with api.step.nest("test multipliers") as presentation:
multipliers = read_multipliers(api, bb_input, checkout)
presentation.logs["multipliers"] = api.json.dumps(
[m.render_to_jsonish() for m in multipliers], indent=2
)
    # Give SDK subbuilds their own upload namespaces so that multiple
    # subbuilds do not clobber each other's results.
if not parent_id or spec.build.sdk_subbuild:
upload_namespace = api.buildbucket_util.id
else:
upload_namespace = parent_id
if spec.checkout.upload_results:
assert spec.gcs_bucket, "gcs_bucket must be set if checkout.upload_results is"
checkout.upload_results(spec.gcs_bucket, namespace=upload_namespace)
if spec.artifact_gcs_bucket and not api.platform.is_mac:
checkout.upload_source_manifest(
spec.artifact_gcs_bucket, namespace=upload_namespace
)
repo_path = None
if perfcompare:
# Do sanity checks to catch problems before doing the build.
assert (
bb_input.gerrit_changes
), "perfcompare mode is for CQ builds only, not CI: no Gerrit changes found"
project = bb_input.gerrit_changes[0].project
with api.context(cwd=checkout.root_dir):
repo_path = api.jiri.project(projects=[project]).json.output[0]["path"]
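            # Each entry returned by `api.jiri.project` is assumed to look
            # roughly like (illustrative):
            #
            #   {"name": "example_repo", "path": "/checkout/path/to/example_repo"}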
assert repo_path, (
"Unable to find the path of the Git repo that"
" the CL applies to for perfcompare mode"
)
# Build infratools.
spec.build.universe_packages.append("//bundles:infratools")
spec.build.ninja_targets.append("bundles:infratools")
# In perfcompare mode, we want each test to run a predictable number
# of times, regardless of the files being changed.
use_affected_tests = not perfcompare
run_build_steps(
api,
spec,
parent_id,
upload_namespace,
checkout,
bb_input,
multipliers,
without_cl=False,
comment_led=comment_led,
use_affected_tests=use_affected_tests,
)
if perfcompare:
with api.step.nest("build without CL"):
# Unapply the topmost Git commit that was applied from Gerrit. If
# the CQ is testing a stack of multiple CLs from Gerrit, the other
# CLs are left applied.
# TODO(mseaborn): This does not handle cases where the CL changed
# Jiri manifest files or contained a patches.json file.
api.git.raw_checkout(
step_name='git checkout of "without CL" revision',
ref="HEAD^",
directory=repo_path,
)
run_build_steps(
api,
spec,
parent_id,
upload_namespace,
checkout,
bb_input,
multipliers,
without_cl=True,
comment_led=comment_led,
use_affected_tests=use_affected_tests,
)
def run_build_steps(
api,
spec,
parent_id,
upload_namespace,
checkout,
bb_input,
modifiers,
without_cl,
comment_led,
use_affected_tests,
):
if spec.build.upload_results:
        assert spec.gcs_bucket, "gcs_bucket must be set if build.upload_results is"
if without_cl:
upload_namespace += "_without_cl"
# If SDK subbuild, set SDK ID to parent ID.
sdk_id = parent_id if spec.build.sdk_subbuild else None
build_results = api.build.from_spec(
spec.build,
checkout,
pave=spec.test.pave,
sdk_id=sdk_id,
gcs_bucket=spec.gcs_bucket,
buildstats_upload_namespace=upload_namespace,
)
with api.step.nest("check if build skipped") as presentation:
presentation.properties["skipped_because_unaffected"] = not build_results
if not build_results:
return
    # For simplicity, affected_tests_file and affected_tests_max_attempts are
    # passed into testsharder.execute() even if use_affected_tests is False;
    # initialize them here so we can do that.
affected_tests_file = None
affected_tests_max_attempts = 0
if spec.build.run_tests and not without_cl:
affected_tests_file, no_work = build_results.calculate_affected_tests(bb_input)
with api.step.nest("record affected_tests_no_work") as presentation:
presentation.properties["affected_tests_no_work"] = no_work
if no_work and not api.recipe_testing.enabled:
return
max_attempts_per_test = 1
if not spec.test.retry_task_on_test_failure:
max_attempts_per_test = (
spec.test.max_attempts or api.swarming_retry.DEFAULT_MAX_ATTEMPTS
)
affected_tests_max_attempts = max_attempts_per_test
# Add modifiers to specify that these tests are affected.
# TODO(fxbug.dev/50301): Remove this file I/O and affected_test_modifiers().
# It should be handled by testsharder.
affected_tests = api.file.read_text(
"read affected tests", affected_tests_file, test_data="test1\ntest2\n",
).splitlines()
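        # The affected tests file is expected to contain one test name per
        # line, matching the test_data above:
        #
        #   test1
        #   test2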
if (
not spec.test.affected_tests_multiply_threshold
and use_affected_tests
and affected_tests
):
modifiers.extend(
api.testsharder.affected_test_modifiers(
affected_tests, affected_tests_max_attempts
)
)
# Set max_attempts for unaffected tests from the spec.
max_attempts_per_test = (
spec.test.max_attempts_per_test or max_attempts_per_test
)
if use_affected_tests or not spec.test.retry_task_on_test_failure:
# Add a default modifier to set max attempts for any other tests.
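            # Rendered to JSON, this modifier looks roughly like (illustrative):
            #
            #   {"name": "*", "total_runs": -1, "max_attempts": 2}
            #
            # where "*" is taken to match every test without a more specific
            # modifier and total_runs=-1 to leave the run count unchanged.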
modifiers.append(
api.testsharder.TestModifier(
name="*", total_runs=-1, max_attempts=max_attempts_per_test,
)
)
# TODO(garymm): assert spec.gcs_bucket set if upload_results set.
if spec.gcs_bucket and spec.build.upload_results:
build_results.upload(
gcs_bucket=spec.gcs_bucket,
is_release_version=spec.checkout.is_release_version,
namespace=upload_namespace,
)
build_results.check_binary_sizes()
# In SDK subbuild mode, isolate SDK archive and ninja targets.
if spec.build.sdk_subbuild:
sdk_archive_path = build_results.fuchsia_build_dir.join("sdk", "archive")
isolated = api.isolated.isolated(sdk_archive_path)
sdk_ninja_targets = [
target
for target in spec.build.ninja_targets
if target.startswith("sdk/archive")
]
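        # Each such target maps directly to a file under the build directory,
        # e.g. the target "sdk/archive/core.tar.gz" resolves to
        # <fuchsia_build_dir>/sdk/archive/core.tar.gz.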
for ninja_target in sdk_ninja_targets:
isolated.add_file(
path=build_results.fuchsia_build_dir.join(*ninja_target.split("/"))
)
sdk_archive_isolated_hash = isolated.archive(ISOLATE_STEP_NAME)
api.step.active_result.presentation.properties[
ISOLATED_OUTPUT_KEY
] = sdk_archive_isolated_hash
# Must be set before testing_requests.task_requests() is called.
api.artifacts.gcs_bucket = spec.artifact_gcs_bucket
api.artifacts.uuid = upload_namespace
send_comment = True
if spec.build.run_tests:
if parent_id.isdigit():
# Use parent build so that testing task requests refer to
# that build, which actually orchestrates testing.
buildbucket_build = api.buildbucket.get(int(parent_id))
# If it's a try build, the parent build will not have its gitiles_commit
# populated (it's populated at runtime by `build_input_resolver`, but
# that doesn't change the input values stored in Buildbucket). So we need
# to populate it with the same commit that `build_input_resolver`
# resolved for the subbuild.
buildbucket_build.input.gitiles_commit.CopyFrom(bb_input.gitiles_commit)
build_url = "https://ci.chromium.org/b/%s" % buildbucket_build.id
else:
# When the parent was launched by led, it's not possible to retrieve
# the parent build, so we fall back to using our own build.
# This is technically incorrect and any tests that rely on having
# correct buildbucket metadata may fail when run via led. Ideally
# we wouldn't have any tests that knew about buildbucket, but
# for now this is OK since none of those tests run in recipes CQ,
# which uses led to test recipes changes.
buildbucket_build = api.buildbucket.build
build_url = "https://ci.chromium.org/swarming/task/%s?server=%s" % (
api.swarming.task_id,
api.buildbucket.build.infra.swarming.hostname,
)
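            # For example (values illustrative), a Buildbucket parent yields
            # https://ci.chromium.org/b/8945511751514863184, whereas a led
            # parent yields something like
            # https://ci.chromium.org/swarming/task/4f3a1b2c?server=chromium-swarm.appspot.com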
if not comment_led:
send_comment = False
if spec.test.test_in_shards:
shards = api.testsharder.execute(
"create test shards",
testsharder_path=build_results.tool("testsharder"),
build_dir=build_results.fuchsia_build_dir,
max_shard_size=spec.test.max_shard_size,
target_duration_secs=spec.test.target_shard_duration_secs,
max_shards_per_env=spec.test.max_shards_per_env,
modifiers=modifiers,
tags=spec.build.environment_tags,
# TODO(fxbug.dev/50301): Remove "and" once rolled out.
use_affected_tests=use_affected_tests
and spec.test.affected_tests_multiply_threshold,
affected_tests_file=affected_tests_file,
affected_tests_multiply_threshold=spec.test.affected_tests_multiply_threshold,
affected_tests_max_attempts=affected_tests_max_attempts,
)
if bb_input.gerrit_changes and send_comment:
gerrit_change = bb_input.gerrit_changes[0]
report_multipliers(api, shards, gerrit_change, build_url)
task_requests = api.testing_requests.task_requests(
build_results,
buildbucket_build,
spec.test.per_test_timeout_secs,
spec.test.pool,
shards,
spec.test.swarming_expiration_timeout_secs,
spec.test.swarming_io_timeout_secs,
spec.test.use_runtests,
spec.test.timeout_secs,
default_service_account=spec.test.default_service_account,
pave=spec.test.pave,
targets_serial=spec.test.targets_serial,
catapult_dashboard_master=spec.test.catapult_dashboard_master,
catapult_dashboard_bot=spec.test.catapult_dashboard_bot,
release_version=checkout.release_version,
zircon_args=spec.test.zircon_args,
)
else:
task_requests = api.testing_requests.deprecated_task_requests(
build_results,
buildbucket_build,
api.testing_requests.deprecated_test_cmds(spec.test),
spec.test.device_type,
spec.test.pool,
spec.test.timeout_secs,
spec.test.pave,
swarming_expiration_timeout_secs=spec.test.swarming_expiration_timeout_secs,
swarming_io_timeout_secs=spec.test.swarming_io_timeout_secs,
default_service_account=spec.test.default_service_account,
zircon_args=spec.test.zircon_args,
)
orchestration_inputs = api.build.test_orchestration_inputs_from_build_results(
build_results,
task_requests,
include_generated_sources=("profile" in spec.build.variants),
)
orchestration_inputs_hash = orchestration_inputs.isolate()
step_result = api.step("logging orchestration_inputs_hash", cmd=None)
dest_property = api.build.test_orchestration_inputs_property_name(without_cl)
step_result.presentation.properties[dest_property] = orchestration_inputs_hash
# The checkout root is needed for the coverage recipe to remap file
# paths for source files from this checkout to the checkout created in
# the coverage recipe where we call covargs.
step_result.presentation.properties[
api.checkout.ROOT_DIR_PROPERTY
] = api.path.abspath(checkout.root_dir)
# Must be done after testing_requests.task_requests() is called, because that
# modifies the filesystem images. TODO(garymm,joshuaseaton): once legacy_qemu
# code paths are removed, remove this comment as it will become false.
if spec.artifact_gcs_bucket:
api.artifacts.upload(
"upload artifacts", build_results, sign_artifacts=spec.build.sign_artifacts
)
def read_multipliers(api, bb_input, checkout):
    # Find the checkout directory of the project the Gerrit change applies
    # to, using the checkout's source info.
gerrit_change = bb_input.gerrit_changes[0]
change_remote = "https://%s/%s" % (
gerrit_change.host.replace("-review", ""),
gerrit_change.project,
)
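    # For example, a change on host "fuchsia-review.googlesource.com" in
    # project "fuchsia" yields "https://fuchsia.googlesource.com/fuchsia",
    # which is matched against the checkout's source info below.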
project_dir = None
for repo in checkout.source_info:
relpath = repo["relativePath"]
if api.sso.sso_to_https(repo["remote"]) == change_remote:
if relpath == ".":
project_dir = checkout.root_dir
else:
project_dir = checkout.root_dir.join(relpath)
break
if not project_dir:
return []
with api.context(cwd=project_dir):
commit_msg = api.git.get_commit_message(step_name="get commit msg")
return api.testsharder.extract_multipliers(commit_msg)
def report_multipliers(api, shards, gerrit_change, build_url):
has_multiplier_shards = False
for shard in shards:
        # A multiplier shard's name starts with "multiplied:".
# TODO(fxb/51896): Remove dependency on shard name.
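        # e.g. a shard named "multiplied:QEMU-foo_tests" (suffix format
        # assumed) counts as a multiplier shard.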
if shard.name.startswith("multiplied:"):
has_multiplier_shards = True
break
if has_multiplier_shards:
try:
api.gerrit.set_review(
"report multiplier shards",
str(gerrit_change.change),
message=(
"A builder created multiplier shards. Click the following "
"link for more details: %s" % build_url
),
test_data=api.json.test_api.output({}),
)
except api.step.StepFailure:
# Comment failures shouldn't fail the build.
pass
def GenTests(api):
def spec_data(
build_type="debug",
ninja_targets=(),
sdk_subbuild=False,
variants=(),
device_type="QEMU",
run_tests=True,
test_in_shards=True,
gcs_bucket=None,
pave=True,
catapult_dashboard_master=None,
catapult_dashboard_bot=None,
max_attempts_per_test=0,
skip_if_unaffected=False,
retry_task_on_test_failure=False,
):
test_spec = None
if run_tests:
test_spec = Fuchsia.Test(
device_type=device_type,
max_shard_size=0,
target_shard_duration_secs=10 * 60,
max_shards_per_env=8,
timeout_secs=30 * 60,
pool="fuchsia.tests",
test_in_shards=test_in_shards,
swarming_expiration_timeout_secs=10 * 60,
swarming_io_timeout_secs=5 * 60,
default_service_account="service_account",
targets_serial=True,
pave=pave,
catapult_dashboard_master=catapult_dashboard_master,
catapult_dashboard_bot=catapult_dashboard_bot,
max_attempts_per_test=max_attempts_per_test,
retry_task_on_test_failure=retry_task_on_test_failure,
)
spec = Fuchsia(
checkout=Fuchsia.Checkout(
manifest="minimal",
project="integration",
remote="https://fuchsia.googlesource.com/manifest",
upload_results=bool(gcs_bucket),
),
build=Fuchsia.Build(
variants=variants,
build_type=build_type,
run_tests=run_tests,
ninja_targets=ninja_targets,
sdk_subbuild=sdk_subbuild,
board="boards/x64.gni",
product="products/core.gni",
target="x64",
upload_results=bool(gcs_bucket),
skip_if_unaffected=skip_if_unaffected,
use_goma=True,
stats_gcs_bucket=gcs_bucket,
),
test=test_spec,
gcs_bucket=gcs_bucket,
artifact_gcs_bucket="fuchsia-infra-artifacts",
)
return api.spec.spec_loaded_ok(step_name="load spec.build_init", message=spec)
default_gitiles_refs_steps = api.gitiles.refs(
"refs", ["refs/heads/master", "deadbeef",]
)
spec_remote = "https://fuchsia.googlesource.com/integration"
properties = {
# We rely on the buildbucket test API using this same
# ID for ci_build_message and the builds returned by get().
"parent_id": str(api.buildbucket.ci_build_message().id),
"spec_remote": spec_remote,
}
source_info = [
{
"name": "integration",
"remote": spec_remote,
"revision": "a491082dc1b632bbcd60ba3618d20b503c2de738",
"relativePath": "integration",
},
{
"name": "fuchsia",
"remote": "https://fuchsia.googlesource.com/fuchsia",
"revision": "a491082dc1b632bbcd60ba3618d20b503c2de738",
"relativePath": ".",
},
]
yield (
api.checkout.test("default", tryjob=False, source_info=source_info)
+ api.build.test("default")
+ spec_data(gcs_bucket="fuchsia-infra", run_tests=True, variants=["profile"])
+ api.properties(**properties)
)
yield (
api.checkout.test(
"non_numeric_parent_id", tryjob=False, source_info=source_info
)
+ api.build.test("default")
+ spec_data(gcs_bucket="fuchsia-infra", run_tests=True)
+ api.properties(parent_id="not-a-number")
)
yield (
api.status_check.test("subbuild_no_parent_id", status="infra_failure")
+ spec_data(run_tests=True)
+ api.buildbucket.try_build()
+ api.build_input_resolver.set_gerrit_branch("master")
+ default_gitiles_refs_steps
+ api.properties(parent_id="")
)
# Test the case where the test spec includes fields that enable uploading
# to the Catapult performance dashboard.
yield (
api.checkout.test(
"catapult_dashboard_upload_enabled", tryjob=False, source_info=source_info,
)
+ api.build.test("default")
+ spec_data(
gcs_bucket="fuchsia-infra",
run_tests=True,
catapult_dashboard_master="example.fuchsia.global.ci",
catapult_dashboard_bot="example-fuchsia-x64-nuc",
)
+ api.properties(**properties)
)
yield (
api.checkout.test("default_cq", tryjob=True, source_info=source_info)
+ api.build.test("default_cq", tryjob=True)
+ spec_data(gcs_bucket="fuchsia-infra", run_tests=True, max_attempts_per_test=5)
+ api.buildbucket.try_build(
# Values chosen to match source_info so that we trigger the test
# multipliers code path.
project="integration",
git_repo=spec_remote,
)
+ api.build_input_resolver.set_gerrit_branch("master")
+ default_gitiles_refs_steps
+ api.properties(**properties)
)
    # Test that max_attempts_per_test is ignored if no affected tests are
    # detected, and that the default number of within-task attempts is set to
    # 1 if retry_task_on_test_failure is True.
yield (
api.checkout.test(
"default_cq_no_affected_retry_task_on_test_failure",
tryjob=True,
source_info=source_info,
)
+ api.build.test(
"default_cq_no_affected_retry_task_on_test_failure", tryjob=True
)
+ spec_data(
gcs_bucket="fuchsia-infra",
run_tests=True,
# This spec field will be ignored if no affected tests are detected.
max_attempts_per_test=5,
# This will cause default within-task attempts to be 1.
retry_task_on_test_failure=True,
)
+ api.buildbucket.try_build(
# Values chosen to match source_info so that we trigger the test
# multipliers code path.
project="fuchsia",
git_repo="https://fuchsia.googlesource.com/fuchsia",
)
+ api.build_input_resolver.set_gerrit_branch("master")
+ default_gitiles_refs_steps
+ api.step_data(
"affected tests.find affected tests", api.raw_io.stream_output("")
)
+ api.properties(**properties)
)
yield (
api.checkout.test("skip_if_unaffected", tryjob=True, source_info=source_info)
+ api.build.test("skip_if_unaffected", tryjob=True, create_shards=False)
+ spec_data(gcs_bucket="fuchsia-infra", skip_if_unaffected=True)
+ api.build_input_resolver.set_gerrit_branch("master")
+ default_gitiles_refs_steps
+ api.properties(**properties)
)
yield (
api.checkout.test(
"affected_tests_no_work", tryjob=True, source_info=source_info
)
+ api.build.test(
"affected_tests_no_work",
tryjob=True,
create_shards=False,
affected_tests_no_work=True,
)
+ api.buildbucket.try_build()
+ api.build_input_resolver.set_gerrit_branch("master")
+ default_gitiles_refs_steps
+ spec_data()
+ api.properties(**properties)
)
yield (
api.checkout.test("default_multipliers", tryjob=True, source_info=source_info)
+ api.build.test("default_multipliers", tryjob=True, with_multipliers=True)
+ spec_data(run_tests=True)
+ api.buildbucket.try_build()
+ api.build_input_resolver.set_gerrit_branch("master")
+ default_gitiles_refs_steps
+ api.properties(**properties)
+ api.step_data("report multiplier shards", retcode=1)
+ api.post_process(post_process.MustRun, "isolate test orchestration inputs")
)
yield (
api.checkout.test("sdk", tryjob=False, source_info=source_info)
+ api.build.test("sdk", tryjob=False, create_shards=False)
+ spec_data(
run_tests=False,
ninja_targets=[
"sdk/archive/core.tar.gz",
"sdk/archive/fuchsia_dart.tar.gz",
],
sdk_subbuild=True,
)
+ api.properties(parent_id="sdk-id")
)
yield (
api.checkout.test("not_test_in_shards", source_info=source_info)
+ api.build.test("", create_shards=False)
+ spec_data(run_tests=True, test_in_shards=False, pave=True)
+ api.properties(**properties)
)
test = (
api.checkout.test("cq_perfcompare", tryjob=True, source_info=source_info)
+ api.build.test("default", tryjob=True)
+ spec_data(gcs_bucket="fuchsia-infra", run_tests=True, max_attempts_per_test=5)
+ api.buildbucket.try_build(
# Use an example repo that is different from integration.git
# (which is still used for spec files).
git_repo="https://fuchsia.googlesource.com/third_party/example_repo"
)
+ api.build_input_resolver.set_gerrit_branch("master")
+ default_gitiles_refs_steps
+ api.properties(perfcompare=True, **properties)
)
test.step_data["build without CL.create test shards"] = test.step_data[
"create test shards"
]
yield test