blob: afcd770b22dd6a9ee72283d33fbca2bc0a476faa [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia."""
from recipe_engine.config import List
from recipe_engine.recipe_api import Property
# Recipe modules this recipe depends on. "fuchsia/..." modules live in this
# project's recipe repo; "recipe_engine/..." modules ship with the recipe
# engine itself. The engine injects each one as an attribute on `api`.
DEPS = [
    "fuchsia/artifacts",
    "fuchsia/build",
    "fuchsia/buildbucket_util",
    "fuchsia/checkout",
    "fuchsia/recipe_testing",
    "fuchsia/testing_requests",
    "fuchsia/testsharder",
    "recipe_engine/buildbucket",
    "recipe_engine/cipd",
    "recipe_engine/file",
    "recipe_engine/path",
    "recipe_engine/platform",
    "recipe_engine/properties",
    "recipe_engine/raw_io",
    "recipe_engine/step",
    "recipe_engine/swarming",
]
# Input properties accepted by this recipe. Each entry maps a property name
# to a recipe_engine Property declaration (kind, help text, default).
PROPERTIES = {
    "gcs_bucket": Property(
        kind=str,
        help="GCS bucket for uploading checkout, build, and test results",
        default="###fuchsia-build###",
    ),
    "artifact_gcs_bucket": Property(
        kind=str,
        help="Artifacts GCS bucket for uploading build results",
        default="artifact_gcs_bucket",
    ),
    "release_branch": Property(
        # BUG FIX: was `List(basestring)`. `basestring` exists only in
        # Python 2 and raises NameError under the Python 3 recipe engine;
        # `str` is the correct element kind.
        kind=List(str),
        help="The release branch corresponding to the checkout.",
        default=None,
    ),
    "release_version": Property(
        kind=str, help="The release version of the checkout.", default=None
    ),
    "extract_artifacts": Property(
        kind=bool, help="Whether to extract the test input artifacts", default=False
    ),
    "sdk_id": Property(kind=str, help="sdk_id to set in GN", default="sdk-id"),
    "incremental": Property(
        kind=bool, help="Whether to build incrementally", default=False
    ),
}
def RunSteps(
    api,
    gcs_bucket,
    artifact_gcs_bucket,
    release_branch,
    release_version,
    extract_artifacts,
    sdk_id,
    incremental,
):
    """Drive the fuchsia/build module end-to-end for test coverage."""
    src_root = api.path["start_dir"]
    checkout_results = api.checkout.CheckoutResults(
        root_dir=src_root,
        snapshot_file=src_root.join("snapshot"),
        release_branch=release_branch,
        release_version=release_version,
        source_info={},
    )

    build = api.build.with_options(
        checkout_results,
        fint_params_path="infra/specs/fint-params.textproto",
        gcs_bucket=gcs_bucket,
        incremental=incremental,
        sdk_id=sdk_id,
        stats_gcs_bucket=gcs_bucket,
        upload_namespace="namespace",
    )
    if not build:
        # A falsy result means the build was skipped, which must only
        # happen when recipe testing is disabled.
        assert not api.recipe_testing.enabled
        return

    build.filtered_compdb(["third_party"])
    # Calling a second time exercises reuse of the file created above.
    build.filtered_compdb(["third_party"])

    # The following attribute accesses exist purely for test coverage.
    # pylint: disable=pointless-statement
    build.compdb_path
    build.generated_sources
    build.gn_results.zbi_tests
    build.authorized_key
    build.private_key
    build.affected_tests
    build.no_work
    build.cts_artifacts
    try:
        build.tool("does-not-exist", mock_for_tests=False)
    except api.build.NoSuchTool:
        pass
    # pylint: enable=pointless-statement

    try:
        build.check_binary_sizes()
    except api.step.StepFailure:
        pass

    build.upload_tracing_data(gcs_bucket=artifact_gcs_bucket, namespace="namespace")
    build.upload(gcs_bucket=gcs_bucket, is_release_version=bool(release_version))

    # Collect test specifications from the build and shard them.
    test_shards = api.testsharder.execute(
        "create test shards",
        testsharder_path=build.tool("testsharder"),
        build_dir=build.build_dir,
        tags=[],
    )

    if extract_artifacts:
        # Round-trip a TestOrchestrationInputs object for test coverage.
        # The artifacts module must be configured before task_requests().
        api.artifacts.gcs_bucket = gcs_bucket
        api.artifacts.namespace = "namespace"
        requests = api.testing_requests.task_requests(
            build,
            api.buildbucket.build,
            0,
            "DUMMY.POOL",
            test_shards,
            0,
            0,
            True,
            timeout_secs=0,
        )
        orchestration_inputs = api.build.test_orchestration_inputs_from_build_results(
            build, requests, include_generated_sources=True
        )
        orchestration_inputs.upload()
        api.build.download_test_orchestration_inputs("")
        api.build.test_orchestration_inputs_property_name(False)
        api.build.test_orchestration_inputs_property_name(True)
def GenTests(api):
    """Yield recipe-engine test cases covering RunSteps' code paths.

    Each yielded value is recipe test data; the `+` operator merges test
    data pieces, so the order of the operands is significant.
    """
    # Baseline: a plain CI build that produces shards.
    yield api.buildbucket_util.test("default") + api.build.create_shards()
    # CQ (tryjob) build, with shard multipliers enabled.
    yield (
        api.buildbucket_util.test("default_cq", tryjob=True)
        + api.build.create_shards(with_multipliers=True)
    )
    # Tryjob where fint decides the change affects nothing, so the build
    # is skipped entirely (RunSteps early-returns).
    yield (
        api.buildbucket_util.test("skip_if_unaffected", tryjob=True)
        + api.build.fint_set_artifacts(skip_build=True)
    )
    # Integration-repo changes must never be skipped, even if fint says
    # the build could be.
    yield (
        api.buildbucket_util.test(
            "no_skip_for_integration_change",
            tryjob=True,
            repo="integration",
        )
        + api.build.fint_set_artifacts(skip_build=True)
        + api.build.create_shards()
    )
    # Recipe testing forces a full build regardless of fint's skip signal.
    yield (
        api.buildbucket_util.test("no_skip_if_recipe_testing_enabled", tryjob=True)
        + api.properties(**{"$fuchsia/recipe_testing": {"enabled": True}})
        + api.build.fint_set_artifacts(skip_build=True)
        + api.build.create_shards()
    )
    yield (
        # A subbuild should use the duration file corresponding to its parent
        # builder.
        api.buildbucket_util.test(
            "subbuild", repo="fuchsia", builder="builder-subbuild"
        )
        + api.build.create_shards()
    )
    # Build on a Mac host platform.
    yield (
        api.buildbucket_util.test("mac")
        + api.platform.name("mac")
        + api.build.create_shards()
    )
    # Release build identified by an explicit release version.
    yield (
        api.buildbucket_util.test("release_with_version")
        + api.properties(release_version="0.19700101.0.77")
        + api.build.create_shards()
    )
    # Test signing a release build_results.
    yield (
        api.buildbucket_util.test("release_with_signature")
        + api.properties(release_version="0.19700101.0.77")
        + api.step_data(
            "upload build results.run signing script",
            stdout=api.raw_io.output("signature"),
        )
        + api.build.create_shards()
    )
    # fint set (gn gen) fails and reports a recognized failure summary.
    yield (
        api.buildbucket_util.test("fint_set_failure", status="failure")
        + api.step_data("build.gn gen", retcode=1)
        + api.build.fint_set_artifacts(failure_summary="gen failed")
    )
    # fint set fails without any failure summary.
    yield (
        api.buildbucket_util.test("fint_set_unrecognized_failure", status="failure")
        + api.step_data("build.gn gen", retcode=1)
        + api.build.fint_set_artifacts(failure_summary="")
    )
    # fint set reports a failure summary even though the step succeeded.
    yield (
        api.buildbucket_util.test(
            "fint_set_unexpected_failure_summary", status="failure"
        )
        + api.build.fint_set_artifacts(failure_summary="something failed")
    )
    # ninja fails with a recognized failure summary.
    yield (
        api.buildbucket_util.test("fint_build_failure", status="failure")
        + api.step_data("build.ninja", retcode=1)
        + api.build.fint_build_artifacts(failure_summary="build failed")
    )
    # ninja times out (5-hour limit).
    yield (
        api.buildbucket_util.test("fint_build_timeout", status="failure")
        + api.step_data("build.ninja", times_out_after=5 * 60 * 60)
    )
    # ninja fails without any failure summary.
    yield (
        api.buildbucket_util.test("fint_build_unrecognized_failure", status="failure")
        + api.step_data("build.ninja", retcode=1)
        + api.build.fint_build_artifacts(failure_summary="")
    )
    # fint build reports a failure summary even though the step succeeded.
    yield (
        api.buildbucket_util.test(
            "fint_build_unexpected_failure_summary", status="failure"
        )
        + api.build.fint_build_artifacts(failure_summary="something failed")
    )
    # Exercise the `extract_artifacts` branch of RunSteps.
    yield (
        api.buildbucket_util.test("extract_artifacts")
        + api.properties(**{"extract_artifacts": True})
        + api.testing_requests.task_requests_step_data(
            [api.swarming.example_task_request_jsonish()],
            "download test orchestration inputs.load task requests",
        )
        + api.step_data(
            "download test orchestration inputs.load triage sources",
            api.file.read_json(["config.triage", "other/config.triage"]),
        )
        + api.build.create_shards()
    )
    # Binary-size check fails on the sparse storage image.
    yield (
        api.buildbucket_util.test("storage_sparse_too_large")
        + api.path.exists(
            api.path["start_dir"].join("out", "not-default", "filesystem_sizes.json")
        )
        + api.step_data("check binary sizes.size_checker", retcode=1)
        + api.build.create_shards()
    )
    # Test case for generating build traces and bloaty analysis
    yield (
        api.buildbucket_util.test("upload_build_metrics")
        + api.path.exists(
            api.path["start_dir"].join(
                "out", "not-default", "obj", "build", "images", "system.snapshot"
            )
        )
        + api.build.create_shards()
    )
    # Test case for skipping blobstats uploading if the blobstats script fails.
    yield (
        api.buildbucket_util.test("blobstats_fails")
        + api.step_data("upload build results.blobstats", retcode=255)
        + api.build.create_shards()
    )
    # Override each toolchain via the $fuchsia/build module properties.
    yield (
        api.buildbucket_util.test("custom_toolchains")
        + api.properties(
            **{
                "$fuchsia/build": {
                    "clang_toolchain": {
                        "source": "cipd",
                        "version": api.cipd.make_resolved_version(None),
                    },
                    "gcc_toolchain": {
                        "source": "cipd",
                        "version": api.cipd.make_resolved_version(None),
                    },
                    "rust_toolchain": {
                        "source": "isolated",
                        "version": "abc123",
                    },
                }
            }
        )
        + api.build.fint_set_artifacts(use_goma=False)
        + api.build.create_shards()
    )
    # ninja fails AND the buildstats upload fails afterwards.
    yield (
        api.buildbucket_util.test(
            "clang_crash_in_fuchsia_and_buildstats_failure", status="failure"
        )
        + api.step_data("build.ninja", retcode=1)
        + api.step_data(
            "build.upload fuchsia-buildstats.json to ###fuchsia-build###", retcode=1
        )
    )
    # Buildstats upload failure alone does not fail the build.
    yield (
        api.buildbucket_util.test("upload_buildstats_failure")
        + api.step_data(
            "build.upload fuchsia-buildstats.json to ###fuchsia-build###", retcode=1
        )
        + api.build.create_shards()
    )
    # Test incremental build.
    yield (
        api.buildbucket_util.test("incremental_build")
        + api.properties(
            incremental=True,
        )
        + api.build.create_shards()
    )