# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" sdk.py - Builds, tests, and publishes Fuchsia SDKs.
# Execution overview
## Trigger companion image subbuilds
Optionally trigger companion images, which are built alongside the SDK to give
developers prebuilt images to test with.
## Trigger SDK subbuilds
Trigger x64 and arm64 subbuilds. We run them in parallel to speed up the build.
See https://fuchsia.dev/fuchsia-src/development/sdk#recipes for more info.
## Collect SDK subbuilds and merge archives
Invoke an in-tree script to merge the SDK archives produced by each SDK
subbuild, yielding the core SDK.
## Test the core SDK
Invoke an in-tree script to generate a bazel workspace using the core SDK, and
invoke bazel to run tests.
## Build the GN SDK
Invoke an in-tree script to generate the GN SDK based on the core SDK.
See https://fuchsia.dev/fuchsia-src/development/sdk/gn for more info.
## Test the GN SDK
Invoke an in-tree script to generate a bazel workspace using the GN SDK, and
invoke bazel to run tests.
## Collect companion image subbuilds
Collect companion images, if they were requested to be built.
## Publish
Publish the core and GN SDKs, and the optional companion images and associated
packages to GCS. Also publish the core and GN SDKs to CIPD. For official
releases, also apply refs e.g. "latest" to these uploads.
## Trigger external tests
Optionally trigger tests against an external CQ to test that the new SDK is
compatible.
## Collect external tests
If external tests were triggered, wait on CQ to complete, and report success or
fail.
## Trigger Fxt tests
Optionally trigger tests against Fxt to test that the new SDK is compatible.
## Collect Fxt tests
If Fxt tests were triggered, wait on Fxt to complete and report success or fail.
"""
import datetime
from google.protobuf import json_format as jsonpb
from PB.recipes.fuchsia.sdk import InputProperties
from PB.recipes.fuchsia.fuchsia.build import InputProperties as SubbuildProperties
PYTHON_VERSION_COMPATIBILITY = "PY3"
DEPS = [
"fuchsia/build",
"fuchsia/build_input_resolver",
"fuchsia/buildbucket_util",
"fuchsia/bundle_fetcher",
"fuchsia/cas_util",
"fuchsia/checkout",
"fuchsia/cipd_util",
"fuchsia/cl_util",
"fuchsia/display_util",
"fuchsia/fxt",
"fuchsia/git",
"fuchsia/gsutil",
"fuchsia/jiri",
"fuchsia/macos_sdk",
"fuchsia/release",
"fuchsia/sso",
"fuchsia/subbuild",
"fuchsia/tar",
"recipe_engine/buildbucket",
"recipe_engine/context",
"recipe_engine/file",
"recipe_engine/path",
"recipe_engine/platform",
"recipe_engine/properties",
"recipe_engine/raw_io",
"recipe_engine/step",
"recipe_engine/time",
]
# The name of the output file from bundle_fetcher describing the product
# bundles which should be uploaded with the SDK.
PRODUCT_BUNDLES_JSON = "product_bundles.json"
# The name of the output property under which fuchsia/build.py exposes the CAS
# digest of the SDK archives. Must be kept in sync with fuchsia/build.py.
SDK_ARCHIVE_OUTPUT_KEY = "isolated_output_hash"
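# CPU architectures for which SDK subbuilds are launched.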
TARGETS = ["arm64", "x64"]
PROPERTIES = InputProperties
def fetch_sdk_artifacts(api, checkout_root, builds):
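    """Download the SDK archive from each subbuild into the checkout."""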
with api.step.nest("fetch sdk artifacts"):
output_path = checkout_root.join("archives")
for build_id, build in builds.items():
build_id = str(build_id)
with api.step.nest(build_id):
digest = api.subbuild.get_property(
build.build_proto, SDK_ARCHIVE_OUTPUT_KEY
)
output_dir = output_path.join(build_id)
api.file.ensure_directory("makedirs", dest=output_dir)
api.cas_util.download(
step_name="download %s archive" % build_id,
digest=digest,
output_dir=output_dir,
)
def relocate_artifacts(api, builds, companion_images, sdk_id, gcs_bucket):
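    """Copy companion image outputs to well-known GCS paths based on sdk_id."""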
def relocate_artifact(api, src_bucket, dst_bucket, src_path, dst_path):
api.gsutil.copy(
src_bucket=src_bucket,
src=src_path,
dst_bucket=dst_bucket,
dst=dst_path,
link_name="gsutil.copy",
unauthenticated_url=False,
)
with api.step.nest("relocate sdk image(s)"):
for build in builds:
image_name = next(
image.name
for image in companion_images
if image.builder == build.builder.builder
)
with api.step.nest(image_name):
build_gcs_bucket = build.output.properties["gcs_bucket"].strip('"')
# build-archive.tgz
relocate_artifact(
api=api,
src_bucket=build_gcs_bucket,
dst_bucket=gcs_bucket,
src_path="builds/%s/build-archive.tgz" % build.id,
dst_path="development/%s/images/%s.tgz" % (sdk_id, image_name),
)
# packages.tar.gz
relocate_artifact(
api=api,
src_bucket=build_gcs_bucket,
dst_bucket=gcs_bucket,
src_path="builds/%s/packages.tar.gz" % build.id,
dst_path="development/%s/packages/%s.tar.gz" % (sdk_id, image_name),
)
def resolve_ref_to_update(api, ref_settings, remote, integration_repo, release_version):
"""Resolve which ref to update.
    Ref settings order is respected, i.e. the first branch that contains
    the input release version is used. If the release version is not
    reachable on any of the given branches, return None.
"""
for settings in ref_settings:
branch_head = api.git.get_remote_branch_head(
api.sso.sso_to_https(remote), settings.branch
)
with api.context(cwd=integration_repo):
api.git.fetch("origin", refspec=branch_head)
if release_version in api.release.get_release_versions(
ref=branch_head, repo_path=integration_repo
):
return settings.update_ref
return None
def run_external_tests(api, sdk_id, bucket, external_testing):
"""Dry-run SDK in an external Gerrit-based CQ.
The external repository must support SDK version and bucket overrides via
`version_filepath` and `bucket_filepath` files.
"""
api.cl_util.gerrit_host = external_testing.gerrit_host
api.cl_util.gerrit_project = external_testing.gerrit_project
change_info = api.cl_util.create_cl(
"create CL",
subject="[test] Dryrun SDK version %s" % sdk_id,
file_edits=[
(external_testing.bucket_filepath, bucket),
(external_testing.version_filepath, sdk_id),
(
external_testing.version_list_filepath,
external_testing.version_list_contents,
),
],
ref=external_testing.ref,
)
try:
# If we aren't explicitly triggering tryjobs, build ids are resolved
# during collection.
build_ids = None
patchset_num = change_info["revisions"][change_info["current_revision"]][
"number"
]
# If we are, build ids are provided by the trigger output.
if external_testing.trigger_tryjobs:
build_ids = [
b.id
for b in api.cl_util.trigger_tryjobs(
"trigger tryjobs",
change_num=change_info["number"],
patchset_num=patchset_num,
builders=external_testing.tryjobs,
gerrit_host=external_testing.tryjobs_gerrit_host,
)
]
else:
api.cl_util.trigger_cq(
"trigger CQ+1",
change_num=change_info["number"],
dry_run=True,
# If we're collecting tryjobs, don't wait on CQ to complete.
wait=not bool(external_testing.tryjobs),
timeout_secs=external_testing.timeout_secs,
)
if external_testing.tryjobs:
# Give tryjobs time to start after applying CQ label.
# This is only required in the case that we used CQ to trigger
# tryjobs, not when we have triggered the tryjobs explicitly.
if not external_testing.trigger_tryjobs:
api.time.sleep(external_testing.tryjobs_wait_secs)
api.cl_util.collect_tryjobs(
"collect tryjobs",
change_num=change_info["number"],
patchset_num=patchset_num,
# If build ids are not known, then we implicitly created tryjobs
# by applying CQ label, and must specify the builders to search
# for.
builders=external_testing.tryjobs if not build_ids else None,
build_ids=build_ids,
gerrit_host=external_testing.tryjobs_gerrit_host,
timeout_secs=external_testing.timeout_secs,
)
finally:
api.cl_util.abandon_cl("abandon CL", change_num=change_info["number"])
# TODO(fxbug.dev/90091): Deprecate tap_testing for fxt_tests.
def run_fxt_tests(api, presentation, sdk_id, bucket, tap_testing, fxt_tests):
"""Dry-run SDK against one or more TAP projects.
The TAP projects must respect custom Fuchsia SDK parameters on a CL, which
are specified by the bucket and namespace overrides.
"""
api.fxt.use_staging_host = fxt_tests.use_staging_host
tap_projects = fxt_tests.tap_projects
timeout_secs = fxt_tests.timeout_secs
    # Prefer the legacy tap_testing settings if its tap_projects is non-empty.
if tap_testing.tap_projects:
api.fxt.use_staging_host = tap_testing.use_staging_host
tap_projects = tap_testing.tap_projects
timeout_secs = tap_testing.timeout_secs
resp = api.fxt.launch(
step_name="launch",
bucket=bucket,
namespace=sdk_id,
name=api.buildbucket.builder_name,
tap_projects=tap_projects,
guitar_projects=fxt_tests.guitar_projects,
sdk_mode=True,
presentation=presentation,
)
try:
api.fxt.monitor(
step_name="monitor",
tap_request_id=resp["tap_request_id"],
guitar_project_request_ids=resp["guitar_project_request_ids"],
timeout_secs=timeout_secs,
)
finally:
api.fxt.cleanup(step_name="cleanup", workspace=resp["workspace"])
def RunSteps(api, props):
api.build_input_resolver.resolve(
default_project_url="https://fuchsia.googlesource.com/fuchsia"
)
build = api.buildbucket.build
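    # Default the SDK ID to this build's buildbucket ID; for release versions
    # it's overridden below with the release version.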
sdk_id = api.buildbucket_util.id
checkout = api.checkout.fuchsia_with_options(
manifest=props.manifest,
remote=props.remote,
)
integration_repo = checkout.root_dir.join("integration")
# Resolve the incoming release version and update refs.
update_refs = []
if props.is_release_version:
release_version = api.release.get_current_release_version(integration_repo)
# Override SDK ID with release version.
sdk_id = str(release_version).replace("releases/", "")
update_ref = resolve_ref_to_update(
api=api,
ref_settings=props.ref_settings,
remote=props.remote,
integration_repo=integration_repo,
release_version=release_version,
)
if update_ref:
update_refs.append(update_ref)
image_builds = {}
if props.companion_images:
with api.step.nest("launch image subbuilds") as presentation:
image_builds = api.subbuild.launch(
builder_names=[i.builder for i in props.companion_images],
presentation=presentation,
# TODO(olivernewman): Swarming currently breaks if many builds
# are launched simultaneously with the same
# swarming_parent_run_id. Set the swarming_parent_run_id param
# here after that bug is fixed.
set_swarming_parent_run_id=False,
)
with api.step.nest("launch SDK subbuilds") as presentation:
sdk_builds = api.subbuild.launch(
builder_names=[
"%s-%s-build_only" % (build.builder.builder, target)
for target in TARGETS
],
presentation=presentation,
extra_properties=jsonpb.MessageToDict(
SubbuildProperties(sdk_id=sdk_id),
preserving_proto_field_name=True,
),
# TODO(olivernewman): Swarming currently breaks if many builds
# are launched simultaneously with the same
# swarming_parent_run_id. Set the swarming_parent_run_id param
# here after that bug is fixed.
set_swarming_parent_run_id=False,
)
with api.step.nest("collect SDK subbuilds"):
sdk_builds = api.subbuild.collect(
build_ids=[b.build_id for b in sdk_builds.values()],
)
sdk_build_protos = [b.build_proto for b in sdk_builds.values()]
    # Display SDK build statuses and surface any failures in the UI.
api.display_util.display_builds(
step_name="display SDK subbuilds",
builds=sdk_build_protos,
raise_on_failure=True,
)
    # Download the individual SDK archives.
fetch_sdk_artifacts(api, checkout.root_dir, sdk_builds)
with api.step.nest("gen") as presentation:
# Run GN to get access to tool_paths.json for looking up paths to
# prebuilt tools.
gn_results = api.build.gen(
checkout=checkout,
fint_params_path=props.fint_params_path,
presentation=presentation,
)
# Create a `python` executable that will be available in $PATH as an
# alias for `python3`, since there's no `python` executable in the
# checkout.
python3 = gn_results.tool("python3")
python_dir = api.path.mkdtemp("python")
api.file.symlink(
"symlink python3 to python", python3, python_dir.join("python")
)
def run_python(step_name, cmd, **kwargs):
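        """Run a Python script with the checkout's prebuilt python3."""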
# Override $PATH to make sure that the script only uses the prebuilt
# Python from the checkout, and doesn't fall back to whatever is
# ambiently installed.
path_prefix = [python_dir, api.path.dirname(python3)]
with api.context(env_prefixes={"PATH": path_prefix}):
api.step(step_name, [python3] + cmd, **kwargs)
with api.step.nest("merge archives"):
# Merge the SDK archives for each target into a single archive.
merge_path = checkout.root_dir.join("scripts", "sdk", "merger", "merge.py")
full_archive_path = api.path["cleanup"].join("merged_sdk_archive.tar.gz")
sdk_archives_dir = checkout.root_dir.join("archives")
sdk_archives_paths = api.file.glob_paths(
"get sdk paths",
sdk_archives_dir,
"*/*.tar.gz",
test_data=["%s/core.tar.gz" % b for b in sdk_builds],
)
api.file.move(
"create merged_sdk_archive.tar.gz", sdk_archives_paths[0], full_archive_path
)
for sdk_archives_path in sdk_archives_paths[1:]:
run_python(
"merge %s" % sdk_archives_path,
[
merge_path,
"--first-archive",
sdk_archives_path,
"--second-archive",
full_archive_path,
"--output-archive",
full_archive_path,
],
)
    # Generate a Bazel workspace along with its tests.
    # These tests are run for every SDK flavor.
generate_bazel_path = checkout.root_dir.join(
"scripts", "sdk", "bazel", "generate.py"
)
sdk_dir = api.path["cleanup"].join("sdk-bazel")
test_workspace_dir = api.path["cleanup"].join("tests")
run_python(
"create bazel sdk",
[
generate_bazel_path,
"--archive",
full_archive_path,
"--output",
sdk_dir,
"--tests",
test_workspace_dir,
],
)
with api.step.nest("test sdk") as presentation:
bazel_path = gn_results.tool("bazel")
bazel_user_root_path = api.path["cleanup"].join("bazel")
with api.macos_sdk():
run_python(
"run bazel tests",
[
test_workspace_dir.join("run.py"),
"--output_user_root",
bazel_user_root_path,
"--bazel",
bazel_path,
],
timeout=datetime.timedelta(hours=1),
)
with api.step.nest("generate gn sdk"):
        # Generate a GN workspace along with its tests.
        # These tests are run for every SDK flavor.
generate_gn_path = checkout.root_dir.join("scripts", "sdk", "gn", "generate.py")
gn_sdk_dir = api.path["cleanup"].join("sdk-gn")
gn_sdk_archive = api.path["cleanup"].join("gn.tar.gz")
test_workspace_dir = api.path["cleanup"].join("tests")
run_python(
"create gn sdk",
[
generate_gn_path,
"--archive",
full_archive_path,
"--output",
gn_sdk_dir,
"--output-archive",
gn_sdk_archive,
"--tests",
test_workspace_dir,
],
)
with api.step.nest("test gn sdk"):
run_python(
"run gn tests",
[test_workspace_dir.join("run.py")],
timeout=datetime.timedelta(hours=1),
)
if image_builds:
with api.step.nest("collect image subbuilds"):
image_builds = api.subbuild.collect(
build_ids=[b.build_id for b in image_builds.values()],
)
        # Display image build statuses and surface any failures in the UI.
api.display_util.display_builds(
step_name="display image subbuilds",
builds=[b.build_proto for b in image_builds.values()],
raise_on_failure=True,
)
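        # All image subbuilds are expected to upload their artifacts to the
        # same GCS bucket.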
artifact_gcs_buckets = list(
set(
api.subbuild.get_property(b.build_proto, "artifact_gcs_bucket")
for b in image_builds.values()
)
)
assert len(artifact_gcs_buckets) == 1, artifact_gcs_buckets
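        # Fetch the product bundle metadata produced by the image subbuilds so
        # it can be uploaded alongside the SDK.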
bundle_dir = api.path.mkdtemp("bundle")
api.bundle_fetcher.download(
build_ids=[b.build_id for b in image_builds.values()],
gcs_bucket=artifact_gcs_buckets[0],
out_dir=bundle_dir,
)
api.gsutil.upload(
bucket=props.gcs_bucket,
src=bundle_dir.join(PRODUCT_BUNDLES_JSON),
dst="development/%s/sdk/%s" % (sdk_id, PRODUCT_BUNDLES_JSON),
name="upload %s" % PRODUCT_BUNDLES_JSON,
# Publicly available.
unauthenticated_url=True,
)
# Relocate the image build outputs into a well-known location based on
# sdk_id.
relocate_artifacts(
api,
[b.build_proto for b in image_builds.values()],
props.companion_images,
sdk_id,
props.gcs_bucket,
)
# Publish the core and GN SDK.
#
# GCS publishing paths:
# gs://fuchsia/development/${sdk_id}/sdk/${platform}
# |-- core.tar.gz
# `-- gn.tar.gz
#
# CIPD publishing paths (versioning is built into CIPD):
# https://chrome-infra-packages.appspot.com/p/fuchsia/sdk/
# |-- core
# | `-- ${platform}
# `-- gn
# `-- ${platform}
# Publish core.
gcs_archive_path = "development/%s/sdk/%s-amd64/%s.tar.gz" % (
sdk_id,
api.platform.name,
props.sdk_name,
)
cipd_pkg_name = "%s/sdk/%s/${platform}" % (props.cipd_root, props.sdk_name)
cipd_metadata = [
("jiri_snapshot", sdk_id),
("version", sdk_id),
]
revision = build.input.gitiles_commit.id
upload_core(
api,
sdk_name=props.sdk_name,
gcs_archive_bucket=props.gcs_bucket,
gcs_archive_path=gcs_archive_path,
cipd_pkg_name=cipd_pkg_name if props.is_release_version else None,
archive_path=full_archive_path,
cipd_metadata=cipd_metadata,
update_refs=update_refs,
sdk_id=sdk_id,
revision=revision,
checkout_root=checkout.root_dir,
)
# Publish GN SDK.
with api.step.nest("publish gn sdk"):
# Upload SDK dir to CIPD and tarball to GCS.
gcs_path = "development/%s/sdk/%s-amd64/gn.tar.gz" % (sdk_id, api.platform.name)
api.gsutil.upload(
bucket=props.gcs_bucket,
src=gn_sdk_archive,
dst=gcs_path,
name="upload gn fuchsia-sdk %s" % sdk_id,
# Publicly available.
unauthenticated_url=True,
)
# Upload GN SDK CIPD.
if props.is_release_version:
api.cipd_util.upload_package(
"%s/sdk/gn/${platform}" % props.cipd_root,
gn_sdk_dir,
[gn_sdk_dir],
{"git_revision": revision},
repository=None,
refs=update_refs,
metadata=cipd_metadata,
)
if props.external_testing.gerrit_host:
with api.step.nest("run external tests") as presentation:
run_external_tests(
api,
sdk_id=sdk_id,
bucket=props.gcs_bucket,
external_testing=props.external_testing,
)
# TODO(atyfto): Currently we trigger and collect Gerrit-based external tests
# and TAP tests serially, which is not efficient if both are specified by
# the proto. If we ever have builders which want to run both, we should
# frontload both triggers ahead of their respective collects.
# TODO(fxbug.dev/90091): Deprecate tap_testing for fxt_tests.
if (
props.tap_testing.tap_projects
or props.fxt_tests.tap_projects
or props.fxt_tests.guitar_projects
):
with api.step.nest("run fxt tests") as presentation:
run_fxt_tests(
api,
presentation=presentation,
sdk_id=sdk_id,
bucket=props.gcs_bucket,
tap_testing=props.tap_testing,
fxt_tests=props.fxt_tests,
)
def upload_core(
api,
sdk_name,
gcs_archive_bucket,
gcs_archive_path,
cipd_pkg_name,
archive_path,
cipd_metadata,
update_refs,
revision,
sdk_id,
checkout_root,
):
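    """Upload the core SDK archive to GCS, and to CIPD for release builds."""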
sdk_dir = api.path["cleanup"].join(sdk_name)
# Extract the archive to a directory for CIPD processing.
with api.step.nest("extract " + sdk_name):
api.file.ensure_directory("create sdk dir", sdk_dir)
api.tar.extract(
step_name="unpack sdk archive",
path=archive_path,
directory=sdk_dir,
)
with api.step.nest("upload " + sdk_name):
api.gsutil.upload(
bucket=gcs_archive_bucket,
src=archive_path,
dst=gcs_archive_path,
link_name="archive",
name="upload %s fuchsia-sdk %s" % (sdk_name, sdk_id),
# Publicly available.
unauthenticated_url=True,
)
# Note that this will upload the snapshot to a location different from the
# path that api.fuchsia copied it to. This uses a path based on the hash of
# the SDK artifact, not based on the hash of the snapshot itself. Clients
# can use this to find the snapshot used to build a specific SDK artifact.
snapshot_file = api.path["cleanup"].join("jiri.snapshot")
with api.context(cwd=checkout_root):
api.jiri.snapshot(snapshot_file)
api.gsutil.upload(
bucket="fuchsia-snapshots",
src=snapshot_file,
dst=sdk_id,
link_name="jiri.snapshot",
name="upload jiri.snapshot",
)
if update_refs:
# Record the sdk_id of the most recently uploaded archive for downstream
# autorollers.
sdk_id_path = api.path["cleanup"].join("sdk_id")
api.file.write_text("write sdk_id", sdk_id_path, sdk_id)
for update_ref in update_refs:
upper_update_ref = update_ref.upper()
api.gsutil.upload(
bucket=gcs_archive_bucket,
src=sdk_id_path,
dst="development/%s_%s" % (upper_update_ref, api.platform.name.upper()),
link_name=upper_update_ref,
name="upload %s sdk_id" % update_ref,
)
# Upload the SDK to CIPD as well.
if cipd_pkg_name:
api.cipd_util.upload_package(
cipd_pkg_name,
sdk_dir,
[sdk_dir],
{"git_revision": revision},
repository=None,
refs=update_refs,
metadata=cipd_metadata,
)
def GenTests(api):
def add_hash_property(build):
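        """Simulate a subbuild exposing the CAS digest of its SDK archive."""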
build.output.properties[SDK_ARCHIVE_OUTPUT_KEY] = "###HASH###"
return build
def properties(**kwargs):
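        """Build test input properties, applying per-test overrides."""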
props = {
"gcs_bucket": "fuchsia",
"sdk_name": "core",
"cipd_root": "fuchsia",
"is_release_version": False,
"subbuild_collect_timeout_secs": 3600,
"fint_params_path": "fint_params/sdk.textproto",
"manifest": "flower",
"remote": "https://fuchsia.googlesource.com/integration",
"revision": api.jiri.example_revision,
}
props.update(kwargs)
return api.properties(**props)
ci_subbuilds = api.subbuild.child_build_steps(
builds=[
add_hash_property(
api.subbuild.ci_build_message(
build_id=123,
builder="sdk-core-linux-arm64-build_only",
status="SUCCESS",
)
),
add_hash_property(
api.subbuild.ci_build_message(
build_id=456,
builder="sdk-core-linux-x64-build_only",
status="SUCCESS",
)
),
],
launch_step="launch SDK subbuilds",
collect_step="collect SDK subbuilds",
)
ci_subbuilds_infra_failure = api.subbuild.child_build_steps(
builds=[
add_hash_property(
api.subbuild.ci_build_message(
build_id=123,
builder="sdk-core-linux-arm64-build_only",
status="INFRA_FAILURE",
)
),
add_hash_property(
api.subbuild.ci_build_message(
build_id=456,
builder="sdk-core-linux-x64-build_only",
status="INFRA_FAILURE",
)
),
],
launch_step="launch SDK subbuilds",
collect_step="collect SDK subbuilds",
)
ci_subbuilds_failure = api.subbuild.child_build_steps(
builds=[
add_hash_property(
api.subbuild.ci_build_message(
build_id=123,
builder="sdk-core-linux-arm64-build_only",
status="FAILURE",
)
),
add_hash_property(
api.subbuild.ci_build_message(
build_id=456,
builder="sdk-core-linux-x64-build_only",
status="FAILURE",
)
),
],
launch_step="launch SDK subbuilds",
collect_step="collect SDK subbuilds",
)
cq_subbuilds = api.subbuild.child_build_steps(
builds=[
add_hash_property(
api.subbuild.try_build_message(
build_id=123,
builder="sdk-core-linux-arm64-build_only",
status="SUCCESS",
)
),
add_hash_property(
api.subbuild.try_build_message(
build_id=456,
builder="sdk-core-linux-x64-build_only",
status="SUCCESS",
)
),
],
launch_step="launch SDK subbuilds",
collect_step="collect SDK subbuilds",
)
image_build = api.subbuild.ci_build_message(
builder="###SDK_IMAGE_BUILDER###", status="SUCCESS", build_id=123456789
)
image_build.output.properties["artifact_gcs_bucket"] = "fuchsia-artifacts"
image_build.output.properties["gcs_bucket"] = "###BUCKET###"
other_image_build = api.subbuild.ci_build_message(
builder="###OTHER_SDK_IMAGE_BUILDER###", status="SUCCESS", build_id=9876543210
)
other_image_build.output.properties["artifact_gcs_bucket"] = "fuchsia-artifacts"
other_image_build.output.properties["gcs_bucket"] = "###BUCKET###"
ci_image_builds = api.subbuild.child_build_steps(
builds=[image_build, other_image_build],
launch_step="launch image subbuilds",
collect_step="collect image subbuilds",
)
describe = api.release.ref_to_release_version("releases/0.20191018.0.1")
ref_settings = [
{"branch": "refs/heads/main", "update_ref": "latest"},
]
release_versions = api.step_data(
"get release versions on h3ll0",
api.raw_io.stream_output_text(
"\n".join(
[
"releases/0.20191019.0.1",
"releases/0.20191018.0.1",
]
)
),
)
no_release_versions = api.step_data(
"get release versions on h3ll0", api.raw_io.stream_output_text("")
)
companion_images = [
{
"name": "###SDK_IMAGE###",
"builder": "###SDK_IMAGE_BUILDER###",
},
{
"name": "###OTHER_SDK_IMAGE###",
"builder": "###OTHER_SDK_IMAGE_BUILDER###",
},
]
yield api.buildbucket_util.test("cq", tryjob=True) + properties() + cq_subbuilds
# TODO(fxbug.dev/90091): Deprecate tap_testing for fxt_tests.
yield (
api.buildbucket_util.test("tap_tests")
+ properties(
tap_testing=InputProperties.TapTesting(
tap_projects=["foo"],
use_staging_host=False,
timeout_secs=60 * 60,
),
)
+ ci_subbuilds
+ api.fxt.launch(
"run fxt tests.launch",
test_data={
"tap_request_id": "tap-id",
"guitar_project_request_ids": [],
"workspace": "test-ws",
"change_num": 12345,
},
)
+ api.fxt.monitor("run fxt tests.monitor", success=True)
)
yield (
api.buildbucket_util.test("fxt_tests")
+ properties(
fxt_tests=InputProperties.FxtTests(
tap_projects=["foo"],
guitar_projects={"guitar-project-id": "guitar-cluster-id"},
use_staging_host=False,
timeout_secs=60 * 60,
),
)
+ ci_subbuilds
+ api.fxt.launch(
"run fxt tests.launch",
test_data={
"tap_request_id": "tap-id",
"guitar_project_request_ids": ["guitar-project-id1"],
"workspace": "test-ws",
"change_num": 12345,
},
)
+ api.fxt.monitor("run fxt tests.monitor", success=True)
)
yield (
api.buildbucket_util.test("tap_and_fxt_tests")
+ properties(
tap_testing=InputProperties.TapTesting(
tap_projects=["foo_tap"],
use_staging_host=False,
timeout_secs=60 * 60,
),
fxt_tests=InputProperties.FxtTests(
tap_projects=["foo_fxt"],
guitar_projects={"guitar-project-id": "guitar-cluster-id"},
use_staging_host=False,
timeout_secs=60 * 60,
),
)
+ ci_subbuilds
+ api.fxt.launch(
"run fxt tests.launch",
test_data={
"tap_request_id": "tap_fxt-id",
"guitar_project_request_ids": ["guitar-project-id1"],
"workspace": "test-ws",
"change_num": 12345,
},
)
+ api.fxt.monitor("run fxt tests.monitor", success=True)
)
yield (
api.buildbucket_util.test("external_testing")
+ properties(
ref_settings=ref_settings,
external_testing=InputProperties.ExternalTesting(
gerrit_host="chromium-review.googlesource.com",
gerrit_project="chromium/src",
bucket_filepath="build/fuchsia/sdk-bucket.txt",
version_filepath="build/fuchsia/sdk.version",
version_list_filepath="build/fuchsia/sdk.list",
version_list_contents="sdk.version",
timeout_secs=7200,
tryjobs=["chromium/try/fuchsia_x64"],
tryjobs_wait_secs=180,
),
)
+ ci_subbuilds
+ api.cl_util.create_cl(
"run external tests.create CL",
test_data={
"number": 123456,
"current_revision": "foo",
"revisions": {"foo": {"number": 1}},
},
)
+ api.cl_util.collect_tryjobs(
"run external tests.collect tryjobs", builders=["chromium/try/fuchsia_x64"]
)
)
tryjob_msgs = [
api.buildbucket.try_build_message(
project="chromium",
bucket="try",
builder="fuchsia_x64",
),
]
yield (
api.buildbucket_util.test("explicit_tryjobs")
+ properties(
ref_settings=ref_settings,
external_testing=InputProperties.ExternalTesting(
gerrit_host="chromium-review.googlesource.com",
gerrit_project="chromium/src",
bucket_filepath="build/fuchsia/sdk-bucket.txt",
version_filepath="build/fuchsia/sdk.version",
version_list_filepath="build/fuchsia/sdk.list",
version_list_contents="sdk.version",
timeout_secs=7200,
tryjobs=["chromium/try/fuchsia_x64"],
trigger_tryjobs=True,
tryjobs_wait_secs=180,
),
)
+ ci_subbuilds
+ api.cl_util.create_cl(
"run external tests.create CL",
test_data={
"number": 123456,
"current_revision": "foo",
"revisions": {"foo": {"number": 1}},
},
)
+ api.cl_util.trigger_tryjobs(
"run external tests.trigger tryjobs", tryjob_msgs=tryjob_msgs
)
+ api.cl_util.collect_tryjobs(
"run external tests.collect tryjobs", tryjob_msgs=tryjob_msgs
)
)
yield (
api.buildbucket_util.test("release_ci", git_ref="refs/heads/release")
+ properties(
is_release_version=True,
companion_images=companion_images,
ref_settings=ref_settings,
)
+ ci_subbuilds
+ ci_image_builds
+ describe
+ release_versions
)
yield (
api.buildbucket_util.test(
"release_ci_no_update_ref", git_ref="refs/heads/release"
)
+ properties()
+ ci_subbuilds
+ ci_image_builds
+ api.properties(
is_release_version=True,
companion_images=companion_images,
ref_settings=ref_settings,
)
+ describe
+ no_release_versions
)
yield (
api.buildbucket_util.test("subbuild_build_failure", status="failure")
+ properties()
+ ci_subbuilds_failure
)
yield (
api.buildbucket_util.test("subbuild_infra_failure", status="infra_failure")
+ properties()
+ ci_subbuilds_infra_failure
)
yield (
api.buildbucket_util.test("release_ci_image_failure", status="failure")
+ properties(
is_release_version=True,
companion_images=companion_images,
ref_settings=ref_settings,
)
)