| # Copyright 2017 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """ sdk.py - Builds, tests, and uploads SDKs and companion images. |
| |
| # Execution overview |
| |
| ## Trigger companion image subbuilds |
| |
| Optionally trigger companion images for go/fuchsia-sdk-companion-images. |
| |
| ## Trigger core or Bazel SDK subbuild & collect |
| |
| Trigger a subbuild to build the core or Bazel SDK, which will upload to CAS upon |
| a successful completion. Collect from CAS. |
| |
| ## Generate, test, & upload GN SDK |
| |
| TODO(fxbug.dev/130226): Delete. |
| See https://fuchsia.dev/fuchsia-src/development/sdk/gn for more info. |
| |
| ## Collect companion image subbuilds & upload |
| |
| Collect any companion images, upload PBv2 metadata, and optionally upload the |
| companion images to GCS. |
| |
| ## Upload core or Bazel SDK |
| |
| Upload the core or Bazel SDK to GCS, CAS, and/or CIPD with the appropriate |
| metadata. This signals availability for downstream use, including any uploaded |
| companion images from the previous step. |
| |
| ## Orchestrate external presubmit and/or FXT tests |
| |
Optionally orchestrate tests against external presubmit(s) to verify that the
new SDK (and any uploaded companion images) is compatible with downstream use.
| """ |
| |
| import datetime |
| |
| from google.protobuf import json_format |
| from google.protobuf import struct_pb2 |
| |
| from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2 |
| from PB.recipe_engine.result import RawResult |
| from PB.recipes.fuchsia.sdk import InputProperties |
| from PB.recipes.fuchsia.fuchsia.build import InputProperties as SubbuildProperties |
| |
# Recipe modules this recipe depends on, resolved by the recipe engine.
DEPS = [
    "fuchsia/build",
    "fuchsia/buildbucket_util",
    "fuchsia/bundle_fetcher",
    "fuchsia/cas_util",
    "fuchsia/checkout",
    "fuchsia/cipd_util",
    "fuchsia/fxt",
    "fuchsia/git",
    "fuchsia/gsutil",
    "fuchsia/jiri",
    "fuchsia/presubmit_util",
    "fuchsia/release",
    "fuchsia/sso",
    "fuchsia/subbuild",
    "recipe_engine/archive",
    "recipe_engine/buildbucket",
    "recipe_engine/context",
    "recipe_engine/file",
    "recipe_engine/path",
    "recipe_engine/platform",
    "recipe_engine/properties",
    "recipe_engine/raw_io",
    "recipe_engine/step",
]

# The name of the output file from bundle_fetcher describing the product bundles
# which should be uploaded with the SDK.
PRODUCT_BUNDLES_JSON = "product_bundles.json"

# These represent the locations of the CAS digest for the SDK archives and Bazel
# SDK archives in the output of fuchsia/build.py. Needs to be kept in sync with
# fuchsia/build.py.
SDK_ARCHIVE_OUTPUT_KEY = "sdk_archive_cas_digest"
BAZEL_SDK_OUTPUT_KEY = "bazel_sdk_cas_digest"
# This represents a map of tool name to CAS digest for the tools listed in the
# tool_overrides property and passed to the fuchsia/build.py recipe through the
# cas_tools property. Needs to be kept in sync with fuchsia/build.py.
CAS_TOOLS_OUTPUT_KEY = "tools_cas_digests"

# Schema of the input properties this recipe accepts.
PROPERTIES = InputProperties
| |
| |
def RunSteps(api, props):
    """Build, test, and upload SDKs and companion images.

    See the module docstring for an overview of the execution phases.

    Args:
        api: Recipe API object.
        props: InputProperties (see PROPERTIES above).
    """
    # Default SDK ID is the buildbucket build ID; overridden below for
    # release builds.
    sdk_id = api.buildbucket_util.id
    checkout = api.checkout.fuchsia_with_options(
        project=props.project,
        manifest=props.manifest,
        remote=props.remote,
        allow_skipping_patch=props.allow_skipping_patch,
    )

    integration_root = checkout.project_path(props.project)
    # Resolve the incoming release version and update refs.
    update_refs = []
    if props.is_release_version:
        release_version = api.release.get_current_release_version(integration_root)
        assert release_version, "failed to resolve release version"
        # Override SDK ID with release version.
        sdk_id = str(release_version)
        update_ref = resolve_ref_to_update(
            api=api,
            ref_settings=props.ref_settings,
            remote=props.remote,
            integration_root=integration_root,
            release_version=release_version,
        )
        if update_ref:
            update_refs.append(update_ref)

    # Properties forwarded to the SDK subbuild and companion image subbuilds
    # so they all build against the same SDK ID and tool set.
    cas_tools = []
    if props.sdk_override_configs.tool_overrides:
        for tool in props.sdk_override_configs.tool_overrides:
            cas_tools.append(tool.tool_name)
    extra_properties = json_format.MessageToDict(
        SubbuildProperties(sdk_id=sdk_id, cas_tools=cas_tools),
        preserving_proto_field_name=True,
    )

    # Launch companion image subbuilds early; they are collected further
    # below, after the SDK subbuild completes.
    companion_image_builds = {}
    if props.companion_images:
        with api.step.nest("launch companion image subbuilds") as presentation:
            companion_image_builds = api.subbuild.launch(
                builder_names=[i.builder for i in props.companion_images],
                presentation=presentation,
                # TODO(olivernewman): Swarming currently breaks if many builds
                # are launched simultaneously with the same
                # swarming_parent_run_id. Set the swarming_parent_run_id param
                # here after that bug is fixed.
                set_swarming_parent_run_id=False,
                extra_properties=extra_properties,
            )

    # Launch the core/Bazel SDK subbuild and block until it completes.
    with api.step.nest("SDK build") as presentation:
        builder_name = f"{api.buildbucket.build.builder.builder}-subbuild"
        builds = api.subbuild.launch(
            builder_names=[builder_name],
            presentation=presentation,
            extra_properties=extra_properties,
        )
        build_id = builds[builder_name].build_id
        builds = api.subbuild.collect([build_id])
        sdk_build = builds[build_id].build_proto
        api.buildbucket_util.display_builds(
            step_name="check SDK build status",
            builds=[sdk_build],
            raise_on_failure=True,
        )
        # Copy various output properties from the child build to the parent so
        # that tools don't need to look up the child build in order to get these
        # values.
        output_props = json_format.MessageToDict(sdk_build.output.properties)
        for prop in (
            api.build.FINT_PARAMS_PATH_PROPERTY,
            api.checkout.CHECKOUT_INFO_PROPERTY,
            api.checkout.CACHED_REVISION_PROPERTY,
        ):
            if prop in output_props:
                presentation.properties[prop] = output_props[prop]

    core_sdk_archive_path = run_core_sdk_steps(api, sdk_build)

    # GCS layout: development/<sdk_id>/sdk/<platform>-amd64/<name>.tar.gz
    sdk_id_gcs_prefix = f"development/{sdk_id}"
    sdk_gcs_prefix = f"{sdk_id_gcs_prefix}/sdk"
    sdk_gcs_path = f"{sdk_gcs_prefix}/{api.platform.name}-amd64/{props.sdk_name}.tar.gz"
    cipd_metadata = [
        ("jiri_snapshot", sdk_id),
        ("version", sdk_id),
    ]
    revision = api.buildbucket.build.input.gitiles_commit.id
    # TODO(fxbug.dev/130226): Delete.
    if core_sdk_archive_path and not props.skip_gn_sdk:
        run_gn_sdk_steps(
            api,
            props,
            checkout,
            core_sdk_archive_path,
            sdk_gcs_prefix,
            sdk_id,
            revision,
            update_refs,
            cipd_metadata,
        )

    bazel_sdk_path = run_bazel_sdk_steps(api, sdk_build)

    assert (
        core_sdk_archive_path or bazel_sdk_path
    ), "neither a core nor Bazel SDK were built"

    tools_digests = None
    if props.sdk_override_configs.tool_overrides:
        tools_digests = api.subbuild.get_property(sdk_build, CAS_TOOLS_OUTPUT_KEY)

    pbv2_path = None
    # Map of CIPD package name -> CAS digest; populated by the companion
    # image and upload steps, consumed by external tests and surfaced via
    # the "cas_digests" output property below.
    package_overrides = {}

    if props.companion_images:
        with api.step.nest("collect companion image subbuilds"):
            companion_image_builds = api.subbuild.collect(
                build_ids=[b.build_id for b in companion_image_builds.values()],
            )
            api.buildbucket_util.display_builds(
                step_name="check companion image subbuilds",
                builds=[b.build_proto for b in companion_image_builds.values()],
                raise_on_failure=True,
            )
        pbv2_path = run_companion_image_steps(
            api,
            props,
            companion_image_builds,
            sdk_id,
            package_overrides,
        )

    run_upload_steps(
        api,
        props,
        core_sdk_archive_path,
        bazel_sdk_path,
        tools_digests,
        pbv2_path,
        checkout,
        sdk_id,
        revision,
        sdk_gcs_path,
        sdk_id_gcs_prefix,
        cipd_metadata,
        update_refs,
        package_overrides,
    )

    if package_overrides:
        with api.step.nest("set cas_digests output property") as presentation:
            presentation.properties["cas_digests"] = package_overrides

    if (
        props.external_testing.gerrit_host
        or props.sdk_override_configs.use_gclient_variables
    ):
        with api.step.nest("run external tests"):
            run_external_tests(
                api,
                sdk_id=sdk_id,
                bucket=props.gcs_bucket,
                sdk_gcs_prefix=sdk_gcs_prefix,
                sdk_gcs_path=sdk_gcs_path,
                package_overrides=package_overrides,
                sdk_override_configs=props.sdk_override_configs,
                external_testing=props.external_testing,
            )

    run_fxt_tests = any(
        [
            props.fxt_options.tap_projects,
            props.fxt_options.guitar_config,
            props.fxt_options.piper_presubmit,
        ]
    )

    if run_fxt_tests:
        api.fxt.orchestrate_fxt_tests(
            bucket=props.gcs_bucket,
            namespace=sdk_id,
            options=props.fxt_options,
        )
| |
| |
def fetch_sdk_from_subbuild(api, build_proto, output_key, artifact_type):
    """Download a subbuild's CAS-uploaded SDK artifacts to a local directory.

    Args:
        api: Recipe API object.
        build_proto: Build proto of the completed subbuild.
        output_key: Output property on the subbuild holding the CAS digest.
        artifact_type: Human-readable artifact name, used in the step name.

    Returns:
        Path to the directory the artifacts were downloaded into, or None if
        the subbuild did not set the output property.
    """
    with api.step.nest(f"fetch {artifact_type} artifacts"):
        try:
            cas_digest = api.subbuild.get_property(build_proto, output_key)
        except api.step.InfraFailure:
            # The subbuild didn't produce this artifact type.
            return None
        download_dir = api.path.cleanup_dir / output_key
        api.cas_util.download(digest=cas_digest, output_dir=download_dir)
        return download_dir
| |
| |
def run_core_sdk_steps(api, sdk_build):
    """Fetch the core SDK archive produced by the subbuild, if any.

    Args:
        api: Recipe API object.
        sdk_build: Build proto for the completed SDK subbuild.

    Returns:
        Path to the single core SDK .tar.gz archive, or None if the subbuild
        did not produce a core SDK.
    """
    download_dir = fetch_sdk_from_subbuild(
        api,
        sdk_build,
        SDK_ARCHIVE_OUTPUT_KEY,
        "sdk",
    )
    if not download_dir:
        return None

    archives = api.file.glob_paths(
        "get core SDK path",
        download_dir,
        "*.tar.gz",
        test_data=["sdk.tar.gz"],
    )
    # The subbuild is expected to produce exactly one archive.
    assert len(archives) == 1
    return archives[0]
| |
| |
# TODO(fxbug.dev/130226): Delete.
def run_gn_sdk_steps(
    api,
    props,
    checkout,
    core_sdk_archive_path,
    sdk_gcs_prefix,
    sdk_id,
    revision,
    update_refs,
    cipd_metadata,
):
    """Generate, test, and publish the (deprecated) GN SDK.

    Converts the core SDK archive into a GN workspace, runs the workspace's
    generated tests, uploads the GN SDK archive to GCS, and (for release
    versions) publishes the workspace to CIPD.

    Args:
        api: Recipe API object.
        props: Recipe input properties.
        checkout: Fuchsia checkout object.
        core_sdk_archive_path: Path to the core SDK .tar.gz archive.
        sdk_gcs_prefix: GCS path prefix for this SDK's uploads.
        sdk_id: SDK version identifier.
        revision: Git revision used as the CIPD search tag.
        update_refs: CIPD refs to update on upload.
        cipd_metadata: (key, value) metadata pairs attached to the CIPD package.
    """
    with api.step.nest("gen") as presentation:
        # Run GN to get access to tool_paths.json for looking up paths to
        # prebuilt tools.
        gn_results = api.build.gen(
            checkout=checkout,
            fint_params_path=props.fint_params_path,
            presentation=presentation,
        )

    # Create a `python` executable that will be available in $PATH as an
    # alias for `python3`, since there's no `python` executable in the
    # checkout.
    python3 = gn_results.tool("python3")
    python_dir = api.path.mkdtemp("python")
    api.file.symlink("symlink python3 to python", python3, python_dir / "python")
    # As per RFC-0129, "Python scripts intended to be directly invoked MUST
    # contain a shebang which ultimately references the vendored Python".
    # This applies to all Python GN build scripts, some of which are
    # triggered in the SDK tests. To unblock Python build script migration
    # to "fuchsia-vendored-python", we need to provide it as a runtime
    # executable in this checkout accordingly.
    api.file.symlink(
        "symlink python3 to fuchsia-vendored-python",
        python3,
        python_dir / "fuchsia-vendored-python",
    )

    def run_python(step_name, cmd, **kwargs):
        # Override $PATH to make sure that the script only uses the prebuilt
        # Python from the checkout, and doesn't fall back to whatever is
        # ambiently installed.
        path_prefix = [python_dir, api.path.dirname(python3)]
        with api.context(env_prefixes={"PATH": path_prefix}):
            api.step(step_name, [python3] + cmd, **kwargs)

    with api.step.nest("generate gn sdk"):
        # Generate a GN workspace along with its tests.
        # These tests are being run for every SDK flavor.
        generate_gn_path = checkout.root_dir.joinpath(
            "scripts", "sdk", "gn", "generate.py"
        )
        gn_sdk_dir = api.path.cleanup_dir / "sdk-gn"
        gn_sdk_archive = api.path.cleanup_dir / "gn.tar.gz"
        test_workspace_dir = api.path.cleanup_dir / "tests"

        run_python(
            "create gn sdk",
            [
                generate_gn_path,
                "--archive",
                core_sdk_archive_path,
                "--output",
                gn_sdk_dir,
                "--output-archive",
                gn_sdk_archive,
                "--tests",
                test_workspace_dir,
            ],
        )
    with api.step.nest("test gn sdk"):
        run_python(
            "run gn tests",
            [test_workspace_dir / "run.py"],
            timeout=datetime.timedelta(hours=1),
        )

    with api.step.nest("publish gn sdk"):
        gcs_path = f"{sdk_gcs_prefix}/{api.platform.name}-amd64/gn.tar.gz"
        api.gsutil.upload(
            bucket=props.gcs_bucket,
            src=gn_sdk_archive,
            dst=gcs_path,
            name=f"upload gn fuchsia-sdk {sdk_id}",
            unauthenticated_url=True,
        )
        # CIPD publication only happens for release versions.
        if props.is_release_version:
            api.cipd_util.upload_package(
                f"{props.cipd_root}/sdk/gn/{api.cipd_util.platform_name}",
                gn_sdk_dir,
                search_tag={"git_revision": revision},
                repository=None,
                refs=update_refs,
                metadata=cipd_metadata,
            )
| |
| |
def run_bazel_sdk_steps(api, sdk_build):
    """Fetch the Bazel SDK produced by the subbuild, if any.

    Args:
        api: Recipe API object.
        sdk_build: Build proto for the completed SDK subbuild.

    Returns:
        Path to the downloaded Bazel SDK directory, or None if the subbuild
        did not produce a Bazel SDK.
    """
    bazel_sdk_dir = fetch_sdk_from_subbuild(api, sdk_build, BAZEL_SDK_OUTPUT_KEY, "bazel SDK")
    return bazel_sdk_dir
| |
| |
def run_companion_image_steps(
    api,
    props,
    companion_image_builds,
    sdk_id,
    package_overrides,
):
    """Process completed companion image subbuilds.

    Merges each subbuild's "cas_digests" output property into
    `package_overrides` (mutated in place), resolves PBv2 product-bundle
    metadata for the subbuilds, and optionally relocates legacy image
    archives in GCS.

    Args:
        api: Recipe API object.
        props: Recipe input properties.
        companion_image_builds: Map of build ID to collected subbuild.
        sdk_id: SDK version identifier used in GCS destination paths.
        package_overrides: Dict of CIPD package name -> CAS digest; updated
            in place with each subbuild's digests.

    Returns:
        Path to the generated product bundles metadata file.
    """
    for b in companion_image_builds.values():
        try:
            cas_digests = api.subbuild.get_property(
                b.build_proto,
                "cas_digests",
            )
        except api.step.InfraFailure:
            # Not every image builder uploads CAS digests; skip those.
            continue
        for package, digest in cas_digests.items():
            assert (
                package not in package_overrides
            ), f"duplicate overridden package {package}"
            package_overrides[package] = digest

    # All subbuilds must agree on a single artifact GCS bucket.
    artifact_gcs_buckets = list(
        set(
            api.subbuild.get_property(b.build_proto, "artifact_gcs_bucket")
            for b in companion_image_builds.values()
        )
    )
    assert len(artifact_gcs_buckets) == 1, artifact_gcs_buckets
    artifact_gcs_bucket = artifact_gcs_buckets[0]

    with api.step.nest("resolve PB metadata"):
        pbv2_dir = api.path.mkdtemp("pbv2")
        api.bundle_fetcher.product_list(
            build_ids=[b.build_id for b in companion_image_builds.values()],
            gcs_bucket=artifact_gcs_bucket,
            out_dir=pbv2_dir,
        )
        pbv2_path = pbv2_dir / PRODUCT_BUNDLES_JSON

    if props.relocate_image_archives:
        # Relocate the image archives into a well-known layout based on the
        # SDK ID. Requires that the (deprecated) archives are published to GCS.
        # TODO(fxbug.dev/127210): Remove once all users are migrated to PBv2.
        with api.step.nest("relocate companion image archives"):
            for b in companion_image_builds.values():
                # Map the subbuild back to its configured image name.
                image_name = next(
                    image.name
                    for image in props.companion_images
                    if image.builder == b.build_proto.builder.builder
                )
                src_bucket = api.subbuild.get_property(b.build_proto, "gcs_bucket")
                # The property value may arrive JSON-quoted; strip the quotes.
                src_bucket = src_bucket.strip('"')
                api.gsutil.copy(
                    src_bucket=src_bucket,
                    src=f"builds/{b.build_proto.id}/build-archive.tgz",
                    dst_bucket=props.gcs_bucket,
                    dst=f"development/{sdk_id}/images/{image_name}.tgz",
                    link_name=f"{image_name} build-archive",
                )

    return pbv2_path
| |
| |
def run_upload_steps(
    api,
    props,
    core_sdk_archive_path,
    bazel_sdk_path,
    tools_digests,
    pbv2_path,
    checkout,
    sdk_id,
    revision,
    sdk_gcs_path,
    sdk_id_gcs_prefix,
    cipd_metadata,
    update_refs,
    package_overrides,
):
    """Upload the built SDKs and associated metadata.

    Uploads the core SDK archive, jiri snapshot, and update-ref markers to
    GCS; publishes the core SDK, Bazel SDK, and PBv2 metadata to CIPD (for
    release versions) or to CAS via `package_overrides` (otherwise); and
    records tool override digests in `package_overrides`.

    Args:
        api: Recipe API object.
        props: Recipe input properties.
        core_sdk_archive_path: Path to the core SDK archive, or None.
        bazel_sdk_path: Path to the Bazel SDK directory, or None.
        tools_digests: Map of tool name -> CAS digest, or None.
        pbv2_path: Path to the product bundles metadata file, or None.
        checkout: Fuchsia checkout object (used for the jiri snapshot).
        sdk_id: SDK version identifier.
        revision: Git revision used as the CIPD search tag.
        sdk_gcs_path: Full GCS destination path for the core SDK archive.
        sdk_id_gcs_prefix: GCS path prefix for this SDK ID.
        cipd_metadata: (key, value) metadata pairs for CIPD packages.
        update_refs: CIPD refs to update on upload.
        package_overrides: Dict of CIPD package name -> CAS digest; updated
            in place for non-release uploads.
    """
    if core_sdk_archive_path:
        with api.step.nest("upload core SDK"):
            api.gsutil.upload(
                bucket=props.gcs_bucket,
                src=core_sdk_archive_path,
                dst=sdk_gcs_path,
                link_name="core SDK archive",
                unauthenticated_url=True,
            )

            # Note that this will upload the snapshot to a location different from the
            # path that api.fuchsia copied it to. This uses a path based on the hash of
            # the SDK artifact, not based on the hash of the snapshot itself. Clients
            # can use this to find the snapshot used to build a specific SDK artifact.
            snapshot_file = api.path.cleanup_dir / "jiri.snapshot"
            with api.context(cwd=checkout.root_dir):
                api.jiri.snapshot(snapshot_file)
            api.gsutil.upload(
                bucket="fuchsia-snapshots",
                src=snapshot_file,
                dst=sdk_id,
                link_name="jiri.snapshot",
                name="upload jiri.snapshot",
            )

            # Publish a small marker file per update ref whose contents are
            # the SDK ID, so clients can resolve e.g. "LATEST" to a version.
            for update_ref in update_refs:
                upper_update_ref = update_ref.upper()
                api.gsutil.upload(
                    bucket=props.gcs_bucket,
                    src=api.raw_io.input(sdk_id),
                    dst=f"development/{upper_update_ref}_{api.platform.name.upper()}",
                    link_name=upper_update_ref,
                    name=f"upload {update_ref} sdk_id",
                )

            # Extract archive to a directory for CIPD processing.
            cipd_staging_dir = api.path.cleanup_dir / props.sdk_name
            api.file.ensure_directory("create SDK dir", cipd_staging_dir)
            api.archive.extract(
                step_name="unpack SDK archive",
                archive_file=core_sdk_archive_path,
                output=cipd_staging_dir,
            )
            cipd_pkg = (
                f"{props.cipd_root}/sdk/{props.sdk_name}/{api.cipd_util.platform_name}"
            )
            if props.is_release_version:
                api.cipd_util.upload_package(
                    cipd_pkg,
                    cipd_staging_dir,
                    search_tag={"git_revision": revision},
                    repository=None,
                    refs=update_refs,
                    metadata=cipd_metadata,
                    # The default timeout (5 mins) is sometime not sufficient. See
                    # crbug.com/1404130 for more details.
                    verification_timeout="10m",
                )
            else:
                package_overrides[cipd_pkg] = api.cas_util.upload(cipd_staging_dir)
    if bazel_sdk_path:
        with api.step.nest("upload bazel SDK"):
            cipd_pkg = f"{props.cipd_root}/sdk/core/fuchsia-bazel-rules/{api.cipd_util.platform_name}"
            if props.is_release_version:
                api.cipd_util.upload_package(
                    cipd_pkg,
                    bazel_sdk_path,
                    search_tag={"git_revision": revision},
                    repository=None,
                    refs=update_refs,
                    metadata=cipd_metadata,
                )
            else:
                package_overrides[cipd_pkg] = api.cas_util.upload(bazel_sdk_path)

    if tools_digests:
        # Record each overridden tool's digest under its resolved CIPD name.
        for tool in props.sdk_override_configs.tool_overrides:
            package_overrides[
                tool.cipd_pkg_name.replace("${platform}", api.cipd_util.platform_name)
            ] = tools_digests[tool.tool_name]

    if pbv2_path:
        with api.step.nest("upload PBv2 metadata"):
            pbv2_gcs_path = f"{sdk_id_gcs_prefix}/{PRODUCT_BUNDLES_JSON}"
            api.gsutil.upload(
                bucket=props.gcs_bucket,
                src=pbv2_path,
                dst=pbv2_gcs_path,
                name=f"upload {PRODUCT_BUNDLES_JSON} v2",
                unauthenticated_url=True,
            )
            pbv2_staging_dir = api.path.mkdtemp("pbv2_staging")
            api.file.copy("stage manifest", pbv2_path, pbv2_staging_dir)
            pbv2_cipd_pkg = f"{props.cipd_root}/development/product_bundles/v2"
            if props.is_release_version:
                api.cipd_util.upload_package(
                    pbv2_cipd_pkg,
                    pbv2_staging_dir,
                    search_tag={"git_revision": revision},
                    repository=None,
                    refs=update_refs,
                    metadata=cipd_metadata,
                )
            else:
                package_overrides[pbv2_cipd_pkg] = api.cas_util.upload(pbv2_staging_dir)
| |
| |
def resolve_ref_to_update(api, ref_settings, remote, integration_root, release_version):
    """Resolve which ref to update.

    Ref settings order is respected, i.e. the first branch that contains the
    input release version determines the ref. If the release version is not
    reachable on any of the given branches, return None.

    Args:
        api: Recipe API object.
        ref_settings: Ordered branch/update_ref settings to consider.
        remote: Remote URL of the integration repo (possibly sso://).
        integration_root: Local path to the integration checkout.
        release_version: Release version to look for on each branch.

    Returns:
        The matching update_ref string, or None if no branch contains the
        release version.
    """
    for candidate in ref_settings:
        head = api.git.get_remote_branch_head(
            api.sso.sso_to_https(remote), candidate.branch
        )
        # Make sure the branch head is available locally before inspecting it.
        with api.context(cwd=integration_root):
            api.git.fetch("origin", refspec=head)
        versions = api.release.get_release_versions(
            ref=head, repo_path=integration_root
        )
        if release_version in versions:
            return candidate.update_ref
    return None  # pragma: no cover
| |
| |
def run_external_tests(
    api,
    sdk_id,
    bucket,
    sdk_gcs_prefix,
    sdk_gcs_path,
    package_overrides,
    sdk_override_configs,
    external_testing,
):
    """Dry-run SDK in an external Buildbucket-based presubmit.

    Args:
        api: Recipe API object.
        sdk_id: SDK version identifier, used in the test CL subject.
        bucket: GCS bucket containing the uploaded SDK.
        sdk_gcs_prefix: GCS path prefix for this SDK's uploads.
        sdk_gcs_path: Full GCS path of the core SDK archive.
        package_overrides: Dict of CIPD package name -> CAS digest.
        sdk_override_configs: Settings selecting the SDK override mechanism.
        external_testing: presubmit_util options for the external presubmit.

    Raises:
        api.step.InfraFailure: If no SDK override mechanism is configured.
    """
    file_edits = None
    final_package_overrides = None
    gclient_variables = None

    # Exactly one override mechanism is used, chosen in priority order.
    if sdk_override_configs.use_package_overrides:
        final_package_overrides = package_overrides
    elif sdk_override_configs.use_gclient_variables:
        gclient_variables = {
            "download_fuchsia_sdk": True,
            "fuchsia_sdk_path": sdk_gcs_path,
        }
    elif sdk_override_configs.override_config_file:
        file_edits = [
            (
                sdk_override_configs.override_config_file,
                f"gs://{bucket}/{sdk_gcs_prefix}",
            )
        ]
    else:  # pragma: no cover
        # The caller discards this function's return value, so returning a
        # RawResult here would be silently ignored and the build would appear
        # to pass. Raise instead so a misconfiguration actually fails the
        # build.
        raise api.step.InfraFailure("no SDK override mechanism specified")

    api.presubmit_util.orchestrate(
        options=external_testing,
        cl_subject=f"[test] Dryrun SDK version {sdk_id}",
        file_edits=file_edits,
        package_overrides=final_package_overrides,
        gclient_variables=gclient_variables,
    )
| |
| |
def GenTests(api):
    """Generate recipe-engine test cases for this recipe."""

    def properties(**kwargs):
        # Baseline input properties shared by all test cases; callers can
        # override or extend them via kwargs.
        props = {
            "project": "integration",
            "manifest": "flower",
            "remote": "https://fuchsia.googlesource.com/integration",
            "gcs_bucket": "fuchsia",
            "sdk_name": "core",
            "cipd_root": "fuchsia",
            "fint_params_path": "fint_params/sdk.textproto",
        }
        props.update(kwargs)
        return api.properties(**props)

    def sdk_subbuild(output_key, cas_tools=()):
        # Mock a successful SDK subbuild whose output properties expose
        # `output_key` (and optionally per-tool CAS digests).
        b = api.subbuild.ci_build_message(
            build_id=123,
            builder="builder-subbuild",
            output_props={
                "checkout_info": {"manifest": "foo"},
            },
            status="SUCCESS",
        )
        b.output.properties[output_key] = "cas-digest"
        tools_digests = {}
        for i, tool in enumerate(cas_tools):
            tools_digests[tool] = f"cas-digest/{i}"
        if tools_digests:
            b.output.properties[CAS_TOOLS_OUTPUT_KEY] = tools_digests
        return api.subbuild.child_build_steps(
            builds=[b],
            launch_step="SDK build",
            collect_step="SDK build",
        )

    # Mock companion image subbuilds; one exposes cas_digests, one does not.
    image_build = api.subbuild.ci_build_message(
        builder="###SDK_IMAGE_BUILDER###",
        status="SUCCESS",
        build_id=123456789,
    )
    image_build.output.properties["artifact_gcs_bucket"] = "fuchsia-artifacts"
    image_build.output.properties["gcs_bucket"] = "###BUCKET###"
    image_build.output.properties["cas_digests"] = {"fuchsia/tools/foo": "abc123/50"}
    other_image_build = api.subbuild.ci_build_message(
        builder="###OTHER_SDK_IMAGE_BUILDER###",
        status="SUCCESS",
        build_id=9876543210,
    )
    other_image_build.output.properties["artifact_gcs_bucket"] = "fuchsia-artifacts"
    other_image_build.output.properties["gcs_bucket"] = "###BUCKET###"

    ci_image_builds = api.subbuild.child_build_steps(
        builds=[image_build, other_image_build],
        launch_step="launch companion image subbuilds",
        collect_step="collect companion image subbuilds",
    )

    ref_settings = [
        {"branch": "refs/heads/main", "update_ref": "latest"},
    ]
    # Mock step data for resolve_ref_to_update's release-version lookup.
    release_versions = api.step_data(
        "get release versions on h3ll0",
        api.raw_io.stream_output_text(
            "\n".join(
                [
                    "releases/0.20191019.0.1",
                    "releases/0.20191018.0.1",
                ]
            )
        ),
    )

    companion_images = [
        {
            "name": "###SDK_IMAGE###",
            "builder": "###SDK_IMAGE_BUILDER###",
        },
        {
            "name": "###OTHER_SDK_IMAGE###",
            "builder": "###OTHER_SDK_IMAGE_BUILDER###",
        },
    ]
    # Core SDK build followed by orchestrated FXT tests.
    yield (
        api.buildbucket_util.test("fxt_tests")
        + properties(
            fxt_options=api.fxt.Options(
                sdk_mode=True,
                tap_projects=["foo"],
                guitar_config=json_format.ParseDict(
                    {"projectfoo": "bar"}, struct_pb2.Struct()
                ),
                piper_presubmit=True,
                use_staging_host=False,
                timeout_secs=60 * 60,
            ),
        )
        + sdk_subbuild(SDK_ARCHIVE_OUTPUT_KEY)
        + api.fxt.orchestrate_fxt_tests(piper_presubmit=True)
    )
    # External presubmit using the override-config-file mechanism.
    yield (
        api.buildbucket_util.test("external_testing")
        + properties(
            ref_settings=ref_settings,
            sdk_override_configs=InputProperties.SDKOverrideConfigs(
                override_config_file="override_config",
            ),
            external_testing=api.presubmit_util.Options(
                gerrit_host="foo-review.googlesource.com",
                gerrit_project="repo",
            ),
        )
        + sdk_subbuild(SDK_ARCHIVE_OUTPUT_KEY)
        + api.presubmit_util.create_cl(nesting="run external tests")
        + api.presubmit_util.wait_for_cq(nesting="run external tests")
    )
    # Bazel SDK with tool overrides and package-override-based testing.
    yield (
        api.buildbucket_util.test("package_overrides")
        + properties(
            ref_settings=ref_settings,
            sdk_override_configs=InputProperties.SDKOverrideConfigs(
                use_package_overrides=True,
                tool_overrides=[
                    {
                        "tool_name": "tool1",
                        "cipd_pkg_name": "fuchsia/tool1/${platform}",
                    },
                    {
                        "tool_name": "tool2",
                        "cipd_pkg_name": "fuchsia/tool2/${platform}",
                    },
                ],
            ),
            companion_images=companion_images,
            external_testing=api.presubmit_util.Options(
                gerrit_host="foo-review.googlesource.com",
                gerrit_project="repo",
            ),
        )
        + sdk_subbuild(BAZEL_SDK_OUTPUT_KEY, cas_tools=["tool1", "tool2"])
        + ci_image_builds
        + api.presubmit_util.create_cl(nesting="run external tests")
        + api.presubmit_util.wait_for_cq(nesting="run external tests")
    )
    tryjob_msgs = [
        api.buildbucket.try_build_message(
            project="fuchsia",
            bucket="try",
            builder="bar.x64-debug",
        ),
    ]
    # External presubmit using gclient variables and explicit tryjobs.
    yield (
        api.buildbucket_util.test("gclient_variables")
        + properties(
            ref_settings=ref_settings,
            sdk_override_configs=InputProperties.SDKOverrideConfigs(
                use_gclient_variables=True,
            ),
            external_testing=api.presubmit_util.Options(
                tryjobs=["foo/try/a_builder"],
            ),
            companion_images=companion_images,
        )
        + sdk_subbuild(SDK_ARCHIVE_OUTPUT_KEY)
        + ci_image_builds
        + api.presubmit_util.trigger_tryjobs(
            nesting="run external tests", tryjob_msgs=tryjob_msgs
        )
        + api.presubmit_util.collect_tryjobs(
            nesting="run external tests", tryjob_msgs=tryjob_msgs
        )
    )
    # Release build with companion images and image archive relocation.
    yield (
        api.buildbucket_util.test(
            "release_with_companion_images",
            git_ref="refs/heads/releases/foo",
        )
        + properties(
            is_release_version=True,
            companion_images=companion_images,
            relocate_image_archives=True,
            ref_settings=ref_settings,
        )
        + sdk_subbuild(SDK_ARCHIVE_OUTPUT_KEY)
        + ci_image_builds
        + api.release.ref_to_release_version("releases/0.20191018.0.1")
        + release_versions
    )
    # Release build producing only the Bazel SDK.
    yield (
        api.buildbucket_util.test(
            "bazel_release",
            git_ref="refs/heads/releases/foo",
        )
        + properties(
            is_release_version=True,
            ref_settings=ref_settings,
        )
        + sdk_subbuild(BAZEL_SDK_OUTPUT_KEY)
        + api.release.ref_to_release_version("releases/0.20191018.0.1")
        + release_versions
    )