| # Copyright 2017 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """ sdk.py - Builds, tests, and publishes Fuchsia SDKs. |
| |
| # Execution overview |
| |
| ## Trigger companion image subbuilds |
| |
| Optionally trigger companion images, which are built alongside the SDK to give |
| developers prebuilt images to test with. |
| |
| ## Trigger SDK subbuilds |
| |
| Trigger x64 and arm64 subbuilds. We run them in parallel to speed up the build. |
| See https://fuchsia.dev/fuchsia-src/development/sdk#recipes for more info. |
| |
| ## Collect SDK subbuilds and merge archives |
| |
| Invoke an in-tree script to merge the SDK archives produced by each SDK |
| subbuild, yielding the core SDK. |
| |
| ## Test the core SDK |
| |
| Invoke an in-tree script to generate a bazel workspace using the core SDK, and |
| invoke bazel to run tests. |
| |
| ## Build the GN SDK |
| |
| Invoke an in-tree script to generate the GN SDK based on the core SDK. |
| See https://fuchsia.dev/fuchsia-src/development/sdk/gn for more info. |
| |
| ## Test the GN SDK |
| |
| Invoke an in-tree script to generate a bazel workspace using the GN SDK, and |
| invoke bazel to run tests. |
| |
| ## Collect companion image subbuilds |
| |
| Collect companion images, if they were requested to be built. |
| |
| ## Publish |
| |
| Publish the core and GN SDKs, and the optional companion images and associated |
| packages to GCS. Also publish the core and GN SDKs to CIPD. For official |
| releases, also apply refs e.g. "latest" to these uploads. |
| |
| ## Trigger external tests |
| |
| Optionally trigger tests against an external CQ to test that the new SDK is |
| compatible. |
| |
| ## Collect external tests |
| |
If external tests were triggered, wait on CQ to complete, and report success or
failure.
| |
| ## Trigger TAP tests |
| |
| Optionally trigger tests against TAP to test that the new SDK is compatible. |
| |
| ## Collect TAP tests |
| |
If TAP tests were triggered, wait on TAP to complete and report success or
failure.
| """ |
| |
| from PB.go.chromium.org.luci.common.proto.gerrit import gerrit as gerrit_pb2 |
| |
| from PB.recipes.fuchsia.sdk import InputProperties |
| |
| DEPS = [ |
| "fuchsia/archive", |
| "fuchsia/build", |
| "fuchsia/build_input_resolver", |
| "fuchsia/buildbucket_util", |
| "fuchsia/checkout", |
| "fuchsia/cl_util", |
| "fuchsia/display_util", |
| "fuchsia/fxt", |
| "fuchsia/git", |
| "fuchsia/gsutil", |
| "fuchsia/jiri", |
| "fuchsia/macos_sdk", |
| "fuchsia/release", |
| "fuchsia/sso", |
| "fuchsia/status_check", |
| "fuchsia/subbuild", |
| "fuchsia/tar", |
| "fuchsia/upload", |
| "recipe_engine/buildbucket", |
| "recipe_engine/context", |
| "recipe_engine/file", |
| "recipe_engine/json", |
| "recipe_engine/path", |
| "recipe_engine/platform", |
| "recipe_engine/properties", |
| "recipe_engine/python", |
| "recipe_engine/raw_io", |
| "recipe_engine/step", |
| "recipe_engine/time", |
| ] |
| |
| # This represents the location of the CAS digest for the sdk archives in the |
| # output of fuchsia/build.py. Needs to be kept in sync with fuchsia/build.py. |
| SDK_ARCHIVE_OUTPUT_KEY = "isolated_output_hash" |
| |
| TARGETS = ["arm64", "x64"] |
| |
| PROPERTIES = InputProperties |
| |
| |
def fetch_sdk_artifacts(api, checkout_root, builds):
    """Download the SDK archive produced by each SDK subbuild.

    Archives are downloaded from CAS into `<checkout_root>/archives/<build_id>/`.

    Args:
        api: Recipe API object.
        checkout_root (Path): Root directory of the Fuchsia checkout.
        builds (dict): Mapping of build ID to subbuild, as returned by
            api.subbuild.collect().
    """
    with api.step.nest("fetch sdk artifacts"):
        output_path = checkout_root.join("archives")
        # Use items() rather than the Python-2-only iteritems() so this code
        # works under both Python 2 and Python 3 recipe engines; iteration
        # behavior is identical.
        for build_id, build in builds.items():
            build_id = str(build_id)
            with api.step.nest(build_id):
                # Each subbuild exposes the CAS digest of its SDK archive via
                # an output property; see SDK_ARCHIVE_OUTPUT_KEY above.
                digest = api.subbuild.get_property(
                    build.build_proto, SDK_ARCHIVE_OUTPUT_KEY
                )
                output_dir = output_path.join(build_id)
                api.file.ensure_directory("makedirs", dest=output_dir)
                api.archive.download(
                    step_name="download %s archive" % build_id,
                    digest=digest,
                    output_dir=output_dir,
                )
| |
| |
def relocate_artifacts(api, builds, companion_images, sdk_id):
    """Copy companion image build outputs into well-known GCS locations.

    For each subbuild, copies the build archive and package archive from the
    subbuild's own GCS bucket into the bucket configured for the matching
    companion image, under paths keyed by the SDK ID.
    """
    with api.step.nest("relocate sdk image(s)"):
        for build in builds:
            # Find the companion image config whose builder produced this
            # subbuild (first match wins).
            image_name, image_bucket = next(
                (img.name, img.bucket)
                for img in companion_images
                if img.builder == build.builder.builder
            )
            with api.step.nest(image_name):
                src_bucket = build.output.properties["gcs_bucket"].strip('"')
                # (source path in subbuild bucket, destination path in the
                # companion image bucket), copied in order.
                artifacts = [
                    (
                        "builds/%s/build-archive.tgz" % build.id,
                        "development/%s/images/%s.tgz" % (sdk_id, image_name),
                    ),
                    (
                        "builds/%s/packages.tar.gz" % build.id,
                        "development/%s/packages/%s.tar.gz" % (sdk_id, image_name),
                    ),
                ]
                for src_path, dst_path in artifacts:
                    api.gsutil.copy(
                        src_bucket=src_bucket,
                        src=src_path,
                        dst_bucket=image_bucket,
                        dst=dst_path,
                        link_name="gsutil.copy",
                        unauthenticated_url=False,
                    )
| |
| |
def resolve_ref_to_update(api, ref_settings, remote, integration_repo, release_version):
    """Resolve which ref to update.

    Ref settings order is respected, i.e. the first branch whose history
    contains the input release version determines the ref. If the release
    version is not reachable on any of the given branches, return None.
    """
    for setting in ref_settings:
        # Resolve the branch tip on the remote (converting an sso:// remote
        # to https:// first) and fetch it locally so it can be inspected.
        head = api.git.get_remote_branch_head(
            api.sso.sso_to_https(remote), setting.branch
        )
        with api.context(cwd=integration_repo):
            api.git.fetch("origin", refspec=head)
        versions = api.release.get_release_versions(
            ref=head, repo_path=integration_repo
        )
        if release_version in versions:
            return setting.update_ref
    return None
| |
| |
def run_external_tests(api, presentation, sdk_id, bucket, external_testing):
    """Dry-run SDK in an external Gerrit-based CQ.

    The external repository must support SDK version and bucket overrides via
    `version_filepath` and `bucket_filepath` files.

    Creates a temporary CL in the external repo pointing at the new SDK,
    runs its tryjobs (either triggered explicitly or via a CQ dry-run), and
    always abandons the CL afterwards.

    Args:
        api: Recipe API object.
        presentation: Step presentation of the enclosing step (currently
            unused in this function).
        sdk_id (str): SDK version identifier under test.
        bucket (str): GCS bucket containing the uploaded SDK.
        external_testing: ExternalTesting input properties message.
    """
    api.cl_util.gerrit_host = external_testing.gerrit_host
    api.cl_util.gerrit_project = external_testing.gerrit_project
    # Create a throwaway CL that points the external repo at the new SDK.
    change_info = api.cl_util.create_cl(
        "create CL",
        subject="[test] Dryrun SDK version %s" % sdk_id,
        file_edits=[
            (external_testing.bucket_filepath, bucket),
            (external_testing.version_filepath, sdk_id),
            (
                external_testing.version_list_filepath,
                external_testing.version_list_contents,
            ),
        ],
    )
    try:
        # If we aren't explicitly triggering tryjobs, build ids are resolved
        # during collection.
        build_ids = None
        patchset_num = change_info.revisions[change_info.current_revision].number
        # If we are, build ids are provided by the trigger output.
        if external_testing.trigger_tryjobs:
            build_ids = [
                b.id
                for b in api.cl_util.trigger_tryjobs(
                    "trigger tryjobs",
                    change_num=change_info.number,
                    patchset_num=patchset_num,
                    builders=external_testing.tryjobs,
                    gerrit_host=external_testing.tryjobs_gerrit_host,
                )
            ]
        else:
            api.cl_util.trigger_cq(
                "trigger CQ+1",
                change_num=change_info.number,
                dry_run=True,
                # If we're collecting tryjobs, don't wait on CQ to complete.
                wait=not bool(external_testing.tryjobs),
                timeout_secs=external_testing.timeout_secs,
            )
        if external_testing.tryjobs:
            # Give tryjobs time to start after applying CQ label.
            # This is only required in the case that we used CQ to trigger
            # tryjobs, not when we have triggered the tryjobs explicitly.
            if not external_testing.trigger_tryjobs:
                api.time.sleep(external_testing.tryjobs_wait_secs)
            api.cl_util.collect_tryjobs(
                "collect tryjobs",
                change_num=change_info.number,
                patchset_num=patchset_num,
                # If build ids are not known, then we implicitly created tryjobs
                # by applying CQ label, and must specify the builders to search
                # for.
                builders=external_testing.tryjobs if not build_ids else None,
                build_ids=build_ids,
                gerrit_host=external_testing.tryjobs_gerrit_host,
                timeout_secs=external_testing.timeout_secs,
            )
    finally:
        # Always clean up the temporary CL, even on failure or timeout.
        api.cl_util.abandon_cl("abandon CL", change_num=change_info.number)
| |
| |
def run_tap_tests(api, presentation, sdk_id, bucket, tap_testing):
    """Dry-run SDK against one or more TAP projects.

    The TAP projects must respect custom Fuchsia SDK parameters on a CL,
    which are specified by the bucket and namespace overrides. The FXT
    workspace is always cleaned up, even if monitoring fails.
    """
    api.fxt.use_staging_host = tap_testing.use_staging_host
    # Launch the TAP run, namespaced by the SDK ID so it picks up the
    # freshly uploaded SDK from the given bucket.
    launch_resp = api.fxt.launch(
        step_name="launch",
        bucket=bucket,
        namespace=sdk_id,
        name=api.buildbucket.builder_name,
        projects=tap_testing.tap_projects,
        sdk_mode=True,
        presentation=presentation,
    )
    try:
        api.fxt.monitor(step_name="monitor", request_id=launch_resp["request_id"])
    finally:
        api.fxt.cleanup(step_name="cleanup", workspace=launch_resp["workspace"])
| |
| |
def RunSteps(api, props):
    """Build, test, and publish Fuchsia SDKs.

    See the module docstring for the full execution overview: launch
    companion-image and SDK subbuilds, merge and test the resulting SDK
    archives, publish to GCS/CIPD, and optionally run external/TAP tests.
    """
    api.build_input_resolver.resolve(
        default_project_url="https://fuchsia.googlesource.com/fuchsia"
    )
    build = api.buildbucket.build
    revision = build.input.gitiles_commit.id
    # Default SDK ID is the buildbucket build ID; overridden below for
    # official release versions.
    sdk_id = api.buildbucket_util.id
    checkout = api.checkout.fuchsia_with_options(
        manifest=props.manifest,
        remote=props.remote,
    )

    integration_repo = checkout.root_dir.join("integration")
    # Resolve the incoming release version and update refs.
    update_refs = []
    if props.is_release_version:
        release_version = api.release.ref_to_release_version(
            ref=revision, repo_path=integration_repo
        )
        # Override SDK ID with release version.
        sdk_id = str(release_version).replace("releases/", "")
        update_ref = resolve_ref_to_update(
            api=api,
            ref_settings=props.ref_settings,
            remote=props.remote,
            integration_repo=integration_repo,
            release_version=release_version,
        )
        if update_ref:
            update_refs.append(update_ref)

    # Launch companion image subbuilds first so they run concurrently with
    # the SDK subbuilds; they are collected much later.
    image_builds = {}
    if props.companion_images:
        with api.step.nest("launch image subbuilds") as presentation:
            image_builds = api.subbuild.launch(
                builder_names=[i.builder for i in props.companion_images],
                presentation=presentation,
                extra_properties={"sdk_id": sdk_id},
                # TODO(olivernewman): Swarming currently breaks if many builds
                # are launched simultaneously with the same
                # swarming_parent_run_id. Set the swarming_parent_run_id param
                # here after that bug is fixed.
                set_swarming_parent_run_id=False,
            )

    with api.step.nest("launch SDK subbuilds") as presentation:
        sdk_builds = api.subbuild.launch(
            builder_names=[
                "%s-%s-build_only" % (build.builder.builder, target)
                for target in TARGETS
            ],
            presentation=presentation,
            extra_properties={"parent_id": sdk_id},
            # TODO(olivernewman): Swarming currently breaks if many builds
            # are launched simultaneously with the same
            # swarming_parent_run_id. Set the swarming_parent_run_id param
            # here after that bug is fixed.
            set_swarming_parent_run_id=False,
        )

    with api.step.nest("collect SDK subbuilds") as presentation:
        sdk_builds = api.subbuild.collect(
            build_ids=[b.build_id for b in sdk_builds.itervalues()],
            presentation=presentation,
        )

    sdk_build_protos = [b.build_proto for b in sdk_builds.itervalues()]
    # Display SDK builds status and show failures on UI, if any.
    api.display_util.display_builds(
        step_name="display SDK subbuilds",
        builds=sdk_build_protos,
        raise_on_failure=True,
    )

    # Download the individual SDK archives
    fetch_sdk_artifacts(api, checkout.root_dir, sdk_builds)

    with api.step.nest("merge archives"):
        # Merge the SDK archives for each target into a single archive.
        merge_path = checkout.root_dir.join("scripts", "sdk", "merger", "merge.py")
        full_archive_path = api.path["cleanup"].join("merged_sdk_archive.tar.gz")

        sdk_archives_dir = checkout.root_dir.join("archives")
        sdk_archives_paths = api.file.glob_paths(
            "get sdk paths",
            sdk_archives_dir,
            "*/*.tar.gz",
            test_data=["%s/core.tar.gz" % b for b in sdk_builds],
        )
        # Seed the merged archive with the first per-target archive, then
        # fold each remaining archive into it pairwise.
        api.file.move(
            "create merged_sdk_archive.tar.gz", sdk_archives_paths[0], full_archive_path
        )

        for sdk_archives_path in sdk_archives_paths[1:]:
            api.python(
                "merge %s" % sdk_archives_path,
                merge_path,
                args=[
                    "--first-archive",
                    sdk_archives_path,
                    "--second-archive",
                    full_archive_path,
                    "--output-archive",
                    full_archive_path,
                ],
            )

    # Generate a Bazel workspace along with its tests.
    # These tests are being run for every SDK flavor.
    generate_bazel_path = checkout.root_dir.join(
        "scripts", "sdk", "bazel", "generate.py"
    )
    sdk_dir = api.path["cleanup"].join("sdk-bazel")
    test_workspace_dir = api.path["cleanup"].join("tests")

    api.python(
        "create bazel sdk",
        generate_bazel_path,
        args=[
            "--archive",
            full_archive_path,
            "--output",
            sdk_dir,
            "--tests",
            test_workspace_dir,
        ],
    )

    with api.step.nest("test sdk") as presentation:
        # Generate tool_path.json to access bazel tool.
        gn_results = api.build.gen(
            checkout=checkout,
            fint_params_path=props.fint_params_path,
            presentation=presentation,
        )
        bazel_path = gn_results.tool("bazel")
        bazel_user_root_path = api.path["cleanup"].join("bazel")

        with api.macos_sdk():
            api.python(
                "run bazel tests",
                test_workspace_dir.join("run.py"),
                args=[
                    "--output_user_root",
                    bazel_user_root_path,
                    "--bazel",
                    bazel_path,
                ],
                timeout=60 * 60,
            )
    with api.step.nest("generate gn sdk"):
        # Generate a GN workspace along with its tests.
        # These tests are being run for every SDK flavor.
        generate_gn_path = checkout.root_dir.join("scripts", "sdk", "gn", "generate.py")
        gn_sdk_dir = api.path["cleanup"].join("sdk-gn")
        gn_sdk_archive = api.path["cleanup"].join("gn.tar.gz")
        test_workspace_dir = api.path["cleanup"].join("tests")

        api.python(
            "create gn sdk",
            generate_gn_path,
            args=[
                "--archive",
                full_archive_path,
                "--output",
                gn_sdk_dir,
                "--output-archive",
                gn_sdk_archive,
                "--tests",
                test_workspace_dir,
            ],
        )
    with api.step.nest("test gn sdk"):
        api.python("run gn tests", test_workspace_dir.join("run.py"), timeout=60 * 60)

    if image_builds:
        with api.step.nest("collect image subbuilds") as presentation:
            image_builds = api.subbuild.collect(
                build_ids=[b.build_id for b in image_builds.itervalues()],
                presentation=presentation,
            )

        # Display SDK builds status and show failures on UI, if any.
        api.display_util.display_builds(
            step_name="display image subbuilds",
            builds=[b.build_proto for b in image_builds.itervalues()],
            raise_on_failure=True,
        )

    # Publish the core and GN SDK.
    #
    # GCS publishing paths:
    #   gs://fuchsia/development/${sdk_id}/sdk/${platform}
    #   |-- core.tar.gz
    #   `-- gn-sdk.tar.gz
    #
    # CIPD publishing paths (versioning is built into CIPD):
    #   https://chrome-infra-packages.appspot.com/p/fuchsia/sdk/
    #   |-- core
    #   |   `-- ${platform}
    #   `-- gn
    #       `-- ${platform}
    if image_builds:
        # Relocate the image build outputs into a well-known location based on
        # sdk_id.
        relocate_artifacts(
            api,
            [b.build_proto for b in image_builds.itervalues()],
            props.companion_images,
            sdk_id,
        )

    # Publish core.
    gcs_archive_path = "development/%s/sdk/%s-amd64/%s.tar.gz" % (
        sdk_id,
        api.platform.name,
        props.sdk_name,
    )
    cipd_pkg_name = "%s/sdk/%s/${platform}" % (props.cipd_root, props.sdk_name)
    extra_cipd_tags = {
        "jiri_snapshot": sdk_id,
        "version": sdk_id,
    }
    upload_core(
        api,
        sdk_name=props.sdk_name,
        gcs_archive_bucket=props.gcs_bucket,
        gcs_archive_path=gcs_archive_path,
        # CIPD uploads only happen for official release versions.
        cipd_pkg_name=cipd_pkg_name if props.is_release_version else None,
        archive_path=full_archive_path,
        extra_cipd_tags=extra_cipd_tags,
        update_refs=update_refs,
        sdk_id=sdk_id,
        revision=revision,
        checkout_root=checkout.root_dir,
    )

    # Publish GN SDK.
    with api.step.nest("publish gn sdk"):
        # Upload SDK dir to CIPD and tarball to GCS.
        gcs_path = "development/%s/sdk/%s-amd64/gn.tar.gz" % (sdk_id, api.platform.name)
        api.gsutil.upload(
            bucket=props.gcs_bucket,
            src=gn_sdk_archive,
            dst=gcs_path,
            name="upload gn fuchsia-sdk %s" % sdk_id,
            # Publicly available.
            unauthenticated_url=True,
        )

        # Upload GN SDK CIPD.
        if props.is_release_version:
            api.upload.cipd_package(
                "%s/sdk/gn/${platform}" % props.cipd_root,
                gn_sdk_dir,
                [api.upload.DirectoryPath(gn_sdk_dir)],
                {"git_revision": revision},
                repository=None,
                refs=update_refs,
                extra_tags=extra_cipd_tags,
            )

    if props.external_testing.gerrit_host:
        with api.step.nest("run external tests") as presentation:
            run_external_tests(
                api,
                presentation=presentation,
                sdk_id=sdk_id,
                bucket=props.gcs_bucket,
                external_testing=props.external_testing,
            )
    # TODO(atyfto): Currently we trigger and collect Gerrit-based external tests
    # and TAP tests serially, which is not efficient if both are specified by
    # the proto. If we ever have builders which want to run both, we should
    # frontload both triggers ahead of their respective collects.
    if props.tap_testing.tap_projects:
        with api.step.nest("run tap tests") as presentation:
            run_tap_tests(
                api,
                presentation=presentation,
                sdk_id=sdk_id,
                bucket=props.gcs_bucket,
                tap_testing=props.tap_testing,
            )
| |
| |
def upload_core(
    api,
    sdk_name,
    gcs_archive_bucket,
    gcs_archive_path,
    cipd_pkg_name,
    archive_path,
    extra_cipd_tags,
    update_refs,
    revision,
    sdk_id,
    checkout_root,
):
    """Publish the core SDK archive to GCS, and optionally to CIPD.

    Also uploads the jiri snapshot keyed by sdk_id, and (for refs to update)
    a small sdk_id marker file consumed by downstream autorollers.

    Args:
        api: Recipe API object.
        sdk_name (str): Name of the SDK flavor, e.g. "core".
        gcs_archive_bucket (str): GCS bucket to upload the archive to.
        gcs_archive_path (str): Destination object path within the bucket.
        cipd_pkg_name (str or None): CIPD package name to publish to, or None
            to skip the CIPD upload.
        archive_path (Path): Local path of the merged SDK tarball.
        extra_cipd_tags (dict): Extra tags to attach to the CIPD package.
        update_refs (list of str): CIPD/GCS refs (e.g. "latest") to update.
        revision (str): Git revision the SDK was built from.
        sdk_id (str): SDK version identifier.
        checkout_root (Path): Root of the checkout (used for jiri snapshot).
    """
    sdk_dir = api.path["cleanup"].join(sdk_name)

    # Extract the archive to a directory for CIPD processing.
    with api.step.nest("extract " + sdk_name):
        api.file.ensure_directory("create sdk dir", sdk_dir)
        api.tar.extract(
            step_name="unpack sdk archive",
            path=archive_path,
            directory=sdk_dir,
        )
    with api.step.nest("upload " + sdk_name):
        api.gsutil.upload(
            bucket=gcs_archive_bucket,
            src=archive_path,
            dst=gcs_archive_path,
            link_name="archive",
            name="upload %s fuchsia-sdk %s" % (sdk_name, sdk_id),
            # Publicly available.
            unauthenticated_url=True,
        )

        # Note that this will upload the snapshot to a location different from the
        # path that api.fuchsia copied it to. This uses a path based on the hash of
        # the SDK artifact, not based on the hash of the snapshot itself. Clients
        # can use this to find the snapshot used to build a specific SDK artifact.
        snapshot_file = api.path["cleanup"].join("jiri.snapshot")
        with api.context(cwd=checkout_root):
            api.jiri.snapshot(snapshot_file)
        api.gsutil.upload(
            bucket="fuchsia-snapshots",
            src=snapshot_file,
            dst=sdk_id,
            link_name="jiri.snapshot",
            name="upload jiri.snapshot",
        )

        if update_refs:
            # Record the sdk_id of the most recently uploaded archive for downstream
            # autorollers.
            sdk_id_path = api.path["cleanup"].join("sdk_id")
            api.file.write_text("write sdk_id", sdk_id_path, sdk_id)
            for update_ref in update_refs:
                upper_update_ref = update_ref.upper()
                api.gsutil.upload(
                    bucket=gcs_archive_bucket,
                    src=sdk_id_path,
                    dst="development/%s_%s" % (upper_update_ref, api.platform.name.upper()),
                    link_name=upper_update_ref,
                    name="upload %s sdk_id" % update_ref,
                )

        # Upload the SDK to CIPD as well.
        if cipd_pkg_name:
            api.upload.cipd_package(
                cipd_pkg_name,
                sdk_dir,
                [api.upload.DirectoryPath(sdk_dir)],
                {"git_revision": revision},
                repository=None,
                refs=update_refs,
                extra_tags=extra_cipd_tags,
            )
| |
| |
def GenTests(api):
    """Generate recipe simulation test cases covering the main build paths."""

    def add_hash_property(build):
        # Attach the CAS digest output property that RunSteps expects every
        # SDK subbuild to expose.
        build.output.properties[SDK_ARCHIVE_OUTPUT_KEY] = "###HASH###"
        return build

    revision = api.jiri.example_revision

    # Input properties shared by all test cases.
    default_properties = api.properties(
        gcs_bucket="fuchsia",
        sdk_name="core",
        cipd_root="fuchsia",
        is_release_version=False,
        subbuild_collect_timeout_secs=3600,
        fint_params_path="specs/sdk.fint.textproto",
    )

    topaz_properties = default_properties + api.properties(
        project="integration",
        manifest="fuchsia/topaz/topaz",
        remote="https://fuchsia.googlesource.com/integration",
    )

    topaz_local_ci = (
        topaz_properties
        + api.buildbucket.ci_build(
            git_repo="https://fuchsia.googlesource.com/topaz",
            revision=revision,
        )
        + api.properties(revision=revision)
    )

    topaz_global_ci = (
        topaz_properties
        + api.buildbucket.ci_build(
            git_repo="https://fuchsia.googlesource.com/topaz",
            revision=revision,
            bucket="###global-integration-bucket###",
        )
        + api.properties(revision=revision)
    )

    topaz_release_ci = (
        topaz_properties
        + api.buildbucket.ci_build(
            git_repo="https://fuchsia.googlesource.com/topaz",
            git_ref="refs/heads/release",
            revision=revision,
            bucket="###global-integration-bucket###",
        )
        + api.properties(revision=revision)
    )

    topaz_local_cq = topaz_properties + api.buildbucket.try_build()

    # Simulated SDK subbuild results: success, infra failure, and failure.
    ci_subbuilds = api.subbuild.child_build_steps(
        builds=[
            add_hash_property(
                api.subbuild.ci_build_message(
                    build_id=123,
                    builder="sdk-core-linux-arm64-build_only",
                    status="SUCCESS",
                )
            ),
            add_hash_property(
                api.subbuild.ci_build_message(
                    build_id=456,
                    builder="sdk-core-linux-x64-build_only",
                    status="SUCCESS",
                )
            ),
        ],
        launch_step="launch SDK subbuilds",
        collect_step="collect SDK subbuilds",
    )
    ci_subbuilds_infra_failure = api.subbuild.child_build_steps(
        builds=[
            add_hash_property(
                api.subbuild.ci_build_message(
                    build_id=123,
                    builder="sdk-core-linux-arm64-build_only",
                    status="INFRA_FAILURE",
                )
            ),
            add_hash_property(
                api.subbuild.ci_build_message(
                    build_id=456,
                    builder="sdk-core-linux-x64-build_only",
                    status="INFRA_FAILURE",
                )
            ),
        ],
        launch_step="launch SDK subbuilds",
        collect_step="collect SDK subbuilds",
    )
    ci_subbuilds_failure = api.subbuild.child_build_steps(
        builds=[
            add_hash_property(
                api.subbuild.ci_build_message(
                    build_id=123,
                    builder="sdk-core-linux-arm64-build_only",
                    status="FAILURE",
                )
            ),
            add_hash_property(
                api.subbuild.ci_build_message(
                    build_id=456,
                    builder="sdk-core-linux-x64-build_only",
                    status="FAILURE",
                )
            ),
        ],
        launch_step="launch SDK subbuilds",
        collect_step="collect SDK subbuilds",
    )
    cq_subbuilds = api.subbuild.child_build_steps(
        builds=[
            add_hash_property(
                api.subbuild.try_build_message(
                    build_id=123,
                    builder="sdk-core-linux-arm64-build_only",
                    status="SUCCESS",
                )
            ),
            add_hash_property(
                api.subbuild.try_build_message(
                    build_id=456,
                    builder="sdk-core-linux-x64-build_only",
                    status="SUCCESS",
                )
            ),
        ],
        launch_step="launch SDK subbuilds",
        collect_step="collect SDK subbuilds",
    )

    # Simulated companion image subbuild results.
    image_build = api.subbuild.ci_build_message(
        builder="###SDK_IMAGE_BUILDER###", status="SUCCESS", build_id=123456789
    )
    image_build.output.properties["gcs_bucket"] = "###BUCKET###"
    other_image_build = api.subbuild.ci_build_message(
        builder="###OTHER_SDK_IMAGE_BUILDER###", status="SUCCESS", build_id=9876543210
    )
    other_image_build.output.properties["gcs_bucket"] = "###BUCKET###"

    ci_image_builds = api.subbuild.child_build_steps(
        builds=[image_build, other_image_build],
        launch_step="launch image subbuilds",
        collect_step="collect image subbuilds",
    )
    ci_image_builds_failure = api.subbuild.child_build_steps(
        builds=[
            api.subbuild.ci_build_message(
                builder="###SDK_IMAGE_BUILDER###", status="FAILURE", build_id=123456789
            ),
            other_image_build,
        ],
        launch_step="launch image subbuilds",
        collect_step="collect image subbuilds",
    )

    # Release-version fixtures for the is_release_version code path.
    tags = [
        "releases/0.20191019.0.1",
        "releases/0.20191018.0.1",
    ]
    describe = api.release.ref_to_release_version("releases/0.20191018.0.1")
    ref_settings = [
        {"branch": "refs/heads/main", "update_ref": "latest"},
    ]
    release_versions = api.step_data(
        "get release versions on h3ll0",
        api.raw_io.stream_output("\n".join(tags)),
    )
    no_release_versions = api.step_data(
        "get release versions on h3ll0", api.raw_io.stream_output("")
    )

    companion_images = [
        {
            "name": "###SDK_IMAGE###",
            "builder": "###SDK_IMAGE_BUILDER###",
            "bucket": "###DEV_BUCKET###",
        },
        {
            "name": "###OTHER_SDK_IMAGE###",
            "builder": "###OTHER_SDK_IMAGE_BUILDER###",
            "bucket": "###OTHER_DEV_BUCKET###",
        },
    ]

    yield (
        api.status_check.test("local_ci")
        + topaz_local_ci
        + ci_subbuilds
        + api.fxt.launch(
            "run tap tests.launch",
            test_data={
                "request_id": "test-id",
                "workspace": "test-ws",
                "change_num": 12345,
            },
        )
        + api.fxt.monitor("run tap tests.monitor", success=True)
        + api.properties(
            tap_testing=InputProperties.TapTesting(
                tap_projects=["foo"], use_staging_host=False
            ),
        )
    )
    yield (api.status_check.test("local_cq") + topaz_local_cq + cq_subbuilds)
    yield (
        api.status_check.test("local_ci_mac")
        + topaz_local_ci
        + api.platform.name("mac")
        + ci_subbuilds
    )
    yield (
        api.status_check.test("global_ci")
        + topaz_global_ci
        + ci_subbuilds
        + api.cl_util.create_cl(
            "run external tests.create CL",
            test_data=gerrit_pb2.ChangeInfo(
                number=123456,
                current_revision="foo",
                revisions={"foo": gerrit_pb2.RevisionInfo(number=1)},
            ),
        )
        + api.cl_util.collect_tryjobs(
            "run external tests.collect tryjobs", builders=["chromium/try/fuchsia_x64"]
        )
        + api.properties(
            ref_settings=ref_settings,
            external_testing=InputProperties.ExternalTesting(
                gerrit_host="chromium-review.googlesource.com",
                gerrit_project="chromium/src",
                bucket_filepath="build/fuchsia/sdk-bucket.txt",
                version_filepath="build/fuchsia/sdk.version",
                version_list_filepath="build/fuchsia/sdk.list",
                version_list_contents="sdk.version",
                timeout_secs=7200,
                tryjobs=["chromium/try/fuchsia_x64"],
                tryjobs_wait_secs=180,
            ),
        )
    )
    tryjob_msgs = [
        api.buildbucket.try_build_message(
            project="chromium",
            bucket="try",
            builder="fuchsia_x64",
        ),
    ]
    yield (
        api.status_check.test("explicit_tryjobs")
        + topaz_global_ci
        + ci_subbuilds
        + api.cl_util.create_cl(
            "run external tests.create CL",
            test_data=gerrit_pb2.ChangeInfo(
                number=123456,
                current_revision="foo",
                revisions={"foo": gerrit_pb2.RevisionInfo(number=1)},
            ),
        )
        + api.cl_util.trigger_tryjobs(
            "run external tests.trigger tryjobs", tryjob_msgs=tryjob_msgs
        )
        + api.cl_util.collect_tryjobs(
            "run external tests.collect tryjobs", tryjob_msgs=tryjob_msgs
        )
        + api.properties(
            ref_settings=ref_settings,
            external_testing=InputProperties.ExternalTesting(
                gerrit_host="chromium-review.googlesource.com",
                gerrit_project="chromium/src",
                bucket_filepath="build/fuchsia/sdk-bucket.txt",
                version_filepath="build/fuchsia/sdk.version",
                version_list_filepath="build/fuchsia/sdk.list",
                version_list_contents="sdk.version",
                timeout_secs=7200,
                tryjobs=["chromium/try/fuchsia_x64"],
                trigger_tryjobs=True,
                tryjobs_wait_secs=180,
            ),
        )
    )
    yield (
        api.status_check.test("release_ci")
        + topaz_release_ci
        + ci_subbuilds
        + ci_image_builds
        + api.properties(
            is_release_version=True,
            companion_images=companion_images,
            ref_settings=ref_settings,
        )
        + describe
        + release_versions
    )
    yield (
        api.status_check.test("release_ci_no_update_ref")
        + topaz_release_ci
        + ci_subbuilds
        + ci_image_builds
        + api.properties(
            is_release_version=True,
            companion_images=companion_images,
            ref_settings=ref_settings,
        )
        + describe
        + no_release_versions
    )
    yield (
        api.status_check.test("local_ci_build_failure", status="failure")
        + topaz_global_ci
        + ci_subbuilds_failure
    )
    yield (
        api.status_check.test("local_ci_infra_failure", status="infra_failure")
        + topaz_global_ci
        + ci_subbuilds_infra_failure
    )
    yield (
        api.status_check.test("release_ci_image_failure", status="failure")
        + topaz_global_ci
        + ci_subbuilds
        + ci_image_builds_failure
        + api.properties(
            is_release_version=True,
            companion_images=companion_images,
            ref_settings=ref_settings,
        )
        + describe
        + release_versions
    )
    yield (
        api.status_check.test("release_ci_new_upload")
        + topaz_release_ci
        + api.step_data(
            "upload core.cipd.cipd search fuchsia/sdk/core/${platform} "
            + "git_revision:%s" % revision,
            api.json.output({"result": []}),
        )
        + ci_subbuilds
        + ci_image_builds
        + api.properties(
            is_release_version=True,
            companion_images=companion_images,
            ref_settings=ref_settings,
        )
        + describe
        + release_versions
    )