| # Copyright 2017 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """ sdk.py - Builds, tests, and publishes Fuchsia SDKs. |
| |
| # Execution overview |
| |
| ## Trigger companion image subbuilds |
| |
Optionally trigger companion image subbuilds. These images are built alongside
the SDK to give developers prebuilt images to test with.
| |
| ## Trigger SDK subbuilds |
| |
| Trigger x64 and arm64 subbuilds. We run them in parallel to speed up the build. |
| See https://fuchsia.dev/fuchsia-src/development/sdk#recipes for more info. |
| |
| ## Collect SDK subbuilds and merge archives |
| |
| Invoke an in-tree script to merge the SDK archives produced by each SDK |
| subbuild, yielding the core SDK. |
| |
| ## Build the GN SDK |
| |
| Invoke an in-tree script to generate the GN SDK based on the core SDK. |
| See https://fuchsia.dev/fuchsia-src/development/sdk/gn for more info. |
| |
| ## Test the GN SDK |
| |
Invoke an in-tree script to generate a GN workspace (including tests) from the
GN SDK, then run those tests.
| |
| ## Collect companion image subbuilds |
| |
Collect the companion image subbuilds, if any were triggered.
| |
| ## Publish |
| |
Publish the core and GN SDKs, along with the optional companion images and
associated packages, to GCS. Also publish the core and GN SDKs to CIPD. For
official releases, also apply refs (e.g. "latest") to these uploads.
| |
| ## Trigger external tests |
| |
Optionally trigger tests against an external CQ to verify that the new SDK is
compatible.
| |
| ## Collect external tests |
| |
If external tests were triggered, wait for the CQ to complete and report
success or failure.
| |
| ## Trigger Fxt tests |
| |
Optionally trigger tests against Fxt to verify that the new SDK is compatible.
| |
| ## Collect Fxt tests |
| |
If Fxt tests were triggered, wait for Fxt to complete and report success or
failure.
| """ |
| |
| import datetime |
| |
| from google.protobuf import json_format as jsonpb |
| from google.protobuf import struct_pb2 |
| |
| from PB.recipes.fuchsia.sdk import InputProperties |
| from PB.recipes.fuchsia.fuchsia.build import InputProperties as SubbuildProperties |
| |
| DEPS = [ |
| "fuchsia/build", |
| "fuchsia/buildbucket_util", |
| "fuchsia/bundle_fetcher", |
| "fuchsia/cas_util", |
| "fuchsia/checkout", |
| "fuchsia/cipd_util", |
| "fuchsia/fxt", |
| "fuchsia/git", |
| "fuchsia/gsutil", |
| "fuchsia/jiri", |
| "fuchsia/presubmit_util", |
| "fuchsia/release", |
| "fuchsia/sso", |
| "fuchsia/subbuild", |
| "recipe_engine/archive", |
| "recipe_engine/buildbucket", |
| "recipe_engine/context", |
| "recipe_engine/file", |
| "recipe_engine/futures", |
| "recipe_engine/path", |
| "recipe_engine/platform", |
| "recipe_engine/properties", |
| "recipe_engine/raw_io", |
| "recipe_engine/step", |
| ] |
| |
# The name of the output file from bundle_fetcher describing the product
# bundles that should be uploaded along with the SDK.
| PRODUCT_BUNDLES_JSON = "product_bundles.json" |
| |
# These are the output property keys under which fuchsia/build.py exposes the
# CAS digests of the SDK and Bazel SDK archives. They must be kept in sync
# with fuchsia/build.py.
| SDK_ARCHIVE_OUTPUT_KEY = "sdk_archive_cas_digest" |
| BAZEL_SDK_OUTPUT_KEY = "bazel_sdk_cas_digest" |
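
# For illustration only, a subbuild's output properties are expected to look
# roughly like this (the digest values here are hypothetical; CAS digests take
# the form "<hash>/<size bytes>"):
#
#   {
#       "sdk_archive_cas_digest": "abc123.../456",
#       "bazel_sdk_cas_digest": "def456.../789",
#   }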
| |
| PROPERTIES = InputProperties |
| |
| |
| def fetch_sdk_artifacts(api, output_path, builds, output_key, artifact_type): |
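    """Downloads artifacts produced by each subbuild from CAS.

    Args:
        output_path (Path): Directory under which a subdirectory is created
            per build ID to hold that build's artifacts.
        builds (dict): Maps build ID to subbuild.
        output_key (str): Output property key holding the CAS digest.
        artifact_type (str): Human-readable artifact name, used in step names.
    """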
| with api.step.nest(f"fetch {artifact_type} artifacts"): |
| for build_id, build in builds.items(): |
| build_id = str(build_id) |
| with api.step.nest(build_id): |
| digest = api.subbuild.get_property(build.build_proto, output_key) |
| output_dir = output_path.join(build_id) |
| api.file.ensure_directory("makedirs", dest=output_dir) |
| api.cas_util.download( |
| step_name=f"download {build_id} artifacts", |
| digest=digest, |
| output_dir=output_dir, |
| ) |
| |
| |
| def relocate_artifacts(api, builds, companion_images, sdk_id, gcs_bucket): |
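    """Copies companion image archives into a well-known GCS layout keyed
    by SDK ID."""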
| def relocate_artifact(api, src_bucket, dst_bucket, src_path, dst_path): |
| api.gsutil.copy( |
| src_bucket=src_bucket, |
| src=src_path, |
| dst_bucket=dst_bucket, |
| dst=dst_path, |
| link_name="gsutil.copy", |
| unauthenticated_url=False, |
| ) |
| |
| with api.step.nest("relocate sdk image(s)"): |
| for build in builds: |
| image_name = next( |
| image.name |
| for image in companion_images |
| if image.builder == build.builder.builder |
| ) |
| with api.step.nest(image_name): |
| build_gcs_bucket = build.output.properties["gcs_bucket"].strip('"') |
| # build-archive.tgz |
| relocate_artifact( |
| api=api, |
| src_bucket=build_gcs_bucket, |
| dst_bucket=gcs_bucket, |
| src_path=f"builds/{build.id}/build-archive.tgz", |
| dst_path=f"development/{sdk_id}/images/{image_name}.tgz", |
| ) |
| # packages.tar.gz |
| relocate_artifact( |
| api=api, |
| src_bucket=build_gcs_bucket, |
| dst_bucket=gcs_bucket, |
| src_path=f"builds/{build.id}/packages.tar.gz", |
| dst_path=f"development/{sdk_id}/packages/{image_name}.tar.gz", |
| ) |
| |
| |
| def resolve_ref_to_update(api, ref_settings, remote, integration_repo, release_version): |
| """Resolve which ref to update. |
| |
    Ref settings order is respected, i.e. the update ref of the first branch
    that contains the input release version is used. If the release version
    is not reachable on any of the given branches, return None.
    """
| for settings in ref_settings: |
| branch_head = api.git.get_remote_branch_head( |
| api.sso.sso_to_https(remote), settings.branch |
| ) |
| with api.context(cwd=integration_repo): |
| api.git.fetch("origin", refspec=branch_head) |
| if release_version in api.release.get_release_versions( |
| ref=branch_head, repo_path=integration_repo |
| ): |
| return settings.update_ref |
| return None |
| |
| |
| def run_external_tests( |
| api, |
| sdk_id, |
| bucket, |
| sdk_gcs_prefix, |
| gcs_archive_path, |
| product_bundles_v1_path, |
| product_bundles_v2_path, |
| package_overrides, |
| sdk_override_configs, |
| external_testing, |
| ): |
| """Dry-run SDK in an external Buildbucket-based presubmit.""" |
| file_edits = None |
| final_package_overrides = None |
| gclient_variables = None |
| |
| if sdk_override_configs.use_package_overrides: |
| final_package_overrides = package_overrides |
| elif sdk_override_configs.use_gclient_variables: |
| gclient_variables = { |
| "download_fuchsia_sdk": True, |
| "fuchsia_sdk_path": gcs_archive_path, |
| } |
| if product_bundles_v1_path is not None: |
| # Avoid changing existing gclient var name (i.e. omit "_v1"). |
| gclient_variables["product_bundles_path"] = product_bundles_v1_path |
| if product_bundles_v2_path is not None: |
| gclient_variables["product_bundles_v2_path"] = product_bundles_v2_path |
| else: |
| # TODO(fxb/103903): Clean this up once we have figured out what we |
| # want to do with the other LSC pipelines based on gclient. |
| file_edits = [] |
| if sdk_override_configs.bucket_filepath: |
| file_edits += [ |
| (sdk_override_configs.bucket_filepath, bucket), |
| (sdk_override_configs.version_filepath, sdk_id), |
| ( |
| sdk_override_configs.version_list_filepath, |
| sdk_override_configs.version_list_contents, |
| ), |
| ] |
| if sdk_override_configs.override_config_file: |
| file_edits.append( |
| ( |
| sdk_override_configs.override_config_file, |
| f"gs://{bucket}/{sdk_gcs_prefix}", |
| ) |
| ) |
| |
| api.presubmit_util.orchestrate( |
| options=external_testing, |
| cl_subject=f"[test] Dryrun SDK version {sdk_id}", |
| file_edits=file_edits, |
| package_overrides=final_package_overrides, |
| gclient_variables=gclient_variables, |
| ) |
| |
| |
| def RunSteps(api, props): |
| build = api.buildbucket.build |
| sdk_id = api.buildbucket_util.id |
| checkout = api.checkout.fuchsia_with_options( |
| manifest=props.manifest, |
| remote=props.remote, |
| attributes=props.attributes, |
| ) |
| |
| integration_repo = checkout.root_dir.join("integration") |
| # Resolve the incoming release version and update refs. |
| update_refs = [] |
| if props.is_release_version: |
| release_version = api.release.get_current_release_version(integration_repo) |
| assert release_version, "failed to resolve release version" |
| # Override SDK ID with release version. |
| sdk_id = str(release_version) |
| update_ref = resolve_ref_to_update( |
| api=api, |
| ref_settings=props.ref_settings, |
| remote=props.remote, |
| integration_repo=integration_repo, |
| release_version=release_version, |
| ) |
| if update_ref: |
| update_refs.append(update_ref) |
| |
| extra_properties = jsonpb.MessageToDict( |
| SubbuildProperties(sdk_id=sdk_id), |
| preserving_proto_field_name=True, |
| ) |
| |
| image_builds = {} |
| if props.companion_images: |
| with api.step.nest("launch image subbuilds") as presentation: |
| image_builds = api.subbuild.launch( |
| builder_names=[i.builder for i in props.companion_images], |
| presentation=presentation, |
| # TODO(olivernewman): Swarming currently breaks if many builds |
| # are launched simultaneously with the same |
| # swarming_parent_run_id. Set the swarming_parent_run_id param |
| # here after that bug is fixed. |
| set_swarming_parent_run_id=False, |
| extra_properties=extra_properties, |
| ) |
| |
| with api.step.nest("launch SDK subbuilds") as presentation: |
| sdk_builds = api.subbuild.launch( |
| builder_names=props.sdk_subbuilders, |
| presentation=presentation, |
| extra_properties=extra_properties, |
| ) |
| |
| with api.step.nest("collect SDK subbuilds"): |
| sdk_builds = api.subbuild.collect( |
| build_ids=[b.build_id for b in sdk_builds.values()], |
| ) |
| |
| sdk_build_protos = [b.build_proto for b in sdk_builds.values()] |
    # Display SDK build statuses and surface any failures in the UI.
| api.buildbucket_util.display_builds( |
| step_name="display SDK subbuilds", |
| builds=sdk_build_protos, |
| raise_on_failure=True, |
| ) |
| |
| # Download the individual SDK archives |
| fetch_sdk_artifacts( |
| api, |
| checkout.root_dir.join("archives"), |
| sdk_builds, |
| SDK_ARCHIVE_OUTPUT_KEY, |
| "sdk", |
| ) |
| |
| with api.step.nest("gen") as presentation: |
| # Run GN to get access to tool_paths.json for looking up paths to |
| # prebuilt tools. |
| gn_results = api.build.gen( |
| checkout=checkout, |
| fint_params_path=props.fint_params_path, |
| presentation=presentation, |
| ) |
| |
| # Create a `python` executable that will be available in $PATH as an |
| # alias for `python3`, since there's no `python` executable in the |
| # checkout. |
| python3 = gn_results.tool("python3") |
| python_dir = api.path.mkdtemp("python") |
| api.file.symlink( |
| "symlink python3 to python", python3, python_dir.join("python") |
| ) |
| # As per RFC-0129, "Python scripts intended to be directly invoked MUST |
| # contain a shebang which ultimately references the vendored Python". |
| # This applies to all Python GN build scripts, some of which are |
| # triggered in the SDK tests. To unblock Python build script migration |
| # to "fuchsia-vendored-python", we need to provide it as a runtime |
| # executable in this checkout accordingly. |
| api.file.symlink( |
| "symlink python3 to fuchsia-vendored-python", |
| python3, |
| python_dir.join("fuchsia-vendored-python"), |
| ) |
| |
| def run_python(step_name, cmd, **kwargs): |
| # Override $PATH to make sure that the script only uses the prebuilt |
| # Python from the checkout, and doesn't fall back to whatever is |
| # ambiently installed. |
| path_prefix = [python_dir, api.path.dirname(python3)] |
| with api.context(env_prefixes={"PATH": path_prefix}): |
| api.step(step_name, [python3] + cmd, **kwargs) |
| |
| with api.step.nest("merge archives"): |
| full_archive_path = merge_archives( |
| api, checkout.root_dir, sdk_builds, run_python |
| ) |
| |
| if not props.skip_gn_sdk: |
| with api.step.nest("generate gn sdk"): |
| # Generate a GN workspace along with its tests. |
            # These tests are run for every SDK flavor.
| generate_gn_path = checkout.root_dir.join( |
| "scripts", "sdk", "gn", "generate.py" |
| ) |
| gn_sdk_dir = api.path["cleanup"].join("sdk-gn") |
| gn_sdk_archive = api.path["cleanup"].join("gn.tar.gz") |
| test_workspace_dir = api.path["cleanup"].join("tests") |
| |
| run_python( |
| "create gn sdk", |
| [ |
| generate_gn_path, |
| "--archive", |
| full_archive_path, |
| "--output", |
| gn_sdk_dir, |
| "--output-archive", |
| gn_sdk_archive, |
| "--tests", |
| test_workspace_dir, |
| ], |
| ) |
| with api.step.nest("test gn sdk"): |
| run_python( |
| "run gn tests", |
| [test_workspace_dir.join("run.py")], |
| timeout=datetime.timedelta(hours=1), |
| ) |
| |
| sdk_id_gcs_prefix = f"development/{sdk_id}" |
| sdk_gcs_prefix = f"{sdk_id_gcs_prefix}/sdk" |
| |
| package_overrides = {} |
| |
| product_bundles_v1_path = None |
| product_bundles_v2_path = None |
| if image_builds: |
| with api.step.nest("collect image subbuilds"): |
| image_builds = api.subbuild.collect( |
| build_ids=[b.build_id for b in image_builds.values()], |
| ) |
        # Display image build statuses and surface any failures in the UI.
| api.buildbucket_util.display_builds( |
| step_name="display image subbuilds", |
| builds=[b.build_proto for b in image_builds.values()], |
| raise_on_failure=True, |
| ) |
| |
| for b in image_builds.values(): |
| try: |
| cas_digests = api.subbuild.get_property(b.build_proto, "cas_digests") |
| except Exception: |
| continue |
| for package, digest in cas_digests.items(): |
| assert ( |
| package not in package_overrides |
| ), f"duplicate overridden package {package}" |
| package_overrides[package] = digest |
| |
| artifact_gcs_buckets = list( |
| set( |
| api.subbuild.get_property(b.build_proto, "artifact_gcs_bucket") |
| for b in image_builds.values() |
| ) |
| ) |
| assert len(artifact_gcs_buckets) == 1, artifact_gcs_buckets |
| bundle_dir = api.path.mkdtemp("bundle_v1") |
| api.bundle_fetcher.download( |
| build_ids=[b.build_id for b in image_builds.values()], |
| gcs_bucket=artifact_gcs_buckets[0], |
| out_dir=bundle_dir, |
| ) |
| product_bundles_v1_path = f"{sdk_gcs_prefix}/{PRODUCT_BUNDLES_JSON}" |
| api.gsutil.upload( |
| bucket=props.gcs_bucket, |
| src=bundle_dir.join(PRODUCT_BUNDLES_JSON), |
| dst=product_bundles_v1_path, |
| name=f"upload {PRODUCT_BUNDLES_JSON} v1", |
| # Publicly available. |
| unauthenticated_url=True, |
| ) |
| |
| bundle_dir = api.path.mkdtemp("bundle_v2") |
| api.bundle_fetcher.product_list( |
| build_ids=[b.build_id for b in image_builds.values()], |
| gcs_bucket=artifact_gcs_buckets[0], |
| out_dir=bundle_dir, |
| ) |
| product_bundles_v2_path = f"{sdk_id_gcs_prefix}/{PRODUCT_BUNDLES_JSON}" |
| api.gsutil.upload( |
| bucket=props.gcs_bucket, |
| src=bundle_dir.join(PRODUCT_BUNDLES_JSON), |
| dst=product_bundles_v2_path, |
| name=f"upload {PRODUCT_BUNDLES_JSON} v2", |
| # Publicly available. |
| unauthenticated_url=True, |
| ) |
| |
| if props.relocate_image_archives: |
| # Relocate the image archives into a well-known layout based on |
| # SDK ID. Requires that the (deprecated) archives are published to |
| # GCS. Unfortunately they are still required by some downstream |
| # users. |
| relocate_artifacts( |
| api, |
| [b.build_proto for b in image_builds.values()], |
| props.companion_images, |
| sdk_id, |
| props.gcs_bucket, |
| ) |
| |
| # Publish the core and GN SDK. |
| # |
| # GCS publishing paths: |
| # gs://fuchsia/development/${sdk_id}/sdk/${platform} |
| # |-- core.tar.gz |
| # `-- gn.tar.gz |
| # |
| # CIPD publishing paths (versioning is built into CIPD): |
| # https://chrome-infra-packages.appspot.com/p/fuchsia/sdk/ |
| # |-- core |
| # | `-- ${platform} |
| # `-- gn |
| # `-- ${platform} |
| |
| # Publish core. |
| gcs_archive_path = ( |
| f"{sdk_gcs_prefix}/{api.platform.name}-amd64/{props.sdk_name}.tar.gz" |
| ) |
| cipd_pkg_name = "%s/sdk/%s/${platform}" % (props.cipd_root, props.sdk_name) |
| cipd_metadata = [ |
| ("jiri_snapshot", sdk_id), |
| ("version", sdk_id), |
| ] |
| revision = build.input.gitiles_commit.id |
| # The return value will be a CAS digest if we've uploaded to CAS, otherwise |
| # it will be None. |
| cas_digest = upload_core( |
| api, |
| sdk_name=props.sdk_name, |
| gcs_archive_bucket=props.gcs_bucket, |
| gcs_archive_path=gcs_archive_path, |
| cipd_pkg_name=cipd_pkg_name, |
| archive_path=full_archive_path, |
| cipd_metadata=cipd_metadata, |
| update_refs=update_refs, |
| sdk_id=sdk_id, |
| revision=revision, |
| is_release_version=props.is_release_version, |
| checkout_root=checkout.root_dir, |
| ) |
| package_overrides[cipd_pkg_name] = cas_digest |
| |
| # Publish GN SDK. |
| if not props.skip_gn_sdk: |
| with api.step.nest("publish gn sdk"): |
| # Upload SDK dir to CIPD and tarball to GCS. |
| gcs_path = f"{sdk_gcs_prefix}/{api.platform.name}-amd64/gn.tar.gz" |
| api.gsutil.upload( |
| bucket=props.gcs_bucket, |
| src=gn_sdk_archive, |
| dst=gcs_path, |
| name=f"upload gn fuchsia-sdk {sdk_id}", |
| # Publicly available. |
| unauthenticated_url=True, |
| ) |
| |
| # Upload GN SDK CIPD. |
| if props.is_release_version: |
| api.cipd_util.upload_package( |
| "%s/sdk/gn/${platform}" % props.cipd_root, |
| gn_sdk_dir, |
| search_tag={"git_revision": revision}, |
| repository=None, |
| refs=update_refs, |
| metadata=cipd_metadata, |
| ) |
| |
| # Download the Bazel SDK from CAS. |
| bazel_sdk_dir = checkout.root_dir.join("bazel_artifacts") |
| fetch_sdk_artifacts( |
| api, bazel_sdk_dir, sdk_builds, BAZEL_SDK_OUTPUT_KEY, "bazel sdk" |
| ) |
| |
| merged_bazel_sdk = merge_bazel_sdk( |
| api, checkout.root_dir, bazel_sdk_dir, gn_results.tool("buildifier") |
| ) |
| with api.step.nest("publish bazel sdk"): |
| cipd_pkg_name = "%s/sdk/%s/fuchsia-bazel-rules/${platform}" % ( |
| props.cipd_root, |
| props.sdk_name, |
| ) |
| if props.is_release_version: |
| api.cipd_util.upload_package( |
| cipd_pkg_name, |
| merged_bazel_sdk, |
| search_tag={"git_revision": revision}, |
| repository=None, |
| refs=update_refs, |
| metadata=cipd_metadata, |
| ) |
| else: |
| package_overrides[cipd_pkg_name] = api.cas_util.upload(merged_bazel_sdk) |
| |
| if ( |
| props.external_testing.gerrit_host |
| or props.sdk_override_configs.use_gclient_variables |
| ): |
| with api.step.nest("run external tests") as presentation: |
| run_external_tests( |
| api, |
| sdk_id=sdk_id, |
| bucket=props.gcs_bucket, |
| sdk_gcs_prefix=sdk_gcs_prefix, |
| gcs_archive_path=gcs_archive_path, |
| product_bundles_v1_path=product_bundles_v1_path, |
| product_bundles_v2_path=product_bundles_v2_path, |
| package_overrides=package_overrides, |
| sdk_override_configs=props.sdk_override_configs, |
| external_testing=props.external_testing, |
| ) |
| # TODO(atyfto): Currently we trigger and collect Gerrit-based external tests |
| # and TAP tests serially, which is not efficient if both are specified by |
| # the proto. If we ever have builders which want to run both, we should |
| # frontload both triggers ahead of their respective collects. |
| if props.fxt_options.tap_projects or props.fxt_options.guitar_config: |
| api.fxt.orchestrate_fxt_tests( |
| bucket=props.gcs_bucket, |
| namespace=sdk_id, |
| options=props.fxt_options, |
| ) |
| |
| |
| def merge_archives(api, checkout_dir, sdk_builds, run_python): |
| """Merge the SDK archives for each target into a single archive.""" |
| merge_path = checkout_dir.join("scripts", "sdk", "merger", "merge.py") |
| full_archive_path = api.path["cleanup"].join("merged_sdk_archive.tar.gz") |
| |
| sdk_archives_dir = checkout_dir.join("archives") |
| sdk_archives_paths = api.file.glob_paths( |
| "get sdk paths", |
| sdk_archives_dir, |
| "*/*.tar.gz", |
| test_data=[f"{b}/core.tar.gz" for b in sdk_builds], |
| ) |
| |
| def merge_pair(first, second): |
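        """Merges two SDK archives and returns the path of the merged
        archive."""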
| output = api.path.mkstemp("merge") |
| run_python( |
| f"merge {api.path.basename(first)} and {api.path.basename(second)}", |
| [ |
| merge_path, |
| "--first-archive", |
| first, |
| "--second-archive", |
| second, |
| "--output-archive", |
| output, |
| ], |
| ) |
| return output |
| |
| # Concurrently merge pairs of archives until we're left with a single |
| # archive. |
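    #
    # For example (illustrative), with initial archives [a, b, c]:
    #   spawn merge(c, b)            -> stack = [a]
    #   wait for 1 merge to finish   -> stack = [a, cb]
    #   spawn merge(cb, a)           -> stack = []
    #   wait for remaining merge     -> stack = [cba]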
| stack = sdk_archives_paths |
| futures = [] |
| while len(stack) > 1: |
| # Create Futures out of pairs in the stack. |
| while len(stack) > 1: |
| futures.append(api.futures.spawn(merge_pair, stack.pop(), stack.pop())) |
        # Wait until enough merges have completed for the stack to grow back
        # to length 2, so that more futures can be spawned.
| done = api.futures.wait(futures, count=2 - len(stack)) |
| for f in done: |
| futures.remove(f) |
| stack.append(f.result()) |
| |
| api.file.move( |
| "create merged_sdk_archive.tar.gz", |
| stack[0], |
| full_archive_path, |
| ) |
| |
| return full_archive_path |
| |
| |
| def merge_bazel_sdk(api, checkout_dir, bazel_sdk_dir, buildifier): |
| """Merge the Bazel SDK subdirs for each target into one directory.""" |
| |
| merged_bazel_sdk = bazel_sdk_dir.join("bazel_sdk") |
| |
| api.step( |
| "collect architecture-specific variants of bazel sdk to merged directory", |
| cmd=[ |
| "python3", |
| checkout_dir.join("scripts", "sdk", "bazel", "merge_bazel_wrapper.py"), |
| "--source-dir", |
| bazel_sdk_dir, |
| "--dest-dir", |
| merged_bazel_sdk, |
| "--buildifier-path", |
| buildifier, |
| ], |
| ) |
| return merged_bazel_sdk |
| |
| |
| def upload_core( |
| api, |
| sdk_name, |
| gcs_archive_bucket, |
| gcs_archive_path, |
| cipd_pkg_name, |
| archive_path, |
| cipd_metadata, |
| update_refs, |
| revision, |
| sdk_id, |
| is_release_version, |
| checkout_root, |
| ): |
| sdk_dir = api.path["cleanup"].join(sdk_name) |
| |
| # Extract the archive to a directory for CIPD processing. |
| with api.step.nest("extract " + sdk_name): |
| api.file.ensure_directory("create sdk dir", sdk_dir) |
| api.archive.extract( |
| step_name="unpack sdk archive", |
| archive_file=archive_path, |
| output=sdk_dir, |
| ) |
| with api.step.nest("upload " + sdk_name): |
| api.gsutil.upload( |
| bucket=gcs_archive_bucket, |
| src=archive_path, |
| dst=gcs_archive_path, |
| link_name="archive", |
| name=f"upload {sdk_name} fuchsia-sdk {sdk_id}", |
| # Publicly available. |
| unauthenticated_url=True, |
| ) |
| |
| # Note that this will upload the snapshot to a location different from the |
| # path that api.fuchsia copied it to. This uses a path based on the hash of |
| # the SDK artifact, not based on the hash of the snapshot itself. Clients |
| # can use this to find the snapshot used to build a specific SDK artifact. |
| snapshot_file = api.path["cleanup"].join("jiri.snapshot") |
| with api.context(cwd=checkout_root): |
| api.jiri.snapshot(snapshot_file) |
| api.gsutil.upload( |
| bucket="fuchsia-snapshots", |
| src=snapshot_file, |
| dst=sdk_id, |
| link_name="jiri.snapshot", |
| name="upload jiri.snapshot", |
| ) |
| |
| # Record the sdk_id of the most recently uploaded archive for downstream |
| # autorollers, except for the experimental SDK. |
| # TODO(fxbug.dev/117322): Delete this check for the name when the |
| # experimental SDK is deprecated or the update ref files are split |
| # out per version. This check prevents users of the core SDK from |
| # attempting to get the wrong version if the experimental SDK builds |
| # but the core SDK does not. |
| # Experimental SDK users access it via CIPD and don't use this file. |
| if sdk_name != "experimental": |
| for update_ref in update_refs: |
| upper_update_ref = update_ref.upper() |
| api.gsutil.upload( |
| bucket=gcs_archive_bucket, |
| src=api.raw_io.input(sdk_id), |
| dst=f"development/{upper_update_ref}_{api.platform.name.upper()}", |
| link_name=upper_update_ref, |
| name=f"upload {update_ref} sdk_id", |
| ) |
| |
| # Upload to CIPD for releases, otherwise upload to CAS such that it may |
| # be used by downstream presubmits. |
| if is_release_version: |
| api.cipd_util.upload_package( |
| cipd_pkg_name, |
| sdk_dir, |
| search_tag={"git_revision": revision}, |
| repository=None, |
| refs=update_refs, |
| metadata=cipd_metadata, |
            # The default timeout (5 mins) is sometimes not sufficient. See
| # crbug.com/1404130 for more details. |
| verification_timeout="10m", |
| ) |
| return None |
| return api.cas_util.upload(sdk_dir) |
| |
| |
| def GenTests(api): |
| def add_hash_property(build): |
| build.output.properties[SDK_ARCHIVE_OUTPUT_KEY] = "###HASH###" |
| build.output.properties[BAZEL_SDK_OUTPUT_KEY] = "###HASH###" |
| return build |
| |
| def properties(**kwargs): |
| props = { |
| "gcs_bucket": "fuchsia", |
| "sdk_name": "core", |
| "cipd_root": "fuchsia", |
| "is_release_version": False, |
| "subbuild_collect_timeout_secs": 3600, |
| "fint_params_path": "fint_params/sdk.textproto", |
| "manifest": "flower", |
| "remote": "https://fuchsia.googlesource.com/integration", |
| "revision": api.jiri.example_revision, |
| "sdk_subbuilders": [ |
| "builder-arm64-build_only", |
| "builder-x64-build_only", |
| ], |
| } |
| props.update(kwargs) |
| return api.properties(**props) |
| |
| ci_subbuilds = api.subbuild.child_build_steps( |
| builds=[ |
| add_hash_property( |
| api.subbuild.ci_build_message( |
| build_id=123, |
| builder="sdk-core-linux-arm64-build_only", |
| status="SUCCESS", |
| ) |
| ), |
| add_hash_property( |
| api.subbuild.ci_build_message( |
| build_id=456, |
| builder="sdk-core-linux-x64-build_only", |
| status="SUCCESS", |
| ) |
| ), |
| ], |
| launch_step="launch SDK subbuilds", |
| collect_step="collect SDK subbuilds", |
| ) |
| ci_subbuilds_infra_failure = api.subbuild.child_build_steps( |
| builds=[ |
| add_hash_property( |
| api.subbuild.ci_build_message( |
| build_id=123, |
| builder="sdk-core-linux-arm64-build_only", |
| status="INFRA_FAILURE", |
| ) |
| ), |
| add_hash_property( |
| api.subbuild.ci_build_message( |
| build_id=456, |
| builder="sdk-core-linux-x64-build_only", |
| status="INFRA_FAILURE", |
| ) |
| ), |
| ], |
| launch_step="launch SDK subbuilds", |
| collect_step="collect SDK subbuilds", |
| ) |
| ci_subbuilds_failure = api.subbuild.child_build_steps( |
| builds=[ |
| add_hash_property( |
| api.subbuild.ci_build_message( |
| build_id=123, |
| builder="sdk-core-linux-arm64-build_only", |
| status="FAILURE", |
| ) |
| ), |
| add_hash_property( |
| api.subbuild.ci_build_message( |
| build_id=456, |
| builder="sdk-core-linux-x64-build_only", |
| status="FAILURE", |
| ), |
| ), |
| ], |
| launch_step="launch SDK subbuilds", |
| collect_step="collect SDK subbuilds", |
| ) |
| cq_subbuilds = api.subbuild.child_build_steps( |
| builds=[ |
| add_hash_property( |
| api.subbuild.try_build_message( |
| build_id=123, |
| builder="sdk-core-linux-arm64-build_only", |
| status="SUCCESS", |
| ) |
| ), |
| add_hash_property( |
| api.subbuild.try_build_message( |
| build_id=456, |
| builder="sdk-core-linux-x64-build_only", |
| status="SUCCESS", |
| ) |
| ), |
| ], |
| launch_step="launch SDK subbuilds", |
| collect_step="collect SDK subbuilds", |
| ) |
| |
| image_build = api.subbuild.ci_build_message( |
| builder="###SDK_IMAGE_BUILDER###", status="SUCCESS", build_id=123456789 |
| ) |
| image_build.output.properties["artifact_gcs_bucket"] = "fuchsia-artifacts" |
| image_build.output.properties["gcs_bucket"] = "###BUCKET###" |
| image_build.output.properties["cas_digests"] = {"fuchsia/tools/foo": "abc123/50"} |
| other_image_build = api.subbuild.ci_build_message( |
| builder="###OTHER_SDK_IMAGE_BUILDER###", status="SUCCESS", build_id=9876543210 |
| ) |
| other_image_build.output.properties["artifact_gcs_bucket"] = "fuchsia-artifacts" |
| other_image_build.output.properties["gcs_bucket"] = "###BUCKET###" |
| |
| ci_image_builds = api.subbuild.child_build_steps( |
| builds=[image_build, other_image_build], |
| launch_step="launch image subbuilds", |
| collect_step="collect image subbuilds", |
| ) |
| |
| describe = api.release.ref_to_release_version("releases/0.20191018.0.1") |
| ref_settings = [ |
| {"branch": "refs/heads/main", "update_ref": "latest"}, |
| ] |
| release_versions = api.step_data( |
| "get release versions on h3ll0", |
| api.raw_io.stream_output_text( |
| "\n".join( |
| [ |
| "releases/0.20191019.0.1", |
| "releases/0.20191018.0.1", |
| ] |
| ) |
| ), |
| ) |
| no_release_versions = api.step_data( |
| "get release versions on h3ll0", api.raw_io.stream_output_text("") |
| ) |
| |
| companion_images = [ |
| { |
| "name": "###SDK_IMAGE###", |
| "builder": "###SDK_IMAGE_BUILDER###", |
| }, |
| { |
| "name": "###OTHER_SDK_IMAGE###", |
| "builder": "###OTHER_SDK_IMAGE_BUILDER###", |
| }, |
| ] |
| |
| yield ( |
| api.buildbucket_util.test("cq_with_attributes", tryjob=True) |
| + properties(attributes=["foo"]) |
| + cq_subbuilds |
| ) |
| yield ( |
| api.buildbucket_util.test("fxt_tests") |
| + properties( |
| fxt_options=api.fxt.Options( |
| sdk_mode=True, |
| tap_projects=["foo"], |
| guitar_config=jsonpb.ParseDict( |
| {"projectfoo": "bar"}, struct_pb2.Struct() |
| ), |
| use_staging_host=False, |
| timeout_secs=60 * 60, |
| ), |
| ) |
| + ci_subbuilds |
| + api.fxt.orchestrate_fxt_tests() |
| ) |
| yield ( |
| api.buildbucket_util.test("external_testing") |
| + properties( |
| ref_settings=ref_settings, |
| sdk_override_configs=InputProperties.SDKOverrideConfigs( |
| override_config_file="build/fuchsia/sdk_override.txt", |
| ), |
| external_testing=api.presubmit_util.Options( |
| gerrit_host="chromium-review.googlesource.com", |
| gerrit_project="chromium/src", |
| timeout_secs=7200, |
| tryjobs=["chromium/try/fuchsia_x64"], |
| tryjobs_wait_secs=180, |
| ), |
| ) |
| + ci_subbuilds |
| + api.presubmit_util.create_cl( |
| "run external tests.create CL", |
| test_data={ |
| "number": 123456, |
| "current_revision": "foo", |
| "revisions": {"foo": {"number": 1}}, |
| }, |
| ) |
| + api.presubmit_util.collect_tryjobs( |
| "run external tests.collect tryjobs", builders=["chromium/try/fuchsia_x64"] |
| ) |
| ) |
| yield ( |
| api.buildbucket_util.test("skip_testing_and_gn_sdk") |
| + properties(skip_gn_sdk=True) |
| + ci_subbuilds |
| ) |
| tryjob_msgs = [ |
| api.buildbucket.try_build_message( |
| project="chromium", |
| bucket="try", |
| builder="fuchsia_x64", |
| ), |
| ] |
| yield ( |
| api.buildbucket_util.test("explicit_tryjobs") |
| + properties( |
| ref_settings=ref_settings, |
| sdk_override_configs=InputProperties.SDKOverrideConfigs( |
| bucket_filepath="build/fuchsia/sdk-bucket.txt", |
| override_config_file="build/fuchsia/sdk_override.txt", |
| version_filepath="build/fuchsia/sdk.version", |
| version_list_filepath="build/fuchsia/sdk.list", |
| version_list_contents="sdk.version", |
| ), |
| external_testing=api.presubmit_util.Options( |
| gerrit_host="chromium-review.googlesource.com", |
| gerrit_project="chromium/src", |
| timeout_secs=7200, |
| tryjobs=["chromium/try/fuchsia_x64"], |
| trigger_tryjobs=True, |
| tryjobs_wait_secs=180, |
| ), |
| ) |
| + ci_subbuilds |
| + api.presubmit_util.create_cl( |
| "run external tests.create CL", |
| test_data={ |
| "number": 123456, |
| "current_revision": "foo", |
| "revisions": {"foo": {"number": 1}}, |
| }, |
| ) |
| + api.presubmit_util.trigger_tryjobs( |
| "run external tests.trigger tryjobs", tryjob_msgs=tryjob_msgs |
| ) |
| + api.presubmit_util.collect_tryjobs( |
| "run external tests.collect tryjobs", tryjob_msgs=tryjob_msgs |
| ) |
| ) |
| yield ( |
| api.buildbucket_util.test("explicit_tryjobs_without_cl") |
| + properties( |
| ref_settings=ref_settings, |
| sdk_override_configs=InputProperties.SDKOverrideConfigs( |
| use_gclient_variables=True, |
| use_package_overrides=False, |
| ), |
| external_testing=api.presubmit_util.Options( |
| timeout_secs=7200, |
| tryjobs=["chromium/try/fuchsia_x64"], |
| trigger_tryjobs=True, |
| ), |
| companion_images=companion_images, |
| ) |
| + ci_subbuilds |
| + ci_image_builds |
| + api.presubmit_util.trigger_tryjobs( |
| "run external tests.trigger tryjobs", tryjob_msgs=tryjob_msgs |
| ) |
| + api.presubmit_util.collect_tryjobs( |
| "run external tests.collect tryjobs", tryjob_msgs=tryjob_msgs |
| ) |
| ) |
| yield ( |
| api.buildbucket_util.test("package_overrides") |
| + properties( |
| ref_settings=ref_settings, |
| sdk_override_configs=InputProperties.SDKOverrideConfigs( |
| use_package_overrides=True, |
| ), |
| companion_images=companion_images, |
| external_testing=api.presubmit_util.Options( |
| gerrit_host="chromium-review.googlesource.com", |
| gerrit_project="chromium/src", |
| tryjobs=["chromium/try/fuchsia_x64"], |
| ), |
| ) |
| + ci_subbuilds |
| + ci_image_builds |
| + api.presubmit_util.create_cl( |
| "run external tests.create CL", |
| test_data={ |
| "number": 123456, |
| "current_revision": "foo", |
| "revisions": {"foo": {"number": 1}}, |
| }, |
| ) |
| + api.presubmit_util.collect_tryjobs( |
| "run external tests.collect tryjobs", builders=["chromium/try/fuchsia_x64"] |
| ) |
| ) |
| yield ( |
| api.buildbucket_util.test("release_ci", git_ref="refs/heads/release") |
| + properties( |
| is_release_version=True, |
| companion_images=companion_images, |
| relocate_image_archives=True, |
| ref_settings=ref_settings, |
| ) |
| + ci_subbuilds |
| + ci_image_builds |
| + describe |
| + release_versions |
| ) |
| yield ( |
| api.buildbucket_util.test( |
| "release_ci_no_update_ref", git_ref="refs/heads/release" |
| ) |
| + properties( |
| is_release_version=True, |
| companion_images=companion_images, |
| relocate_image_archives=True, |
| ref_settings=ref_settings, |
| ) |
| + ci_subbuilds |
| + ci_image_builds |
| + describe |
| + no_release_versions |
| ) |
| yield ( |
| api.buildbucket_util.test("subbuild_build_failure", status="FAILURE") |
| + properties() |
| + ci_subbuilds_failure |
| ) |
| yield ( |
| api.buildbucket_util.test("subbuild_infra_failure", status="INFRA_FAILURE") |
| + properties() |
| + ci_subbuilds_infra_failure |
| ) |