| # Copyright 2019 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for building Fuchsia and isolating build artifacts.""" |
| |
| from recipe_engine import post_process |
| |
| from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2 |
| from PB.infra.coverage import CoverageCollectionType |
| from PB.recipe_engine.result import RawResult |
| from PB.recipes.fuchsia.fuchsia.build import InputProperties |
| from PB.recipes.fuchsia.fuchsia.spec import Fuchsia |
| |
# These are the keys under which this recipe outputs the CAS digests of the
# SDK archives and the Bazel SDK. Must be kept in sync with sdk.py.
| SDK_ARCHIVE_OUTPUT_KEY = "sdk_archive_cas_digest" |
| BAZEL_SDK_OUTPUT_KEY = "bazel_sdk_cas_digest" |
# This is the key for a map from tool name to CAS digest, covering the tools
# provided in the cas_tools property. Must be kept in sync with sdk.py.
| CAS_TOOLS_OUTPUT_KEY = "tools_cas_digests" |
| |
| DEPS = [ |
| "fuchsia/artifacts", |
| "fuchsia/autocorrelator", |
| "fuchsia/build", |
| "fuchsia/buildbucket_util", |
| "fuchsia/cas_util", |
| "fuchsia/checkout", |
| "fuchsia/cipd_util", |
| "fuchsia/fuchsia", |
| "fuchsia/gce", |
| "fuchsia/gerrit", |
| "fuchsia/git", |
| "fuchsia/recipe_testing", |
| "fuchsia/release", |
| "fuchsia/sso", |
| "fuchsia/swarming_retry", |
| "fuchsia/testing_requests", |
| "fuchsia/testsharder", |
| "recipe_engine/buildbucket", |
| "recipe_engine/context", |
| "recipe_engine/file", |
| "recipe_engine/json", |
| "recipe_engine/path", |
| "recipe_engine/properties", |
| "recipe_engine/raw_io", |
| "recipe_engine/step", |
| "recipe_engine/swarming", |
| ] |
| |
| PROPERTIES = InputProperties |
| |
| |
| def RunSteps(api, props): |
| if props.parent_id: |
| # Present a link to make it easier to navigate to the parent's build |
| # results page. |
| with api.step.nest("parent build") as presentation: |
| presentation.links[props.parent_id] = parent_build_url(api, props.parent_id) |
| |
| spec = props.spec |
| api.fuchsia.setup(spec) |
| |
| bb_input = api.buildbucket.build.input |
| |
| if spec.build.run_tests and not props.parent_id: |
| raise api.step.empty( |
| "no parent_id", |
| step_text="subbuilds can only be triggered by parent builds", |
| status=api.step.INFRA_FAILURE, |
| ) |
| |
| checkout = api.checkout.from_spec( |
| spec.checkout, use_incremental_cache=spec.build.incremental |
| ) |
| |
| with api.step.nest("got_revision") as presentation: |
| # Set got_revision to the baseline integration revision for the binary-size |
| # Gerrit plugin. |
| # TODO(olivernewman): Move this into the checkout recipe module, as it's |
| # not specific to this recipe. |
| presentation.properties[api.checkout.GOT_REVISION_PROPERTY] = ( |
| checkout.integration_revision |
| ) |
| |
| # Load test modifiers before building so we can fail fast if the message is |
| # malformed, which avoids wasting a build. |
| multipliers = [] |
| if bb_input.gerrit_changes: |
| commit_msg = get_commit_msg(api, bb_input, checkout) |
| # Don't skip unaffected tests if collecting absolute coverage or if the |
| # commit message says to run all tests. |
        if (
            props.coverage_collection == CoverageCollectionType.FULL
            or api.testsharder.should_run_all_tests(commit_msg)
        ):
| spec.test.skip_unaffected_tests = False |
| # Don't multiply tests for coverage builders. |
| if not props.coverage_collection: |
| with api.step.nest("test multipliers") as presentation: |
| multipliers = api.testsharder.extract_multipliers(commit_msg) |
| presentation.logs["multipliers"] = api.json.dumps( |
| [m.render_to_jsonish() for m in multipliers], indent=2 |
| ) |
| |
    # Top-level builds and SDK subbuilds upload under their own build IDs,
    # while other subbuilds upload under their parent's ID. This gives each
    # SDK subbuild its own namespace so that concurrent subbuilds don't
    # clobber each other's uploads.
| if not props.parent_id or spec.build.sdk_subbuild: |
| upload_namespace = api.buildbucket_util.id |
| else: |
| upload_namespace = props.parent_id |
| |
| if spec.artifact_gcs_bucket: |
| checkout.upload_results(spec.artifact_gcs_bucket, namespace=upload_namespace) |
| |
| repo_path = None |
| if props.perfcompare: |
| # Do input checks to catch problems before doing the build. |
| assert ( |
| bb_input.gerrit_changes |
| ), "perfcompare mode is for CQ builds only, not CI: no Gerrit changes found" |
| project = bb_input.gerrit_changes[0].project |
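        # Locate the repo the CL under test applies to; the "without CL" build
        # below unapplies the topmost commit in this repo.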
| with api.context(cwd=checkout.root_dir): |
| repo_path = api.path.abs_to_path(checkout.project(project)["path"]) |
| |
| try: |
| with api.autocorrelator.context( |
| ci_base_commit=checkout.integration_revision, ignore_skipped_build=True |
| ): |
            result = run_build_steps(
| api, |
| props, |
| spec, |
| upload_namespace, |
| checkout, |
| multipliers, |
| without_cl=False, |
| ) |
| except api.step.StepFailure as exc: |
| if ( |
| spec.checkout.do_not_rebase_patch |
| and api.buildbucket_util.is_tryjob |
| and api.autocorrelator.AUTOCORRELATOR_HEADER not in exc.reason |
| ): |
| summary_markdown = ( |
| "This builder does not rebase on top of HEAD, which may have caused " |
| + "the following build failure. If it looks unrelated to your CL, " |
| + "try rebasing the CL and relaunching the builder.\n\n" |
| + exc.reason |
| ) |
| raise api.step.StepFailure(summary_markdown) |
| raise exc |
| |
| if props.perfcompare: |
| with api.step.nest("build without CL"): |
| # Unapply the topmost Git commit that was applied from Gerrit. If |
| # the CQ is testing a stack of multiple CLs from Gerrit, the other |
| # CLs are left applied. |
| # TODO(mseaborn): This does not handle cases where the CL changed |
| # Jiri manifest files or contained a patches.json file. |
| with api.context(cwd=repo_path): |
| api.git.raw_checkout( |
| step_name='git checkout of "without CL" revision', |
| ref="HEAD^", |
| ) |
| run_build_steps( |
| api, |
| props, |
| spec, |
| upload_namespace + "_without_cl", |
| checkout, |
| multipliers, |
| without_cl=True, |
| ) |
| |
| |
| def run_build_steps( |
| api, props, spec, upload_namespace, checkout, modifiers, without_cl |
| ): |
| if spec.build.upload_results: |
        assert spec.gcs_bucket, "gcs_bucket must be set if build.upload_results is set"
| |
| build_results = api.build.with_options( |
| checkout, |
| spec.build.fint_params_path, |
| allow_dirty=without_cl, |
        # Setting collect_coverage to true makes the build use selective
        # instrumentation when run on a Gerrit change; otherwise the build is
        # fully instrumented. So only set it to true when collecting selective
        # coverage, not full coverage.
| collect_coverage=props.coverage_collection == CoverageCollectionType.SELECTIVE, |
| incremental=spec.build.incremental, |
| sdk_id=props.sdk_id, |
| artifact_gcs_bucket=spec.artifact_gcs_bucket, |
| timeout_secs=spec.build.timeout_secs or 90 * 60, |
| upload_namespace=upload_namespace, |
| run_all_tests=not spec.test.skip_unaffected_tests, |
| use_sandboxing=spec.build.enable_sandboxing, |
| ) |
| |
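    # Record whether the caller explicitly requested multipliers before any
    # default modifiers are appended below; only explicitly requested
    # multipliers are reported back to Gerrit.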
| had_input_multipliers = bool(modifiers) |
| |
| with api.step.nest("check if build skipped") as presentation: |
| presentation.properties["skipped_because_unaffected"] = not build_results |
| if not build_results: |
| return |
| # Used by ManagedOS. |
| presentation.properties["target_arch"] = build_results.set_metadata.target_arch |
| |
| # In perfcompare mode, we want each test to run a predictable number |
| # of times, regardless of the files being changed. |
| use_affected_tests = not props.perfcompare |
| |
    # For simplicity, these are passed to testsharder.execute() even when
    # use_affected_tests is False; initialize them here so we can do that.
| affected_tests = [] |
| affected_tests_max_attempts = 0 |
| |
| if spec.build.upload_results: |
| assert spec.gcs_bucket |
| build_results.upload(gcs_bucket=spec.gcs_bucket, namespace=upload_namespace) |
| |
| if spec.build.run_tests and not without_cl: |
| affected_tests = build_results.affected_tests |
| no_work = build_results.no_work and spec.test.skip_unaffected_tests |
| # The orchestrator recipe checks this property to determine whether to |
| # skip testing. |
| with api.step.nest("record affected_tests_no_work") as presentation: |
| presentation.properties["affected_tests_no_work"] = no_work |
| if no_work: |
| return RawResult( |
| summary_markdown=( |
| "Testing will be skipped because the change did not affect the build graph." |
| ), |
| status=common_pb2.SUCCESS, |
| ) |
| max_attempts_per_test = 1 |
| if not spec.test.retry_task_on_test_failure: |
| max_attempts_per_test = ( |
| spec.test.max_attempts or api.swarming_retry.DEFAULT_MAX_ATTEMPTS |
| ) |
| affected_tests_max_attempts = max_attempts_per_test |
| if use_affected_tests or not spec.test.retry_task_on_test_failure: |
| # Add a default modifier to set max attempts for any other tests. |
| modifiers.append( |
| api.testsharder.TestModifier( |
| name="*", |
| total_runs=-1, |
| max_attempts=max_attempts_per_test, |
| ) |
| ) |
| |
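    # Maps CIPD package name to CAS digest for trees uploaded to CAS rather
    # than CIPD (i.e. on non-release builds); surfaced via the "cas_digests"
    # output property below.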
| cas_digests = {} |
| with api.step.nest( |
| "publish assembly artifacts" |
| if spec.build.assembly_artifacts_cipd_package |
| else "validate assembly artifacts" |
    ):
| tree = api.cipd_util.hardlink_tree(api.path.mkdtemp("assembly-artifacts")) |
| # TODO(fxbug.dev/111162): Migrate to artifactory CIPD/CAS upload |
| # manifests. |
| for dest, source in build_results.cipd_assembly_artifacts( |
| # If we're going to upload to CIPD, then the manifests must have |
| # been built. Otherwise it's ok for them to not have been built, but |
| # if a manifest was built then all the files it references must also |
| # have been built. |
| missing_ok=not spec.build.assembly_artifacts_cipd_package, |
| ).items(): |
| tree.register_link(source, tree.root / dest) |
| # Unconditionally create links to validate that the assembly artifacts |
| # manifest contents are valid even if we're not going to upload to CIPD. |
| tree.create_links("create links") |
| if spec.build.assembly_artifacts_cipd_package: |
| publish_tree( |
| api, |
| checkout, |
| spec, |
| spec.build.assembly_artifacts_cipd_package, |
| tree.root, |
| cas_digests, |
| ) |
| |
| if spec.board_cipd_prefix: |
| for board_name, board_path in build_results.boards.items(): |
            with api.step.nest(f"publish board to CIPD: {board_name}"):
| publish_tree( |
| api, |
| checkout, |
| spec, |
| "%s/%s" % (spec.board_cipd_prefix.rstrip("/"), board_name), |
| board_path, |
| cas_digests, |
| ) |
| |
| if spec.partitions_cipd_prefix: |
| for partitions_name, partitions_path in build_results.partitions.items(): |
            with api.step.nest(f"publish partitions to CIPD: {partitions_name}"):
| publish_tree( |
| api, |
| checkout, |
| spec, |
| "%s/%s" |
| % (spec.partitions_cipd_prefix.rstrip("/"), partitions_path), |
| partitions_path, |
| cas_digests, |
| ) |
| |
| if cas_digests: |
| with api.step.nest("set cas_digests output property") as presentation: |
| presentation.properties["cas_digests"] = cas_digests |
| |
    # In SDK subbuild mode, upload the SDK archives and the Bazel SDK to CAS.
| if spec.build.sdk_subbuild: |
| upload_paths = [] |
| for archive in build_results.gn_results.sdk_archives: |
| api.path.mock_add_paths(archive) |
| # TODO(fxbug.dev/92108): It will be safe to assume all archives are |
| # built after sdk builders are migrated to build the high-level sdk |
| # archives target. |
| if api.path.exists(archive): |
| upload_paths.append(archive) |
| if upload_paths: |
| sdk_archive_digest = api.cas_util.upload( |
| # Assumes all SDK archives are in the same directory. |
| api.path.dirname(upload_paths[0]), |
| upload_paths, |
| step_name="upload sdk archives to CAS", |
| ) |
| api.step.active_result.presentation.properties[SDK_ARCHIVE_OUTPUT_KEY] = ( |
| sdk_archive_digest |
| ) |
| |
| with api.step.nest("upload bazel sdk to cas"): |
| upload_dir = api.path.mkdtemp("bazel-sdk") / "upload-sdk" |
| api.path.mock_add_paths(build_results.bazel_sdk_path) |
| if api.path.exists(build_results.bazel_sdk_path): |
| # TODO(crbug.com/1216363): The cas CLI breaks when it encounters |
| # symlinks that point outside the directory being uploaded. |
| api.file.copytree( |
| f"materialize symlinks for {build_results.bazel_sdk_path}", |
| build_results.bazel_sdk_path, |
| upload_dir, |
| # Don't preserve symlinks, replace them with copies of their |
| # target files. |
| symlinks=False, |
| ) |
| |
| # Remove OWNERS files, which may be present because part of the |
| # Bazel SDK is a symlink back into the checkout. Including |
| # OWNERS files in the SDK would disrupt downstream consumers |
| # who also use OWNERS files. |
| api.file.rmglob("Remove OWNERS file", upload_dir, "**/OWNERS") |
| |
| bazel_sdk_digest = api.cas_util.upload(upload_dir) |
| api.step.active_result.presentation.properties[BAZEL_SDK_OUTPUT_KEY] = ( |
| bazel_sdk_digest |
| ) |
| |
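    # Upload each tool requested via the cas_tools property to CAS, recording
    # the digests so that consumers (e.g. the sdk recipe) can fetch the tools.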
| if props.cas_tools: |
| tools_cas_digests = {} |
| for tool in props.cas_tools: |
| tool_path = build_results.tool(tool) |
| tools_cas_digests[tool] = api.cas_util.upload( |
| api.path.dirname(tool_path), |
| [tool_path], |
| step_name=f"upload {tool} to CAS", |
| ) |
| api.step.active_result.presentation.properties[CAS_TOOLS_OUTPUT_KEY] = ( |
| tools_cas_digests |
| ) |
| |
| # Must be set before testing_requests.task_requests() is called. |
| api.artifacts.gcs_bucket = spec.artifact_gcs_bucket |
| api.artifacts.namespace = upload_namespace |
| |
| bb_input = api.buildbucket.build.input |
| |
| # If the user didn't specifically request multipliers, testsharder |
| # may still have produced them. In that case don't report anything. |
| should_report_multipliers = had_input_multipliers |
| if spec.build.run_tests: |
| if props.parent_id.isdigit(): |
| # Use parent build so that testing task requests refer to |
| # that build, which actually orchestrates testing. |
| buildbucket_build = api.buildbucket.get(int(props.parent_id)) |
| # If it's a try build, the parent build will not have its |
| # gitiles_commit populated (it's populated at runtime by |
| # recipe_wrapper, but that doesn't change the input values stored in |
| # Buildbucket). But the commit will have been passed through to the |
| # subbuild via the Buildbucket ScheduleBuild API, so we can get it |
| # from the current build. |
| buildbucket_build.input.gitiles_commit.CopyFrom(bb_input.gitiles_commit) |
| else: |
| # When the parent was launched by led, it's not possible to retrieve |
| # the parent build, so we fall back to using our own build. |
| # This is technically incorrect and any tests that rely on having |
| # correct buildbucket metadata may fail when run via led. Ideally |
| # we wouldn't have any tests that knew about buildbucket, but |
| # for now this is OK since none of those tests run in recipes CQ, |
| # which uses led to test recipes changes. |
| buildbucket_build = api.buildbucket.build |
| if not props.comment_led: |
| should_report_multipliers = False |
| |
| build_url = parent_build_url(api, props.parent_id) |
| |
| shards = api.testsharder.execute( |
| "create test shards", |
| testsharder_path=build_results.tool("testsharder"), |
| build_dir=build_results.build_dir, |
| max_shard_size=spec.test.max_shard_size, |
| target_test_count=spec.test.target_test_count, |
| target_duration_secs=spec.test.target_shard_duration_secs, |
| per_test_timeout_secs=spec.test.per_test_timeout_secs, |
| max_shards_per_env=spec.test.max_shards_per_env, |
| modifiers=modifiers, |
| tags=spec.build.environment_tags, |
| use_affected_tests=use_affected_tests, |
| affected_tests=affected_tests, |
| affected_tests_multiply_threshold=spec.test.affected_tests_multiply_threshold, |
| affected_tests_max_attempts=affected_tests_max_attempts, |
| # Only run affected tests when collecting incremental coverage. |
| affected_only=props.affected_tests_only and affected_tests, |
| product_bundle_name=build_results.product_bundle_name, |
| image_deps=spec.test.use_cas_for_images, |
| hermetic_deps=spec.test.use_cas, |
| pave=spec.test.pave, |
| disabled_device_types=spec.test.disabled_device_types, |
| skip_unaffected_tests=( |
| spec.test.skip_unaffected_tests |
| and use_affected_tests |
| # Recipe and integration changes can impact tests in ways that |
| # aren't encompassed by affected tests analysis, so we should |
| # run all tests on such changes. |
| and not checkout.contains_integration_patch |
| and not api.recipe_testing.enabled |
| ), |
| per_shard_package_repos=spec.test.per_shard_package_repos, |
| cache_test_packages=spec.test.cache_test_packages, |
| ) |
| |
| if bb_input.gerrit_changes and should_report_multipliers: |
| gerrit_change = bb_input.gerrit_changes[0] |
| report_multipliers(api, shards, gerrit_change, build_url) |
| |
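        # Don't create task requests for shards that the testsharder marked as
        # skippable (e.g. shards containing only unaffected tests).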
| shards_to_run = [s for s in shards if not s.should_skip] |
| with api.step.nest("construct task requests"): |
| task_requests = api.testing_requests.task_requests( |
| shards_to_run, build_results, buildbucket_build, spec.test |
| ) |
| |
| orchestration_inputs = api.build.test_orchestration_inputs_from_build_results( |
| build_results, |
| task_requests, |
| shards, |
| include_generated_sources=any( |
| v in build_results.set_metadata.variants |
| for v in ["coverage", "coverage-rust", "coverage-cts", "profile"] |
| ), |
| ) |
| orchestration_inputs_digest = orchestration_inputs.upload() |
| dest_property = api.build.test_orchestration_inputs_property_name(without_cl) |
| api.step.empty("emit orchestration_inputs_hash").presentation.properties[ |
| dest_property |
| ] = orchestration_inputs_digest |
| |
| if spec.artifact_gcs_bucket: |
| build_results.upload_artifacts(sign_artifacts=spec.build.sign_artifacts) |
| if spec.test.test_on_gce and spec.build.run_tests: |
| api.gce.create_image( |
| spec.test.gce_mediator.endpoint, |
| spec.test.gce_mediator.cloud_project, |
| build_results.images, |
| api.buildbucket.backend_task_id_from_build(buildbucket_build), |
| spec.artifact_gcs_bucket, |
| upload_namespace, |
| ) |
| |
    # This property holds the GCS location of this build's assembly manifest.
    # The size checker uses it in future builds to compute the size diff
    # between the two builds.
| if spec.build.report_binary_sizes: |
| with api.step.nest("set assembly_manifest_url property") as presentation: |
| presentation.properties["assembly_manifest_url"] = ( |
| api.artifacts.assembly_manifest_url() |
| ) |
| |
| if spec.build.report_binary_sizes: |
| build_results.report_binary_sizes() |
| if spec.build.report_binary_sizes and spec.build.size_diff_ci_bucket: |
| # Run this check as late as possible to increase the odds that a |
| # baseline CI build is found. |
| build_results.diff_product_size( |
| api.sso.sso_to_https(spec.checkout.remote), |
| checkout.integration_revision, |
| spec.build.size_diff_ci_bucket, |
| ci_builder=spec.build.size_diff_ci_builder, |
| ) |
| build_results.check_size_creep( |
| api.sso.sso_to_https(spec.checkout.remote), |
| checkout.integration_revision, |
| spec.build.size_diff_ci_bucket, |
| ci_builder=spec.build.size_diff_ci_builder, |
| gerrit_changes=bb_input.gerrit_changes, |
| size_creep_label=spec.build.size_creep_label, |
| ) |
| |
    # Run this last so that a size diff still runs even if the budgets have
    # been exceeded.
| if spec.build.check_size_budgets: |
| build_results.check_size_budgets() |
| |
| |
| def publish_tree(api, checkout, spec, pkg_name, root_dir, cas_digests): |
    # Upload to CIPD for releases; otherwise upload to CAS so the artifacts
    # can be consumed by downstream presubmits.
| if spec.checkout.is_release_version: |
| integration_repo = checkout.root_dir / "integration" |
| assert checkout.release_version, "failed to resolve release version" |
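        # Only move the "latest" ref if this release version is on the canary
        # release branch.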
| cipd_ref = resolve_cipd_ref_to_update( |
| api, |
| spec.checkout.remote, |
| integration_repo, |
| checkout.release_version, |
| ) |
| api.cipd_util.upload_package( |
| pkg_name=pkg_name, |
| pkg_root=root_dir, |
| refs=[cipd_ref] if cipd_ref else None, |
| metadata=[("version", str(checkout.release_version))], |
| search_tag={"git_revision": checkout.integration_revision}, |
| ) |
| else: |
| cas_digest = api.cas_util.upload(root_dir) |
| cas_digests[pkg_name] = cas_digest |
| |
| |
| def parent_build_url(api, parent_id): |
| if parent_id.isdigit(): |
| return f"https://ci.chromium.org/b/{parent_id}" |
| return f"https://ci.chromium.org/swarming/task/{api.swarming.task_id}?server={api.buildbucket.backend_hostname}" |
| |
| |
| def get_commit_msg(api, bb_input, checkout): |
| """Gets the commit message for a gerrit change from source info.""" |
| gerrit_change = bb_input.gerrit_changes[0] |
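    # Gerrit hosts are typically named "<host>-review.googlesource.com";
    # dropping "-review" yields the corresponding Gitiles host.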
| change_remote = ( |
| f"https://{gerrit_change.host.replace('-review', '')}/{gerrit_change.project}" |
| ) |
| project_dir = None |
| for repo in checkout.source_info: |
| relpath = repo["relativePath"] |
| if api.sso.sso_to_https(repo["remote"]) == change_remote: |
| if relpath == ".": |
| project_dir = checkout.root_dir |
| else: |
| project_dir = checkout.root_dir / relpath |
| break |
| if not project_dir: |
| return "" |
| with api.context(cwd=project_dir): |
| commit_msg = api.git.get_commit_message(step_name="get commit msg") |
| return commit_msg |
| |
| |
| def report_multipliers(api, shards, gerrit_change, build_url): |
| has_multiplier_shards = False |
| for shard in shards: |
        # A multiplier shard's name starts with "multiplied:".
| # TODO(fxb/51896): Remove dependency on shard name. |
| if shard.name.startswith("multiplied:"): |
| has_multiplier_shards = True |
| break |
| if has_multiplier_shards: |
| set_gerrit_comment( |
| api, |
| "report multiplier shards", |
| gerrit_change, |
| ( |
| f"A builder created multiplier shards. Click the following link for more details: {build_url}" |
| ), |
| tag="autogenerated:fuchsia-build", |
| ) |
| |
| |
| def set_gerrit_comment(api, step_name, gerrit_change, message, tag=None): |
| try: |
| api.gerrit.set_review( |
| step_name, |
| str(gerrit_change.change), |
| message=message, |
| test_data=api.json.test_api.output({}), |
| tag=tag, |
| ) |
| except api.step.StepFailure: |
| # Comment failures shouldn't fail the build. |
| pass |
| |
| |
| def resolve_cipd_ref_to_update(api, remote, integration_repo, release_version): |
| # TODO(fxbug.dev/99452): Configure the branch->ref mapping via properties, |
| # like the sdk recipe does. |
| canary_head = api.git.get_remote_branch_head( |
| api.sso.sso_to_https(remote), "refs/heads/releases/canary" |
| ) |
| with api.context(cwd=integration_repo): |
| api.git.fetch("origin", refspec=canary_head) |
| canary_release_versions = api.release.get_release_versions( |
| ref=canary_head, repo_path=integration_repo |
| ) |
| return "latest" if release_version in canary_release_versions else None |
| |
| |
| def GenTests(api): |
| def properties( |
| sdk_subbuild=False, |
| run_tests=True, |
| gcs_bucket=None, |
| pave=True, |
| catapult_dashboard_master=None, |
| catapult_dashboard_bot=None, |
| max_attempts_per_test=0, |
| retry_task_on_test_failure=False, |
| gce_mediator=None, |
| skip_unaffected_tests=False, |
| size_diff_ci_bucket=None, |
| size_creep_label=None, |
| test_on_gce=False, |
| build_timeout_secs=0, |
| # We rely on the buildbucket test API using this same |
| # ID for ci_build_message and the builds returned by get(). |
| parent_id=str(api.buildbucket.ci_build_message().id), |
| is_release_version=False, |
| do_not_rebase_patch=False, |
| **kwargs, |
| ): |
| test_spec = None |
| if run_tests: |
| test_spec = Fuchsia.Test( |
| max_shard_size=0, |
| target_test_count=0, |
| target_shard_duration_secs=10 * 60, |
| per_test_timeout_secs=5 * 60, |
| max_shards_per_env=8, |
| timeout_secs=30 * 60, |
| pool="fuchsia.tests", |
| swarming_expiration_timeout_secs=10 * 60, |
| swarming_io_timeout_secs=5 * 60, |
| swarming_grace_period_secs=30, |
| botanist_grace_period_secs=60, |
| default_service_account="service_account", |
| targets_serial=True, |
| test_on_gce=test_on_gce, |
| pave=pave, |
| catapult_dashboard_master=catapult_dashboard_master, |
| catapult_dashboard_bot=catapult_dashboard_bot, |
| max_attempts_per_test=max_attempts_per_test, |
| retry_task_on_test_failure=retry_task_on_test_failure, |
| gce_mediator=gce_mediator, |
| skip_unaffected_tests=skip_unaffected_tests, |
| enable_sandboxing=True, |
| ) |
| return api.properties( |
| parent_id=parent_id, |
| spec=Fuchsia( |
| checkout=Fuchsia.Checkout( |
| project="integration", |
| manifest="minimal", |
| remote="https://fuchsia.googlesource.com/manifest", |
| is_release_version=is_release_version, |
| do_not_rebase_patch=do_not_rebase_patch, |
| ), |
| build=Fuchsia.Build( |
| run_tests=run_tests, |
| sdk_subbuild=sdk_subbuild, |
| upload_results=bool(gcs_bucket), |
| timeout_secs=build_timeout_secs, |
| fint_params_path="fint_params/core.textproto", |
| report_binary_sizes=True, |
| check_size_budgets=True, |
| size_diff_ci_bucket=size_diff_ci_bucket, |
| size_creep_label=size_creep_label, |
| assembly_artifacts_cipd_package="fuchsia/assembly-inputs/core.x64", |
| enable_sandboxing=True, |
| ), |
| test=test_spec, |
| gcs_bucket=gcs_bucket, |
| artifact_gcs_bucket="fuchsia-infra-artifacts", |
| board_cipd_prefix="fuchsia/assembly/boards", |
| partitions_cipd_prefix="fuchsia/assembly/partitions", |
| ), |
| **kwargs, |
| ) |
| |
| def fint_set_metadata(**kwargs): |
| kwargs.setdefault("product", "products/core.gni") |
| kwargs.setdefault("board", "boards/x64.gni") |
        return kwargs
| |
| integration_remote = "https://fuchsia.googlesource.com/integration" |
| |
| def test(name, status="SUCCESS", tryjob=True, source_info=True, **kwargs): |
| ret = api.buildbucket_util.test(name, tryjob=tryjob, status=status, **kwargs) |
| if source_info: |
| ret += api.checkout.source_info( |
| [ |
| { |
| "name": "integration", |
| "remote": integration_remote, |
| "revision": "a491082dc1b632bbcd60ba3618d20b503c2de738", |
| "relativePath": "integration", |
| }, |
| { |
| "name": "fuchsia", |
| "remote": "https://fuchsia.googlesource.com/fuchsia", |
| "revision": "a491082dc1b632bbcd60ba3618d20b503c2de738", |
| "relativePath": ".", |
| }, |
| ] |
| ) |
| return ret |
| |
| yield ( |
| test("default", tryjob=False) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=True, |
| ) |
| + api.build.fint_set_artifacts(metadata=fint_set_metadata(variants=["profile"])) |
| + api.build.create_shards() |
| ) |
| |
| yield ( |
| test("non_numeric_parent_id", tryjob=False) |
| + properties( |
| parent_id="not-a-number", gcs_bucket="fuchsia-infra", run_tests=True |
| ) |
| + api.build.create_shards() |
| ) |
| |
| yield ( |
| test("subbuild_no_parent_id", status="INFRA_FAILURE", source_info=False) |
| + properties(run_tests=True, parent_id="") |
| ) |
| |
| # Test the case where the test spec includes fields that enable uploading |
| # to the Catapult performance dashboard. |
| yield ( |
| test("catapult_dashboard_upload_enabled", tryjob=False) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=True, |
| catapult_dashboard_master="example.fuchsia.global.ci", |
| catapult_dashboard_bot="example-core.x64-nuc", |
| ) |
| + api.build.create_shards() |
| ) |
| |
| yield ( |
| test("default_cq", tryjob=True) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| size_diff_ci_bucket="ci", |
| size_creep_label="Size-Review", |
| run_tests=True, |
| max_attempts_per_test=5, |
| build_timeout_secs=120 * 60, |
| skip_unaffected_tests=True, |
| ) |
| + api.build.create_shards() |
| + api.step_data( |
| "check size creep.diff ci", |
| api.json.output( |
| { |
| "component_diffs": [ |
| { |
| "name": "componentA", |
| "baseline_size": 16, |
| "size": 32, |
| "size_diff": 16, |
| "budget": 48, |
| "creep_budget": 8, |
| "budget_exceeded": False, |
| "creep_budget_exceeded": True, |
| }, |
| ], |
| "creep_budget_exceeded": True, |
| "baseline_build_id": 123456, |
| } |
| ), |
| ) |
| + api.step_data( |
| "check size creep.get change details", |
| api.json.output( |
| { |
| "labels": { |
| "Size-Review": { |
| "approved": { |
| "email": "size-approver@google.com", |
| } |
| }, |
| }, |
| } |
| ), |
| ) |
| ) |
| |
| yield ( |
| test("collect_absolute_coverage", tryjob=True) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=True, |
| skip_unaffected_tests=True, |
| coverage_collection=CoverageCollectionType.FULL, |
| ) |
| + api.build.create_shards() |
| ) |
| |
| yield ( |
| test("collect_selective_coverage", tryjob=True) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=True, |
| skip_unaffected_tests=True, |
| coverage_collection=CoverageCollectionType.SELECTIVE, |
| ) |
| + api.build.create_shards() |
| ) |
| |
| # Test that max_attempts_per_test is ignored if no affected tests are |
| # detected and default within-task attempts will be set to 1 if |
| # retry_task_on_test_failure is set to True. |
| yield ( |
| test("default_cq_no_affected_retry_task_on_test_failure", tryjob=True) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=True, |
| # This field will be ignored if no affected tests are detected. |
| max_attempts_per_test=5, |
| # This will cause default within-task attempts to be 1. |
| retry_task_on_test_failure=True, |
| ) |
| + api.build.fint_build_artifacts(affected_tests=[]) |
| + api.build.create_shards() |
| ) |
| |
| yield ( |
| test("skip_if_unaffected", tryjob=True) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| skip_unaffected_tests=True, |
| ) |
| + api.build.fint_set_artifacts(skip_build=True, metadata=fint_set_metadata()) |
| ) |
| |
| yield ( |
| test("affected_tests_no_work", tryjob=True) |
| + properties(gcs_bucket="fuchsia-infra", skip_unaffected_tests=True) |
| + api.build.fint_build_artifacts(build_not_affected=True) |
| ) |
| |
| yield ( |
| test( |
| "default_multipliers", |
| tryjob=True, |
| # Values chosen to match source_info so that we trigger the test |
| # multipliers code path. |
| project="integration", |
| git_repo=integration_remote, |
| ) |
| + properties(run_tests=True, coverage_collection=CoverageCollectionType.NONE) |
| + api.build.create_shards(with_multipliers=True) |
| + api.step_data( |
| "get commit msg", |
| api.raw_io.stream_output_text("Foo\n\nMultiply: foo_tests: 123"), |
| ) |
| + api.step_data("report multiplier shards", retcode=1) |
| + api.post_process(post_process.MustRun, "upload test orchestration inputs") |
| ) |
| |
| yield ( |
| test( |
| "run_all_tests", |
| tryjob=True, |
| # Values chosen to match source_info so that we trigger the |
| # run-all-tests code path. |
| project="integration", |
| git_repo=integration_remote, |
| ) |
| + properties(run_tests=True) |
| + api.build.fint_set_artifacts( |
| skip_build=True, |
| metadata=fint_set_metadata(), |
| ) |
| + api.build.create_shards(with_multipliers=True) |
| + api.step_data( |
| "get commit msg", |
| api.raw_io.stream_output_text("Foo\n\nRun-All-Tests: True"), |
| ) |
| + api.post_process(post_process.MustRun, "upload test orchestration inputs") |
| ) |
| |
| yield ( |
| test("sdk", tryjob=False) |
| + properties( |
| run_tests=False, |
| sdk_subbuild=True, |
| sdk_id="sdk-id", |
| is_release_version=True, |
| cas_tools=["tool1", "tool2"], |
| ) |
| + api.release.ref_to_release_version( |
| "releases/0.20191018.0.1", nesting="checkout.resolve release version" |
| ) |
| + api.step_data( |
| "publish assembly artifacts.get release versions on h3ll0", |
| api.raw_io.stream_output_text( |
| "\n".join( |
| [ |
| "releases/0.20191018.0.1", |
| "releases/0.20191018.0.2", |
| ] |
| ) |
| ), |
| ) |
| + api.build.fint_build_artifacts() |
| ) |
| |
| yield ( |
| test( |
| "cq_perfcompare", |
| tryjob=True, |
| repo="third_party/example_repo", |
| ) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=True, |
| max_attempts_per_test=5, |
| perfcompare=True, |
| ) |
| + api.build.create_shards() |
| + api.build.create_shards(nesting="build without CL") |
| ) |
| |
| yield ( |
| test("test_on_gce", tryjob=False) |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=True, |
| gce_mediator=Fuchsia.Test.GCEMediator( |
| endpoint="gcem-endpoint", cloud_project="gcem-cloud-project" |
| ), |
| test_on_gce=True, |
| ) |
| + api.build.create_shards() |
| ) |
| |
| yield ( |
| test("failed_build_cq", tryjob=True, status="FAILURE") |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=False, |
| do_not_rebase_patch=True, |
| **{"$fuchsia/autocorrelator": {"ci_bucket": "ci", "ci_builder": "builder"}}, |
| ) |
| + api.step_data("build.ninja", retcode=1) |
| + api.autocorrelator.check_try( |
| [{"build_id": "456", "score": 0.98, "is_green": False}] |
| ) |
| + api.autocorrelator.check_ci( |
| { |
| "build_id": "789", |
| "score": 0.96, |
| "is_green": False, |
| "commit_dist": 0, |
| } |
| ) |
| ) |
| |
| yield ( |
| test("failed_build_cq_no_rebase", tryjob=True, status="FAILURE") |
| + properties( |
| gcs_bucket="fuchsia-infra", |
| run_tests=False, |
| do_not_rebase_patch=True, |
| ) |
| + api.step_data("build.ninja", retcode=1) |
| ) |