# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia."""

from recipe_engine.config import List
from recipe_engine.recipe_api import Property

from PB.recipes.fuchsia.fuchsia.spec import Fuchsia

DEPS = [
    "fuchsia/artifacts",
    "fuchsia/build",
    "fuchsia/buildbucket_util",
    "fuchsia/checkout",
    "fuchsia/recipe_testing",
    "fuchsia/testing_requests",
    "fuchsia/testsharder",
    "recipe_engine/buildbucket",
    "recipe_engine/file",
    "recipe_engine/json",
    "recipe_engine/path",
    "recipe_engine/platform",
    "recipe_engine/properties",
    "recipe_engine/swarming",
]

PROPERTIES = {
    "gcs_bucket": Property(
        kind=str,
        help="GCS bucket for uploading checkout, build, and test results",
        default="###fuchsia-build###",
    ),
    "release_branch": Property(
        kind=List(str),
        help="The release branch corresponding to the checkout.",
        default=None,
    ),
    "release_version": Property(
        kind=str, help="The release version of the checkout.", default=None
    ),
    "extract_artifacts": Property(
        kind=bool, help="Whether to extract the test input artifacts", default=False
    ),
    "sdk_id": Property(kind=str, help="sdk_id to set in GN", default="sdk-id"),
    "incremental": Property(
        kind=bool, help="Whether to build incrementally", default=False
    ),
    "size_diff_ci_bucket": Property(
        kind=str,
        help="CI bucket to use for size diff.",
        default=None,
    ),
}


def RunSteps(
    api,
    gcs_bucket,
    release_branch,
    release_version,
    extract_artifacts,
    sdk_id,
    incremental,
    size_diff_ci_bucket,
):
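    # Construct checkout results by hand (with hard-coded source info) rather
    # than performing a real checkout.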
    source_info = [
        {
            "name": "integration",
            "remote": "https://fuchsia.googlesource.com/integration",
            "revision": "a491082dc1b632bbcd60ba3618d20b503c2de738",
            "relativePath": "integration",
        },
    ]

    checkout_root = api.path.start_dir
    checkout = api.checkout.CheckoutResults(
        root_dir=checkout_root,
        project="integration",
        snapshot_file=checkout_root / "snapshot",
        release_branch=release_branch,
        release_version=release_version,
        source_info=source_info,
    )

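    # Run the fint set + build flow. with_options() returns a falsy value when
    # the build is skipped because the change doesn't affect it (see the
    # skip_if_unaffected test case below).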
    build_results = api.build.with_options(
        checkout,
        fint_params_path="infra/specs/fint-params.textproto",
        incremental=incremental,
        sdk_id=sdk_id,
        artifact_gcs_bucket=gcs_bucket,
        upload_namespace="namespace",
    )
    if not build_results:
        assert not api.recipe_testing.enabled
        return

    # These statements are just for test coverage, so don't lint them.
    # pylint: disable=pointless-statement
    build_results.boards
    build_results.partitions
    build_results.compdb_path
    build_results.generated_sources
    build_results.gn_results.sdk_archives
    build_results.affected_tests
    build_results.no_work
    build_results.bazel_sdk_path
    build_results.cts_artifacts
    build_results.vnames_json_path
    build_results.product_bundle_name
    build_results.package_archive("foo", "x64")
    try:
        build_results.tool("does-not-exist", mock_for_tests=False)
    except api.build.NoSuchTool:
        pass
    try:
        build_results.package_archive("does-not-exist", "arm64")
    except api.build.NoSuchPackageArchive:
        pass
    # pylint: enable=pointless-statement

    build_results.cipd_assembly_artifacts(missing_ok=False)

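    # Report binary sizes and check size budgets. The size creep and product
    # size diff checks only run when a CI bucket to diff against is provided.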
    build_results.report_binary_sizes()
    build_results.check_size_budgets()
    if size_diff_ci_bucket:
        build_results.check_size_creep(
            "https://fuchsia.googlesource.com/integration",
            "basecommit",
            size_diff_ci_bucket,
            gerrit_changes=api.buildbucket.build.input.gerrit_changes,
            size_creep_label="Size-Review",
        )
        build_results.diff_product_size(
            "https://fuchsia.googlesource.com/integration",
            "basecommit",
            size_diff_ci_bucket,
        )

    build_results.upload(gcs_bucket=gcs_bucket)

    # Run the testsharder to collect test specifications and shard them.
    shards = api.testsharder.execute(
        "create test shards",
        testsharder_path=build_results.tool("testsharder"),
        build_dir=build_results.build_dir,
        tags=[],
        product_bundle_name="core.x64",
    )

    if extract_artifacts:
        # Extract and manipulate a TestOrchestrationInputs object for test
        # coverage. The artifacts module must be configured before calling
        # task_requests().
        api.artifacts.gcs_bucket = gcs_bucket
        api.artifacts.namespace = "namespace"
        build_results.upload_artifacts(sign_artifacts=True)
        task_requests = api.testing_requests.task_requests(
            shards,
            build_results,
            api.buildbucket.build,
            Fuchsia.Test(
                pool="PLACEHOLDER.POOL",
                pave=True,
                swarming_grace_period_secs=30,
                botanist_grace_period_secs=60,
            ),
        )
        test_orchestration_inputs = (
            api.build.test_orchestration_inputs_from_build_results(
                build_results,
                task_requests,
                shards,
                include_generated_sources=True,
            )
        )
        test_orchestration_inputs.upload()
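        # Also exercise the download and property-name helpers.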
        api.build.download_test_orchestration_inputs("")
        api.build.test_orchestration_inputs_property_name(False)
        api.build.test_orchestration_inputs_property_name(True)

def GenTests(api):
    yield api.buildbucket_util.test("default") + api.build.create_shards()

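    # CQ build that exceeds its size creep budget but carries an approved
    # Size-Review label, so the build still passes.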
    yield (
        api.buildbucket_util.test("default_cq", tryjob=True)
        + api.build.create_shards(with_multipliers=True)
        + api.properties(size_diff_ci_bucket="ci")
        + api.step_data(
            "check size creep.diff ci",
            api.json.output(
                {
                    "component_diffs": [
                        {
                            "name": "componentA",
                            "baseline_size": 16,
                            "size": 32,
                            "size_diff": 16,
                            "budget": 48,
                            "creep_budget": 8,
                            "budget_exceeded": False,
                            "creep_budget_exceeded": True,
                        },
                    ],
                    "creep_budget_exceeded": True,
                    "baseline_build_id": 123456,
                }
            ),
        )
        + api.step_data(
            "check size creep.get change details",
            api.json.output(
                {
                    "labels": {
                        "Size-Review": {
                            "approved": {
                                "email": "size-approver@google.com",
                            }
                        },
                    },
                }
            ),
        )
    )

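    # The same creep budget overage without an approved Size-Review label
    # should fail the build.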
    yield (
        api.buildbucket_util.test(
            "size_creep_label_not_approved", tryjob=True, status="FAILURE"
        )
        + api.properties(size_diff_ci_bucket="ci")
        + api.step_data(
            "check size creep.diff ci",
            api.json.output(
                {
                    "component_diffs": [
                        {
                            "name": "componentA",
                            "baseline_size": 16,
                            "size": 32,
                            "size_diff": 16,
                            "budget": 48,
                            "creep_budget": 8,
                            "budget_exceeded": False,
                            "creep_budget_exceeded": True,
                        },
                    ],
                    "creep_budget_exceeded": True,
                    "baseline_build_id": 123456,
                }
            ),
        )
        + api.step_data(
            "check size creep.get change details",
            api.json.output({"labels": {}}),
        )
    )

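    # fint reports that the change doesn't affect the build, so the build is
    # skipped and no shards are created.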
    yield (
        api.buildbucket_util.test("skip_if_unaffected", tryjob=True)
        + api.build.fint_set_artifacts(skip_build=True)
    )

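    # Changes to the integration repo are not skipped, even if fint reports
    # that the build would be unaffected.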
    yield (
        api.buildbucket_util.test(
            "no_skip_for_integration_change",
            tryjob=True,
            repo="integration",
        )
        + api.build.fint_set_artifacts(skip_build=True)
        + api.build.create_shards()
    )

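    # When recipe testing is enabled the build must not be skipped (RunSteps
    # asserts this), so shards are still created.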
    yield (
        api.buildbucket_util.test("no_skip_if_recipe_testing_enabled", tryjob=True)
        + api.properties(**{"$fuchsia/recipe_testing": {"enabled": True}})
        + api.build.fint_set_artifacts(skip_build=True)
        + api.build.create_shards()
    )

    yield (
        # A subbuild should use the duration file corresponding to its parent
        # builder.
        api.buildbucket_util.test(
            "subbuild", repo="fuchsia", builder="builder-subbuild"
        )
        + api.build.create_shards()
    )

    yield (
        api.buildbucket_util.test("generate_skipped_shards")
        + api.properties(extract_artifacts=True)
        + api.build.create_shards(with_skipped=True)
        + api.build.load_skipped_shards()
    )

    yield (
        api.buildbucket_util.test("mac__release_version")
        + api.platform.name("mac")
        + api.properties(release_version="0.19700101.0.77")
        + api.build.create_shards()
    )

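    # Failure modes of the `fint set` (gn gen) stage: with a failure summary,
    # without one, and with a summary but no failing step.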
    yield (
        api.buildbucket_util.test("fint_set_failure", status="FAILURE")
        + api.step_data("build.gn gen", retcode=1)
        + api.build.fint_set_artifacts(failure_summary="gen failed")
    )

    yield (
        api.buildbucket_util.test("fint_set_unrecognized_failure", status="FAILURE")
        + api.step_data("build.gn gen", retcode=1)
        + api.build.fint_set_artifacts(failure_summary="")
    )

    yield (
        api.buildbucket_util.test(
            "fint_set_unexpected_failure_summary", status="FAILURE"
        )
        + api.build.fint_set_artifacts(failure_summary="something failed")
    )

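    # Failure modes of the `fint build` (ninja) stage.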
    yield (
        api.buildbucket_util.test("fint_build_failure", status="FAILURE")
        + api.step_data("build.ninja", retcode=1)
        + api.build.fint_build_artifacts(
            failure_summary="build failed",
            debug_files=[
                {
                    "path": "[START_DIR]/foo.txt",
                    "upload_dest": "foo.txt",
                }
            ],
        )
    )

    yield (
        api.buildbucket_util.test("fint_build_timeout", status="FAILURE")
        + api.step_data("build.ninja", times_out_after=5 * 60 * 60)
    )

    yield (
        api.buildbucket_util.test("fint_build_unrecognized_failure", status="FAILURE")
        + api.step_data("build.ninja", retcode=1)
        + api.build.fint_build_artifacts(failure_summary="")
    )

    yield (
        api.buildbucket_util.test(
            "fint_build_unexpected_failure_summary", status="FAILURE"
        )
        + api.build.fint_build_artifacts(failure_summary="something failed")
    )

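    # End-to-end coverage of the extract_artifacts path, including loading
    # task requests and triage sources from the downloaded orchestration
    # inputs.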
    yield (
        api.buildbucket_util.test("extract_artifacts")
        + api.properties(extract_artifacts=True)
        + api.testing_requests.task_requests_step_data(
            [api.swarming.example_task_request_jsonish()],
            "download test orchestration inputs.load task requests",
        )
        + api.step_data(
            "download test orchestration inputs.load triage sources",
            api.file.read_json(["config.triage", "other/config.triage"]),
        )
        + api.build.create_shards()
    )

    yield (
        api.buildbucket_util.test("ninjatrace_fails")
        + api.step_data("build.ninjatrace", retcode=1)
        + api.build.create_shards()
    )

    yield (
        api.buildbucket_util.test("no_fint_set_artifacts", status="FAILURE")
        + api.step_data("build.gn gen", retcode=1)
        + api.step_data("build.read fint set artifacts", api.file.errno("ENOENT"))
    )

    yield (
        api.buildbucket_util.test("no_fint_build_artifacts", status="FAILURE")
        + api.step_data("build.ninja", retcode=1)
        + api.step_data("build.read fint build artifacts", api.file.errno("ENOENT"))
    )

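    # Buildstats upload failures (all three attempts) should not fail the
    # build.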
    yield (
        api.buildbucket_util.test("upload_buildstats_failure")
        + api.step_data(
            "build.upload fuchsia-buildstats.json to ###fuchsia-build###", retcode=1
        )
        + api.step_data(
            "build.upload fuchsia-buildstats.json to ###fuchsia-build### (2)", retcode=1
        )
        + api.step_data(
            "build.upload fuchsia-buildstats.json to ###fuchsia-build### (3)", retcode=1
        )
        + api.build.create_shards()
    )

    yield (
        api.buildbucket_util.test("incremental_build", status="FAILURE")
        + api.properties(incremental=True)
        # The incremental cache should be cleared if the build fails.
        + api.step_data("build.ninja", retcode=1)
    )

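    # Two tool_paths entries with the same name, cpu, and os should be
    # rejected.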
    yield (
        api.buildbucket_util.test("duplicate_tool", status="FAILURE")
        + api.step_data(
            "build.read tool_paths manifest",
            api.file.read_json(
                [
                    {
                        "name": "foo",
                        "cpu": "x64",
                        "os": "linux",
                        "path": "linux_x64/foo",
                    },
                    {
                        "name": "foo",
                        "cpu": "x64",
                        "os": "linux",
                        "path": "linux_x64/other_foo",
                    },
                ]
            ),
        )
    )

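    # Duplicate package archives should be rejected.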
    yield (
        api.buildbucket_util.test("duplicate_package_archive", status="FAILURE")
        + api.step_data(
            "read package_archives manifest list",
            api.file.read_json(
                [
                    "foo/bar.json",
                    "foo/baz.json",
                ]
            ),
        )
    )

    yield (
        api.buildbucket_util.test("no_goma")
        + api.build.fint_set_artifacts(use_goma=False)
        + api.build.create_shards()
    )