| # Copyright 2021 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| """Recipe for triggering clang prod builds.""" |
| |
| from collections import OrderedDict |
| from urllib.parse import urlparse |
| |
| from google.protobuf import timestamp_pb2 |
| from PB.go.chromium.org.luci.buildbucket.proto import ( |
| builder_common as builder_common_pb2, |
| builds_service as builds_service_pb2, |
| common as common_pb2, |
| ) |
| from PB.recipes.fuchsia.contrib.clang_trigger import InputProperties |
| |
| |
# Recipe modules this recipe depends on; resolved by the recipe engine.
DEPS = [
    "fuchsia/buildbucket_util",
    "fuchsia/gitiles",
    "recipe_engine/buildbucket",
    "recipe_engine/json",
    "recipe_engine/properties",
    "recipe_engine/step",
    "recipe_engine/swarming",
    "recipe_engine/time",
]

# Input properties schema for this recipe (clang_trigger proto).
PROPERTIES = InputProperties
# One day in seconds; used as the lookback window for build searches.
_SECONDS_PER_DAY = 24 * 3600
# Depth of repo commit history to consider when looking for a green commit.
SHALLOW_DEPTH = 256
| |
| |
def RunSteps(api, props):
    """Trigger prod builders on the newest commit green on all CI builders.

    Steps:
      1. Read recent commit history of the toolchain repo.
      2. Search recent builds of the input (CI) builders and map each
         commit to its build status per builder.
      3. Pick the newest commit considered green on every input builder.
      4. If a prod build is already scheduled for that commit, stop;
         otherwise trigger the prod builders pinned to it.
    """
    # Determine which repo to read. An explicit `repo` property wins;
    # otherwise fall back to the gitiles commit that triggered this build.
    # (Previously urlparse ran unconditionally on a possibly-empty string.)
    if props.repo:
        parsed = urlparse(props.repo)
        git_host, git_project = parsed.hostname, parsed.path.strip("/")
    else:
        gitiles_commit = api.buildbucket.build_input.gitiles_commit
        git_host = gitiles_commit.host
        git_project = gitiles_commit.project
    repo_url = "https://%s/%s" % (git_host, git_project)
    output = api.gitiles.log(
        url=repo_url,
        treeish="refs/heads/main",
        # Use the module constant instead of a duplicated magic number.
        limit=SHALLOW_DEPTH,
        step_name="read commit history",
    )
    # Commit ids ordered newest to oldest.
    commit_history = [entry["id"] for entry in output]
    api.step.empty("commit history of %s" % repo_url).presentation.logs[
        "stdout"
    ] = commit_history

    # Restrict the build search to the past 24 hours.
    now = int(api.time.time())
    time_range = common_pb2.TimeRange(
        start_time=timestamp_pb2.Timestamp(seconds=now - _SECONDS_PER_DAY),
        end_time=timestamp_pb2.Timestamp(seconds=now),
    )
    with api.step.nest("lookup build history"):
        # One predicate per input builder, all sharing the time range.
        build_predicate_list = [
            builds_service_pb2.BuildPredicate(
                builder=builder_common_pb2.BuilderID(
                    project=api.buildbucket.build.builder.project,
                    bucket=props.input_builder_bucket,
                    builder=builder,
                ),
                create_time=time_range,
                include_experimental=True,
            )
            for builder in props.input_builders
        ]
        builds = api.buildbucket.search(
            predicate=build_predicate_list, step_name="lookup existing builds"
        )
        commit_status_map = parse_builds(api, builds)
    green_commits_map = find_green_commits(api, commit_history, commit_status_map)

    # Newest commit that is green on every input builder.
    candidate_commit = ""
    for commit in commit_history:
        if all(commit in gc for gc in green_commits_map.values()):
            candidate_commit = commit
            break
    if not candidate_commit:
        raise api.step.StepFailure(
            "candidate_commit cannot be found"
        )  # pragma: no cover
    # TODO(haowei): Change it to the trigger builder once this change
    # lands.
    prod_builds = api.buildbucket.search(
        predicate=[
            builds_service_pb2.BuildPredicate(
                builder=builder_common_pb2.BuilderID(
                    project=api.buildbucket.build.builder.project,
                    bucket=props.bucket,
                    builder=builder,
                ),
                create_time=time_range,
                include_experimental=True,
            )
            for builder in props.triggers
        ],
        step_name="check scheduled tasks",
    )
    # Avoid double-scheduling: skip if a recent prod build already points
    # at the candidate commit.
    already_scheduled = candidate_commit in {
        b.input.gitiles_commit.id for b in prod_builds
    }
    if already_scheduled:
        api.step.empty("candidate commit %s was already scheduled" % candidate_commit)
        return
    # Schedule the build.
    api.step.empty("candidate commit %s" % candidate_commit)
    # Trigger prod builders.
    # TODO(haowei): Consider inherit fields except "id" from
    # parent after verifying the pipeline works.
    gitiles_commit = common_pb2.GitilesCommit(
        host=git_host,
        project=git_project,
        id=candidate_commit,
        ref="refs/heads/main",
    )

    build_results = api.buildbucket.run(
        schedule_build_requests=[
            api.buildbucket.schedule_request(
                builder,
                bucket=props.bucket,
                gitiles_commit=gitiles_commit,
                swarming_parent_run_id=api.swarming.task_id,
                inherit_buildsets=False,
                tags=[
                    common_pb2.StringPair(
                        key="buildset",
                        value="commit/gitiles/%s/%s/+/%s"
                        % (git_host, git_project, candidate_commit),
                    ),
                ],
            )
            for builder in props.triggers
        ],
        step_name="schedule builds",
        collect_interval=props.interval_secs,
        timeout=props.timeout_secs,
    )

    api.buildbucket_util.display_builds(
        step_name="display builds", builds=build_results, raise_on_failure=True
    )
| |
| |
def find_green_commits(api, commit_history, commit_status_map):
    """Return, per builder, the commits considered green.

    `commit_history` is ordered newest to oldest. A commit with no build
    on a builder inherits the status of the closest newer commit that was
    built, so commits between builds count as green while the latest
    known build was green.

    Uses OrderedDict as an ordered set so the logged commit hashes keep
    history order.
    """
    result = {}
    for builder_name, statuses in commit_status_map.items():
        greens = OrderedDict()
        last_status = False
        for rev in commit_history:
            # Carry the most recent known status forward to unbuilt commits.
            last_status = statuses.get(rev, last_status)
            if last_status:
                greens[rev] = None
        step = api.step.empty("green commits from %s" % builder_name)
        step.presentation.logs["stdout"] = "\n".join(greens)
        result[builder_name] = greens
    return result
| |
| |
def parse_builds(api, builds):
    """Map builder name -> OrderedDict of commit id -> build succeeded.

    `builds` arrives sorted newest to oldest, so each builder's
    OrderedDict keeps that ordering. A repeated commit id overwrites the
    earlier entry, leaving the oldest build's status in place.
    """
    by_builder = {}
    for b in builds:
        commits = by_builder.setdefault(b.builder.builder, OrderedDict())
        commits[b.input.gitiles_commit.id] = (
            b.status and b.status == common_pb2.Status.SUCCESS
        )

    for builder_name, commits in by_builder.items():
        step = api.step.empty("commits from %s" % builder_name)
        step.presentation.logs["stdout"] = "\n".join(commits)
    return by_builder
| |
| |
def gen_build_message(
    api, id_offset, builder, revision, bucket="toolchain.ci", status="SUCCESS"
):
    """Create a fake CI build message for recipe test data."""
    # Arbitrary base id; offset keeps each simulated build id unique.
    base_build_id = 8945511751514863184
    return api.buildbucket.ci_build_message(
        build_id=base_build_id + id_offset,
        project="fuchsia",
        bucket=bucket,
        builder=builder,
        revision=revision,
        status=status,
    )
| |
| |
def GenTests(api):
    """Recipe simulation test cases."""
    props = api.properties(
        input_builders=[
            "clang-linux-arm64",
            "clang-linux-x64",
            "clang-mac-x64",
            "clang-windows-x64",
        ],
        input_builder_bucket="toolchain.ci",
        triggers=["clang-prod-trigger"],
        bucket="prod",
        timeout_secs=3600,
    )
    # Fake gitiles history: six commits, newest ("6" * 40) first.
    history = api.step_data(
        "read commit history",
        api.json.output([{"id": str(n) * 40} for n in range(6, 0, -1)]),
    )
    # Simulated CI build results across the input builders.
    ci_builds = api.buildbucket.simulated_search_results(
        step_name="lookup build history.lookup existing builds",
        builds=[
            gen_build_message(api, 0, "clang-linux-x64", "1" * 40),
            gen_build_message(api, 1, "clang-mac-x64", "2" * 40, status="FAILURE"),
            gen_build_message(api, 2, "clang-windows-x64", "4" * 40),
            gen_build_message(api, 3, "clang-linux-arm64", "5" * 40),
            gen_build_message(api, 4, "clang-linux-x64", "3" * 40),
            gen_build_message(api, 5, "clang-mac-x64", "4" * 40),
            gen_build_message(api, 6, "clang-linux-x64", "5" * 40),
        ],
    )
    # A prod build already pinned to the candidate commit.
    prod_builds = api.buildbucket.simulated_search_results(
        step_name="check scheduled tasks",
        builds=[gen_build_message(api, 7, "clang-linux-x64", "4" * 40, bucket="prod")],
    )

    yield api.buildbucket_util.test("default") + props + history + ci_builds

    yield (
        api.buildbucket_util.test("already_scheduled")
        + props
        + history
        + ci_builds
        + prod_builds
    )