| # Copyright 2020 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for processing code coverage. |
| |
| # Execution overview |
| |
| ## Get build results |
| |
This recipe triggers builds on the profile builders and collects their results.
| |
| ## Checkout + Build |
| |
It creates a checkout and populates it with the generated source files so
that they can be uploaded to GCS along with the coverage report.
| |
| ## Process coverage |
| |
| It collects the test results from all the profile builders and generates a |
| coverage report based on the profile data. |
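
# Example properties

Illustrative input properties (the builder names are hypothetical):

    {
      "spec_remote": "https://fuchsia.googlesource.com/integration",
      "profile_builder_names": ["profile-x64", "profile-arm64"],
    }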
| """ |
| |
| from google.protobuf import json_format |
| |
| from recipe_engine.config import List |
| from recipe_engine.recipe_api import Property |
| |
| from PB.infra.fuchsia import Fuchsia |
| from PB.recipe_modules.recipe_engine.led.properties import ( |
| InputProperties as LedInputProperties, |
| ) |
| from RECIPE_MODULES.fuchsia.testing_requests import api as testing_requests_api |
| |
| DEPS = [ |
| "fuchsia/artifacts", |
| "fuchsia/build", |
| "fuchsia/build_input_resolver", |
| "fuchsia/buildbucket_util", |
| "fuchsia/checkout", |
| "fuchsia/fuchsia", |
| "fuchsia/gitiles", |
| "fuchsia/gsutil", |
| "fuchsia/spec", |
| "fuchsia/subbuild", |
| "fuchsia/symbolize", |
| "fuchsia/testing", |
| "recipe_engine/buildbucket", |
| "recipe_engine/file", |
| "recipe_engine/isolated", |
| "recipe_engine/json", |
| "recipe_engine/path", |
| "recipe_engine/platform", |
| "recipe_engine/properties", |
| "recipe_engine/python", |
| "recipe_engine/step", |
| "recipe_engine/swarming", |
| ] |
| |
| PROPERTIES = { |
| "spec_remote": Property( |
| kind=str, |
| help="URL of the specs git repository", |
| default="https://fuchsia.googlesource.com/integration", |
| ), |
| "profile_builder_names": Property( |
| kind=List(str), |
| help="The list of builders to poll and process coverage on", |
| default=(), |
| ), |
| } |
| |
# The output property under which a Fuchsia builder reports the Swarming task
# IDs of the test tasks it ran.
| TASK_IDS_PROPERTY = "test-swarming-task-ids" |
| |
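# Arguments and output filenames used when invoking the covargs tool.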
| COVARGS_LOG_LEVEL = "debug" |
| COVARGS_OUTPUT_JSON = "covargs-output.json" |
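
# Bucket and object layout expected by the Chromium code coverage service.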
| CODE_COVERAGE_BUCKET = "code-coverage-data" |
| CODE_COVERAGE_PATH = "{type}/{host}/{project}/{change}/{bucket}/{builder}/{id}/metadata" |
| COVERAGE_REPORT_NAME = "index.html" |
| |
| |
| def _get_output_property(build, property_name): |
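    """Returns the value of a build output property, or None if it is unset."""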
    if property_name in build.output.properties:
        return build.output.properties[property_name]
    return None
| |
| |
| def _get_builds(api, builder_names, spec_revision=None): |
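    """Launches a build on each of the given builders and returns the build IDs."""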
    # TODO(fxb/9847): Instead of launching all builds, get existing builds
    # corresponding to the same gerrit_change or gitiles_commit.
    builders_to_launch = list(builder_names)
| with api.step.nest("launch builders") as presentation: |
| properties = {} |
| if spec_revision: |
| properties = { |
| "spec_revision": spec_revision, |
| } |
| builds = api.subbuild.launch( |
| builders_to_launch, presentation, extra_properties=properties |
| ) |
| return [build.build_id for build in builds.values()] |
| |
| |
| def _collect_builds(api, build_ids): |
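    """Waits for the given builds to complete and returns their Build protos."""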
| with api.step.nest("collect builds") as presentation: |
| builds = api.subbuild.collect(build_ids, presentation) |
| return [build.build_proto for build in builds.values()] |
| |
| |
| def process_coverage( |
| api, |
| covargs_path, |
| summary_files, |
| symbolizer_output_files, |
| debug_symbol_url, |
| llvm_profdata, |
| llvm_cov, |
| gcs_bucket, |
| base_dir, |
| ): |
| output_dir = api.path["cleanup"].join("coverage") |
| report_dir = api.path["cleanup"].join("metadata") |
| temp_dir = api.path.mkdtemp("covargs") |
| |
| cmd = [ |
| covargs_path, |
| "-level", |
| COVARGS_LOG_LEVEL, |
| "-json-output", |
| api.json.output( |
| name=COVARGS_OUTPUT_JSON, leak_to=temp_dir.join(COVARGS_OUTPUT_JSON) |
| ), |
| "-output-dir", |
| output_dir, |
| "-llvm-profdata", |
| llvm_profdata, |
| "-llvm-cov", |
| llvm_cov, |
| "-symbol-server", |
| debug_symbol_url, |
| "-symbol-cache", |
| api.path["cache"].join("symbol"), |
        # TODO(phosek): remove this flag when the debugging phase is over.
| "-save-temps", |
| temp_dir, |
| "-report-dir", |
| report_dir, |
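        # Fall back to the conventional checkout path on LUCI Swarming bots if
        # no subbuild reported its checkout root.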
| "-base", |
| base_dir or "/b/s/w/ir/x/w/fuchsia", |
| ] |
| |
| for summary_file in summary_files: |
| cmd.extend(["-summary", summary_file]) |
| for output_file in symbolizer_output_files: |
| cmd.extend(["-symbolize-dump", output_file]) |
| |
| try: |
| api.step("covargs", cmd) |
| finally: |
        # TODO(phosek): remove this block when the debugging phase is over.
| result = api.step.active_result |
| isolated = api.isolated.isolated(temp_dir) |
| isolated.add_dir(temp_dir) |
| isolated_hash = isolated.archive("isolate") |
| result.presentation.properties["isolated"] = isolated_hash |
| |
| # Upload the coverage report to the Chromium coverage service. |
| gitiles_commit = api.buildbucket.build.input.gitiles_commit |
| if ( |
| not api.buildbucket.build.input.gerrit_changes |
| and gitiles_commit.host |
| and gitiles_commit.project |
| and gitiles_commit.id |
| ): |
| dst = CODE_COVERAGE_PATH.format( |
| type="postsubmit", |
| host=gitiles_commit.host, |
| project=gitiles_commit.project, |
| change=gitiles_commit.id, |
| bucket=api.buildbucket.build.builder.bucket, |
| builder=api.buildbucket.build.builder.builder, |
| id=api.buildbucket.build.id, |
| ) |
| step_result = api.gsutil.rsync( |
| name="upload report", |
| src=report_dir, |
| bucket=CODE_COVERAGE_BUCKET, |
| dst=dst, |
| recursive=True, |
| options={ |
| "parallel_process_count": api.platform.cpu_count, |
| "parallel_thread_count": 1, |
| }, |
| multithreaded=True, |
| ) |
| step_result.presentation.properties.update( |
| { |
| "coverage_metadata_gs_paths": [dst], |
| "mimic_builder_names": [api.buildbucket.build.builder.builder], |
| "coverage_gs_bucket": CODE_COVERAGE_BUCKET, |
| "coverage_is_presubmit": False, |
| } |
| ) |
| |
| # Upload the coverage report to our own bucket. |
    # TODO(ihuh): Move this into the gsutil module and deduplicate it with
    # other GCS logic.
| dst = "builds/%s/coverage" % api.buildbucket_util.id |
| try: |
| api.gsutil.upload( |
| name="upload coverage", |
| src=output_dir, |
| bucket=gcs_bucket, |
| dst=dst, |
| recursive=True, |
| gzip_exts=["html"], |
| options={ |
| "parallel_process_count": api.platform.cpu_count, |
| "parallel_thread_count": 1, |
| }, |
| multithreaded=True, |
| no_clobber=True, |
| ) |
| finally: |
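        # Always surface a link to the report, even if the upload step failed.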
| coverage_report_step = api.step("coverage report", None) |
| link = api.gsutil._http_url( |
| gcs_bucket, api.gsutil.join(dst, COVERAGE_REPORT_NAME), True |
| ) |
| coverage_report_step.presentation.links[COVERAGE_REPORT_NAME] = link |
| |
| |
| def RunSteps(api, spec_remote, profile_builder_names): |
| spec, spec_revision = api.fuchsia.setup_with_spec(spec_remote) |
| |
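    # Launch the profile builders first; the subbuilds can run while this
    # builder creates its own checkout.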
| build_ids = _get_builds(api, profile_builder_names, spec_revision=spec_revision) |
| assert build_ids, "failed to get builds" |
| |
| checkout = api.checkout.from_spec(spec.checkout) |
| |
| builds = _collect_builds(api, build_ids) |
| # Get orchestration_inputs_hash from any build. The artifacts used from here |
| # are host tools that should be the same across all builders. |
| orchestration_inputs_hash = None |
| base_dir = "" |
    for build in builds:
        build_hash = _get_output_property(
            build, api.build.TEST_ORCHESTRATION_INPUTS_HASH_PROPERTY
        )
        # Don't clobber values found on an earlier build if a later build
        # doesn't report them.
        base_dir = (
            _get_output_property(build, api.checkout.ROOT_DIR_PROPERTY) or base_dir
        )
        if build_hash:
            orchestration_inputs_hash = build_hash
            # Collect generated sources from all builds.
            orchestration_inputs = api.build.download_test_orchestration_inputs(
                build_hash
            )
| api.python( |
| "copy generated sources to checkout", |
| api.resource("copy_sources.py"), |
| [ |
| "--source_dir", |
| orchestration_inputs.generated_sources_root, |
| "--dest_dir", |
| checkout.root_dir, |
| ], |
| ) |
| |
    assert orchestration_inputs_hash, "no build reported test orchestration inputs"
| |
    # Point the artifacts module at the bucket used by the subbuilds so that
    # debug_symbol_url() below resolves correctly.
| api.artifacts.gcs_bucket = spec.artifact_gcs_bucket |
| summary_files = [] |
| symbolizer_output_files = [] |
| for build in builds: |
| task_ids = _get_output_property(build, TASK_IDS_PROPERTY) |
| if not task_ids: |
| continue |
        # All tasks should have completed by now, so there's no need for a timeout.
| results = api.swarming.collect( |
| "collect", list(task_ids), output_dir=api.path.mkdtemp("swarming"), |
| ) |
| for result in results: |
| if not result.success: |
| continue |
| with api.step.nest("process result for %s" % result.name) as presentation: |
| if api.testing.task_targets_fuchsia(result): |
| # TODO(crbug.com/1124935): We use the serial_log instead of the task |
| # output because the task output gets truncated. |
| if testing_requests_api.SERIAL_NAME not in result.outputs: |
| presentation.step_text = ( |
| "%s not found; skipping" % testing_requests_api.SERIAL_NAME |
| ) |
| continue |
| logfile_data = api.file.read_text( |
| "read %s" % testing_requests_api.SERIAL_NAME, |
| result.outputs[testing_requests_api.SERIAL_NAME], |
| test_data="log contents", |
| ) |
| symbolizer_json_output = api.path["cleanup"].join( |
| "%s-%s-%s" |
| % ( |
| build.builder.builder, |
| result.name, |
| api.symbolize.OUTPUT_JSON, |
| ) |
| ) |
| presentation.logs["symbolized log"] = api.symbolize( |
| symbolize_tool=orchestration_inputs.symbolize_tool, |
| debug_symbol_url=api.artifacts.debug_symbol_url(), |
| llvm_symbolizer=orchestration_inputs.llvm_symbolizer, |
| data=logfile_data, |
| json_output=symbolizer_json_output, |
| ) |
| symbolizer_output_files.append(symbolizer_json_output) |
| |
| results_dir = api.testing.results_dir_on_host.join(result.id) |
| api.testing.extract_test_results( |
| step_name="extract", |
| task_result=result, |
| directory=results_dir, |
| minfs_path=orchestration_inputs.minfs, |
| ) |
| summary_files.append(results_dir.join(api.testing.TEST_SUMMARY_JSON)) |
| |
| process_coverage( |
| api=api, |
| covargs_path=orchestration_inputs.covargs, |
| summary_files=summary_files, |
| symbolizer_output_files=symbolizer_output_files, |
| debug_symbol_url=api.artifacts.debug_symbol_url(), |
| llvm_profdata=orchestration_inputs.llvm_profdata, |
| llvm_cov=orchestration_inputs.llvm_cov, |
| gcs_bucket=spec.gcs_bucket, |
| base_dir=base_dir, |
| ) |
| |
| |
| def GenTests(api): |
| def spec_data( |
| build_type="debug", |
| ninja_targets=(), |
| sdk_subbuild=False, |
| variants=(), |
| device_type="QEMU", |
| run_tests=True, |
| test_in_shards=True, |
| gcs_bucket=None, |
| pave=True, |
| ): |
| test_spec = None |
| if run_tests: |
| test_spec = Fuchsia.Test( |
| device_type=device_type, |
| max_shard_size=0, |
| target_shard_duration_secs=10 * 60, |
| max_shards_per_env=8, |
| timeout_secs=30 * 60, |
| pool="fuchsia.tests", |
| test_in_shards=test_in_shards, |
| swarming_expiration_timeout_secs=10 * 60, |
| swarming_io_timeout_secs=5 * 60, |
| default_service_account="service_account", |
| targets_serial=True, |
| pave=pave, |
| ) |
| spec = Fuchsia( |
| checkout=Fuchsia.Checkout( |
| manifest="minimal", |
| project="integration", |
| remote="https://fuchsia.googlesource.com/manifest", |
| upload_results=bool(gcs_bucket), |
| ), |
| build=Fuchsia.Build( |
| variants=variants, |
| build_type=build_type, |
| run_tests=run_tests, |
| ninja_targets=ninja_targets, |
| sdk_subbuild=sdk_subbuild, |
| board="boards/x64.gni", |
| product="products/core.gni", |
| target="x64", |
| upload_results=bool(gcs_bucket), |
| ), |
| test=test_spec, |
| gcs_bucket=gcs_bucket, |
| artifact_gcs_bucket="fuchsia-infra-artifacts", |
| ) |
| return api.spec.spec_loaded_ok(step_name="load spec.build_init", message=spec) |
| |
    default_gitiles_refs_steps = (
        api.build_input_resolver.set_gerrit_branch()
        + api.gitiles.refs("refs", ["refs/heads/master", "deadbeef"])
    )
| |
| task_request_jsonish = api.testing.task_request_jsonish(legacy_qemu=False) |
| download_step_data = ( |
| api.testing.task_requests_step_data( |
| [task_request_jsonish], |
| "download test orchestration inputs.load task requests", |
| ) |
| + api.step_data( |
| "download test orchestration inputs.load triage sources", |
| api.json.output(["triage/config.triage", "other/triage/config.triage"]), |
| ) |
| + api.testing.task_requests_step_data( |
| [task_request_jsonish], |
| "download test orchestration inputs (2).load task requests", |
| ) |
| + api.step_data( |
| "download test orchestration inputs (2).load triage sources", |
| api.json.output(["triage/config.triage", "other/triage/config.triage"]), |
| ) |
| ) |
| |
| collect_steps = ( |
| api.step_data( |
| "collect", |
| api.swarming.collect( |
| [ |
| api.swarming.task_result( |
| id="610", name="Linux", outputs=["out/path/to/output/file"], |
| ), |
| api.swarming.task_result( |
| id="710", |
| failure=True, |
| name="QEMU", |
| outputs=["out/path/to/output/file"], |
| ), |
| api.swarming.task_result( |
| id="810", name="QEMU-(2)", outputs=["output.fs", "serial.log"], |
| ), |
| api.swarming.task_result( |
| id="910", name="QEMU-(3)", outputs=["output.fs"], |
| ), |
| ] |
| ), |
| ) |
| + download_step_data |
| ) |
| spec_remote = "https://fuchsia.googlesource.com/integration" |
| properties = { |
| "spec_remote": spec_remote, |
| "profile_builder_names": ["profile-builder", "profile-builder2"], |
| } |
| led_properties = { |
| "spec_remote": spec_remote, |
| "profile_builder_names": ["profile-builder", "profile-builder2"], |
| "$recipe_engine/led": LedInputProperties( |
| led_run_id="led/user_example.com/abc123", |
| isolated_input=LedInputProperties.IsolatedInput( |
| hash="abc123", |
| namespace="default-gzip", |
| server="isolateserver.appspot.com", |
| ), |
| ), |
| } |
| profile_build = api.subbuild.ci_build_message( |
| build_id=8945511751514863184, |
| builder="profile-builder", |
| output_props={ |
| "test-swarming-task-ids": ["610", "710", "810", "910"], |
| "test_orchestration_inputs_hash": "abc", |
| "checkout_root": "/checkout/root", |
| }, |
| status="SUCCESS", |
| ) |
| profile_build_without_tasks = api.subbuild.ci_build_message( |
| build_id=8945511751514863185, |
| builder="profile-builder2", |
| output_props={ |
| "test_orchestration_inputs_hash": "abc", |
| "checkout_root": "/some/other/checkout/root", |
| }, |
| status="SUCCESS", |
| ) |
| |
| yield ( |
| api.checkout.test("default", tryjob=False) |
| + api.build.test("default", create_shards=False) |
| + spec_data(gcs_bucket="fuchsia-infra", run_tests=True) |
| + api.properties(**properties) |
| + collect_steps |
| + api.subbuild.child_build_steps( |
| builds=[profile_build, profile_build_without_tasks], |
| launch_step="launch builders", |
| collect_step="collect builds", |
| ) |
| ) |
| |
| yield ( |
| api.checkout.test("default_led", tryjob=True) |
| + api.build.test("default_led", create_shards=False, tryjob=True) |
| + spec_data(gcs_bucket="fuchsia-infra", run_tests=True) |
| + api.properties(**led_properties) |
| + default_gitiles_refs_steps |
| + collect_steps |
| + api.subbuild.child_led_steps( |
| builds=[profile_build, profile_build_without_tasks], |
| collect_step="collect builds", |
| ) |
| ) |