| # Copyright 2019 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for building Fuchsia.""" |
| |
import copy

from recipe_engine.config import Enum, List
from recipe_engine.recipe_api import Property

from PB.infra.fuchsia import Fuchsia
| |
# CPU architectures accepted by the `target` property.
TARGETS = ['arm64', 'x64']

# Build flavors accepted by the `build_type` property.
BUILD_TYPES = ['debug', 'release', 'thinlto', 'lto']

# Recipe modules this recipe uses; the engine injects each onto `api`.
DEPS = [
    'fuchsia/artifacts',
    'fuchsia/build',
    'fuchsia/checkout',
    'fuchsia/experimental',
    'fuchsia/fuchsia',
    'fuchsia/jiri',
    'fuchsia/testing_requests',
    'fuchsia/testsharder',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/file',
    'recipe_engine/isolated',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    # Needed by TestOrchestrationInputs due to recipe engine DEPs weirdness.
    'recipe_engine/swarming',
]
| |
# Input properties for this recipe. The recipe engine validates incoming
# build properties against these declarations and passes the matching ones
# to RunSteps as keyword arguments.
# NOTE(review): `basestring` indicates this targets the Python 2 recipe
# engine; keep it until the engine migration lands.
PROPERTIES = {
    # Properties controlling a Fuchsia build.
    'target':
        Property(kind=Enum(*TARGETS), help='Target to build'),
    'build_type':
        Property(
            kind=Enum(*BUILD_TYPES), help='The build type', default='debug'),
    'packages':
        Property(kind=List(basestring), help='Packages to build', default=[]),
    'universe_packages':
        Property(
            kind=List(basestring),
            help='Packages to build and add to the universe set',
            default=[]),
    'variants':
        Property(
            kind=List(basestring),
            help='--variant arguments to gen.py',
            default=[]),
    'gn_args':
        Property(
            kind=List(basestring), help='Extra args to pass to GN', default=[]),
    'ninja_targets':
        Property(
            kind=List(basestring),
            help='Extra target args to pass to ninja',
            default=[]),
    'board':
        Property(kind=str, help='Board to build', default=None),
    'product':
        Property(kind=str, help='Product to build', default=None),
    'build_images':
        Property(
            kind=bool,
            help='Whether to build images needed to boot and test fuchsia.',
            default=False),
    'debug_symbol_gcs_bucket':
        Property(
            kind=str,
            help='GCS bucket for uploading debug symbols',
            default='debug-symbols'),
    'gcs_bucket':
        Property(
            kind=str,
            help='GCS bucket for uploading checkout, build, and test results',
            default=''),
    'checkout_snapshot':
        Property(
            kind=bool,
            help='Whether the checkout is from snapshot.',
            default=False),
    'export_compdb':
        Property(
            kind=bool,
            help='Whether to export a compdb from GN.',
            default=False),
    'release_version':
        Property(
            kind=str, help='The release version of the checkout.',
            default=None),
    'extract_artifacts':
        Property(
            kind=bool,
            help='Whether to extract the test input artifacts',
            default=False),
    'include_breakpad_symbols':
        Property(
            kind=bool,
            help='Whether to build and upload breakpad_symbols',
            default=False),
    'include_symbol_archive':
        Property(
            kind=bool,
            help='Whether to build and upload a tar archive of all symbolized binaries',
            default=False),
    'exclude_images':
        Property(
            # NOTE(review): `param_name` appears to match the dict key, which
            # is presumably the engine default — confirm whether it can be
            # dropped for consistency with the other entries.
            param_name='exclude_images',
            kind=bool,
            help='Whether to exclude the building of images',
            default=False),
    'sdk_id':
        Property(kind=str, help='sdk_id to set in GN', default='sdk-id'),
}
| |
| |
def RunSteps(api, target, build_type, packages, universe_packages, variants,
             gn_args, ninja_targets, board, product, gcs_bucket,
             debug_symbol_gcs_bucket, checkout_snapshot, release_version,
             extract_artifacts, include_breakpad_symbols,
             include_symbol_archive, exclude_images, sdk_id):
  """Builds Fuchsia and exercises the fuchsia/build module's surface area.

  Runs a spec-driven build, a manually parametrized gen()/ninja() pass,
  artifact extraction/isolation, filesystem size checks, symbol and result
  uploads, and test sharding — largely for module test coverage.
  """
  # NOTE(review): constructs CheckoutResults directly (using a test-api
  # example source manifest) instead of performing a real checkout; this
  # recipe exercises the build module, not the checkout flow.
  checkout = api.checkout.CheckoutResults(
      api=api,
      root_dir=api.path['start_dir'],
      snapshot_file=api.path['start_dir'].join('snapshot'),
      is_from_snapshot=checkout_snapshot,
      release_version=release_version,
      source_manifest=api.jiri.test_api.example_source_manifest,
  )

  # High-level path: drive a full build from a Fuchsia.Build proto spec.
  build_spec = Fuchsia.Build(
      target=target,
      build_type=build_type,
      packages=packages,
      universe_packages=universe_packages,
      variants=variants,
      gn_args=gn_args,
      ninja_targets=ninja_targets,
      board=board,
      run_tests=False,
      product=product,
      include_breakpad_symbols=include_breakpad_symbols,
      include_symbol_archive=include_symbol_archive,
      exclude_images=exclude_images,
  )
  build = api.build.from_spec(
      build_spec,
      checkout,
      sdk_id=sdk_id,
      gcs_bucket=gcs_bucket,
      collect_build_metrics=True)

  # One may more precisely parametrize a build by using the gen() and ninja()
  # methods. Note this gen() deliberately uses a separate output directory
  # and always exports a compdb, independent of the `export_compdb` property.
  gn_results = api.build.gen(
      checkout_root=api.path['start_dir'],
      fuchsia_build_dir=api.path['start_dir'].join('another', 'out'),
      target=target,
      build_type=build_type,
      board=board,
      product=product,
      packages=packages,
      universe_packages=universe_packages,
      variants=variants,
      args=gn_args,
      export_compdb=True,
  )

  # Touch compdb filtering and the zbi_tests accessor for coverage.
  gn_results.filtered_compdb(['third_party'])
  gn_results.zbi_tests  # pylint: disable=pointless-statement

  api.build.ninja(
      checkout_root=api.path['start_dir'],
      gn_results=gn_results,
      build_generated_sources=True,
      build_zbi_tests=True,
  )

  # Example of extracting BuildArtifacts from build results.
  tester_inputs = build.get_artifacts()
  assert_equal('fuchsia_build_dir', build.fuchsia_build_dir,
               tester_inputs.fuchsia_build_dir)

  # Example of isolating tester build inputs
  tester_inputs.isolate(api)

  # These statements are just for test coverage, so don't lint them.
  # pylint: disable=pointless-statement
  build.variants
  build.build_type
  # pylint: enable=pointless-statement

  # This might raise a step failure if the size > limit; swallow it here so
  # the remaining coverage paths still run.
  try:
    build.check_filesystem_sizes()
  except api.step.StepFailure:
    pass

  if debug_symbol_gcs_bucket:
    build.upload_debug_symbols(
        debug_symbol_gcs_bucket=debug_symbol_gcs_bucket, gcs_bucket=gcs_bucket)
  if gcs_bucket:
    build.upload_results(gcs_bucket=gcs_bucket)

  # Run the testsharder to collect test specifications and shard them.
  # Shards are always created here, whether or not tests run in shards.
  shards = api.testsharder.execute(
      'create test shards',
      testsharder_path=build.tool('testsharder'),
      build_dir=build.fuchsia_build_dir,
      tags=[],
  )

  if extract_artifacts:
    # Extract and manipulate a BuildArtifacts object for test coverage.
    build_artifacts_no_shards = build.get_artifacts()
    build_artifacts_no_shards.isolate(api)

    build_artifacts = build.get_artifacts(shards)
    assert_equal('fuchsia_build_dir', build.fuchsia_build_dir,
                 build_artifacts.fuchsia_build_dir)
    api.path.mock_add_paths(build_artifacts.secret_specs)
    build_artifacts.isolate(api)
    copy.deepcopy(build_artifacts)
    # pylint: disable=pointless-statement
    build_artifacts.ids
    build_artifacts.secret_specs
    build_artifacts.board
    build_artifacts.product
    # pylint: enable=pointless-statement
    api.build.BuildArtifacts.download(api, '')

    # artifacts has to be configured before calling shard_requests().
    api.artifacts.gcs_bucket = gcs_bucket
    api.artifacts.uuid = 'uuid'
    shard_requests = api.testing_requests.shard_requests(
        build_artifacts,
        api.buildbucket.build,
        0,
        'DUMMY.POOL',
        0,
        0,
        True,
        timeout_secs=0,
        pave_from_gcs=True,
    )
    test_orchestration_inputs = api.build.TestOrchestrationInputs(
        build_artifacts.llvm_symbolizer,
        build_artifacts.minfs,
        build_artifacts.symbolize_tool,
        shard_requests,
        build_artifacts.tests_file,
    )
    test_orchestration_inputs.isolate(api)
    test_orchestration_inputs.download(api, '')
| |
| |
def assert_equal(name, exp, act):
  """Fail with a descriptive message unless the values match.

  Args:
    name: Label to include in the failure message.
    exp: Expected value.
    act: Actual value.
  """
  if exp == act:
    return
  raise AssertionError('%s: expected %s but got %s' % (name, exp, act))
| |
| |
def GenTests(api):
  """Yields recipe test cases covering this recipe's code paths."""
  # Test cases that run no tests.
  yield api.build.test('default')

  yield api.build.test('default_cq', tryjob=True)

  # Various property edge cases.
  yield api.build.test('mac', properties={}) + api.platform.name('mac')
  yield api.build.test(
      'goma_local_cache',
      properties=dict(goma_local_cache=True),
  )

  yield api.build.test(
      'release_with_version',
      properties=dict(
          build_type='release',
          release_version='0.19700101.0.77',
          product='topaz/products/default.gni',
          board='topaz/boards/x64.gni',
      ),
  )

  yield api.build.test(
      'lto',
      properties=dict(variants=['lto']),
  )
  yield api.build.test(
      'thinlto',
      properties=dict(variants=['thinlto']),
  )
  yield api.build.test(
      'host_asan',
      properties=dict(variants=['host_asan']),
  )
  yield api.build.test(
      'asan',
      target='arm64',
      variants=['host_asan', 'asan'],
      properties=dict(
          build_images=True,
          gcs_bucket='###fuchsia-build###',
      ),
  )
  yield api.build.test(
      'gn_args',
      properties=dict(gn_args=['super_arg=false', 'less_super_arg=true']),
  )
  yield api.build.test(
      'ninja_targets',
      properties=dict(ninja_targets=['//target:one', '//target:two']),
  )
  yield api.build.test(
      'board_with_packages',
      properties={
          'packages': ['//src/examples:package'],
          'include_breakpad_symbols': True,
          'include_symbol_archive': True,
      },
  )

  yield api.build.test(
      'exclude_images',
      properties={
          'exclude_images': True,
      },
  )

  filesystem_sizes_too_large = [{
      'name': 'img-name',
      'value': 101,
      'limit': 100,
      'debug_instructions': 'debug instructions',
  }]
  yield (api.build.test(
      'storage_sparse_too_large',
      properties={
          'target': 'x64',  # specified to match out dir
      }) + api.path.exists(api.path['start_dir'].join(
          'out', 'default', 'filesystem_sizes.json')) + api.step_data(
              'check filesystem sizes.read filesystem sizes',
              api.file.read_json(json_content=filesystem_sizes_too_large),
          ))

  yield api.build.test(
      'product_with_universe_packages',
      properties=dict(universe_packages=['//packages:default'],),
  )

  # Test case for generating build traces and bloaty analysis
  yield api.build.test(
      'upload_build_metrics',
      build_type='release',
      properties=dict(run_tests=True,),
  ) + api.path.exists(api.path['start_dir'].join(
      'out', 'default', 'obj', 'build', 'images', 'system.snapshot'))

  # Test case for uploading filesystem sizes to BigQuery
  filesystem_sizes = [{
      'name': 'img-name',
      'value': 1,
      'limit': 1,
  }, {
      'name': 'other-img-name',
      'value': 1,
      'limit': 0,
  }]
  yield api.build.test(
      'upload_filesystem_sizes',
      properties={
          'gcs_bucket': '###fuchsia-build###',
          'target': 'x64',  # specified to match out dir
      },
  ) + api.path.exists(api.path['start_dir'].join(
      'out', 'default', 'filesystem_sizes.json')) + api.step_data(
          'check filesystem sizes.read filesystem sizes',
          api.file.read_json(json_content=filesystem_sizes),
      )

  # Test case for uploading debug symbols from snapshot.
  # Fixed: the declared property is `debug_symbol_gcs_bucket`; the previous
  # key `debug_symbol_bucket` set a property that does not exist (the test
  # only passed because the declared property's default is the same value).
  yield api.build.test(
      'upload_debug_symbols_from_snapshot',
      properties=dict(
          build_type='release',
          target='x64',
          run_tests=True,
          gcs_bucket='###fuchsia-build###',
          checkout_snapshot=True,
          debug_symbol_gcs_bucket='debug-symbols',
      ),
  )

  # Test case for uploading debug symbols from release version.
  # Fixed: same property-name correction as above.
  yield api.build.test(
      'upload_debug_symbols_from_release_version',
      properties=dict(
          build_type='release',
          target='x64',
          run_tests=True,
          gcs_bucket='###fuchsia-build###',
          release_version='0.20190618.0.1',
          debug_symbol_gcs_bucket='debug-symbols',
      ),
  )

  # Test case for skipping blobstats uploading if the blobstats script fails.
  yield api.build.test(
      'blobstats_fails',
      properties=dict(gcs_bucket='###fuchsia-build###',),
  ) + api.step_data(
      'upload build results.blobstats', retcode=255)

  # Test case for custom Clang toolchain.
  yield api.build.test(
      'clang_toolchain_from_cipd',
      properties={
          'build.clang_toolchain': {
              'type': 'cipd',
              'instance': api.cipd.make_resolved_version(None),
          },
      },
  )
  yield api.build.test(
      'clang_toolchain_from_isolate',
      properties={
          'build.clang_toolchain': {
              'type': 'isolated',
              'instance': 'abc123',
          },
      },
  )

  # Test case for custom Rust toolchain.
  yield api.build.test(
      'rust_toolchain_from_cipd',
      properties={
          'build.rust_toolchain': {
              'type': 'cipd',
              'instance': api.cipd.make_resolved_version(None),
          },
      },
  )
  yield api.build.test(
      'rust_toolchain_from_isolate',
      properties={
          'build.rust_toolchain': {
              'type': 'isolated',
              'instance': 'abc123',
          },
      },
  )

  # Test case for crash in Clang.
  yield api.build.test(
      'clang_crash_in_zircon',
      status='failure',
      properties=dict(gcs_bucket='###fuchsia-build###',),
      create_shards=False,
  ) + api.step_data(
      'build.build fuchsia.ninja.zircon', retcode=1)

  yield api.build.test(
      'clang_crash_in_fuchsia',
      status='failure',
      properties=dict(gcs_bucket='###fuchsia-build###',),
      create_shards=False,
  ) + api.step_data(
      'build.build fuchsia.ninja.fuchsia', retcode=1)

  # Test case for compilation database.
  yield api.build.test(
      'compdb',
      properties=dict(export_compdb=True),
  )

  # Test case for building generated sources.
  yield api.build.test(
      'generated_sources',
      properties=dict(build_generated_sources=True),
  )

  # Shared step data mocking the shards and task requests that the
  # orchestration-input download steps read back.
  download_orchestration_inputs = api.testing_requests.shards_step_data(
      step_name='download test orchestration inputs.load test shards',
      shards=[
          api.testsharder.shard(
              name='QEMU',
              tests=api.testing_requests.default_tests(),
              dimensions=dict(device_type='QEMU'),
          ),
      ]) + api.testing_requests.task_requests_step_data(
          [api.testing_requests.task_request_jsonish(legacy_qemu=True)],
          'download test orchestration inputs.load task requests',
      )

  yield api.build.test(
      'extract_build_artifacts',
      properties=dict(extract_artifacts=True),
      test_deps=['some.dep'],
  ) + download_orchestration_inputs
  yield api.build.test(
      'extract_build_artifacts_with_images',
      properties={'extract_artifacts': True},
  ) + download_orchestration_inputs

  yield api.build.test(
      'zbi_tests',
      properties=dict(build_generated_sources=True),
  ) + api.step_data(
      'read zbi test manifest',
      api.json.output([api.build.mock_zbi_test('arm64')]),
  )