blob: 158a3a0729b6ce8f6c9c006e5a3135806acfc706 [file] [log] [blame]
# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia and running tests."""
from google.protobuf import text_format as textpb
from recipe_engine.config import Enum, List, Set, Single
from recipe_engine.recipe_api import Property
from PB.infra.fuchsia import Fuchsia
# CPU architectures this recipe knows how to build for.
# NOTE(review): not referenced within this file — presumably consumed by
# spec validation or other recipes; confirm before removing.
TARGETS = ['arm64', 'x64']

# Recognized build optimization modes.
# NOTE(review): also not referenced within this file — verify external users.
BUILD_TYPES = ['debug', 'release', 'thinlto', 'lto']

# Output-property name for the hash of uploaded build artifacts.
# NOTE(review): unused in this file; likely read by downstream tooling.
BUILD_ARTIFACT_HASH_PROPERTY = 'build_artifact_hash'

# Commit-message key whose JSON value requests extra runs ("multipliers")
# of specific tests; extracted in RunSteps before sharding.
TEST_MULTIPLIER_KEY = 'MULTIPLY'
# Recipe-module dependencies: 'fuchsia/*' are project-local recipe modules,
# 'recipe_engine/*' are core recipe-engine modules.
DEPS = [
    'fuchsia/artifacts',
    'fuchsia/build',
    'fuchsia/build_input_resolver',
    'fuchsia/buildbucket_util',
    'fuchsia/checkout',
    'fuchsia/experimental',
    'fuchsia/fuchsia',
    'fuchsia/git',
    'fuchsia/gitiles',
    'fuchsia/gsutil',
    'fuchsia/hash',
    'fuchsia/jiri',
    'fuchsia/jsonutil',
    'fuchsia/tar',
    'fuchsia/testsharder',
    'fuchsia/testing',
    'fuchsia/testing_requests',
    'fuchsia/spec',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/isolated',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/swarming',
    'recipe_engine/url',
]
# Input properties accepted by this recipe.
PROPERTIES = {
    'spec_remote':
        Property(
            kind=str,
            help='URL of the specs git repository',
            # Use https: every other googlesource.com URL in this file is
            # https (e.g. the default project URL passed to
            # build_input_resolver.resolve), and the old http default was
            # inconsistent with them.
            default='https://fuchsia.googlesource.com/integration'),
}
def RunSteps(api, spec_remote):
  """Checks out, builds, and (optionally) tests Fuchsia.

  The behavior is driven by a Fuchsia spec proto loaded from spec_remote
  for the current buildbucket build: checkout mode, build options, and
  whether/how tests run (sharded on swarming vs. the deprecated path).

  Args:
    api: The recipe API object provided by the recipe engine.
    spec_remote (str): URL of the git repository holding spec files.

  Raises:
    StepFailure: if the spec fails to parse.
    InfraFailure: if secrets are requested outside CI/QEMU.
  """
  tryjob = api.buildbucket_util.is_tryjob
  # Resolve the build input to always contain a Gitiles commit.
  bb_build = api.buildbucket.build
  api.build_input_resolver.resolve(
      bb_build.input,
      default_project_url='https://fuchsia.googlesource.com/fuchsia')
  bb_input = bb_build.input
  with api.step.nest('got_revision') as presentation:
    # Read by the binary-size Gerrit plugin
    presentation.properties['got_revision'] = bb_input.gitiles_commit.id
  with api.step.nest('load spec') as bootstrap_step:
    try:
      bootstrap_step.presentation.step_text = 'loading spec'
      # Fetch and parse the Fuchsia spec proto matching this build.
      spec = api.spec.load_from_build(
          build=bb_build,
          spec_remote=spec_remote,
          Type=Fuchsia,
      )
    except api.spec.ParseError as e:
      raise api.step.StepFailure('failed to parse spec: %s' % str(e))
    # Log the textproto to logdog so we can programmatically scrape spec files
    # to check into the tree.
    bootstrap_step.presentation.logs['textproto'] = textpb.MessageToString(
        spec).split('\n')
    set_output_properties_from_spec(bootstrap_step, spec)
  # Handle illegal settings around secrets.
  if spec.test.requires_secrets:
    if tryjob or spec.test.device_type != 'QEMU':  # pragma: no cover
      raise api.step.InfraFailure(
          'the secrets pipeline is only supported in CI and on QEMU')
  checkout_root = api.path['start_dir'].join('fuchsia')
  # Three checkout modes: patchset snapshot (CQ), commit snapshot (CI), or a
  # full jiri checkout from a manifest.
  if spec.checkout.use_snapshot:
    if bb_input.gerrit_changes:
      assert len(bb_input.gerrit_changes) == 1
      checkout = api.checkout.from_patchset_snapshot(
          path=checkout_root,
          gerrit_change=bb_input.gerrit_changes[0],
          attributes=spec.checkout.attributes)
    else:
      checkout = api.checkout.from_commit_snapshot(
          path=checkout_root,
          gitiles_commit=bb_input.gitiles_commit,
          attributes=spec.checkout.attributes)
  else:
    assert spec.checkout.manifest
    assert spec.checkout.remote
    checkout = api.checkout.fuchsia_with_options(
        path=checkout_root,
        build=bb_build,
        manifest=spec.checkout.manifest,
        remote=spec.checkout.remote,
        project=spec.checkout.project,
        rebase_revision=bb_input.gitiles_commit.id,
        is_release_version=spec.checkout.is_release_version,
        attributes=spec.checkout.attributes,
    )
  if spec.checkout.upload_results:
    assert spec.gcs_bucket, (
        'gcs_bucket must be set if checkout.upload_results is')
    checkout.upload_results(spec.gcs_bucket)
  # We can only derive repo from buildbucket when triggered in CI or CQ;
  # validate the checkout then.
  # Moreover, skip validation if checking out a snapshot, as that may have been
  # triggered by a project not corresponding to a repo in the checkout.
  triggered_by_ci_cq = (
      len(bb_input.gerrit_changes) > 0 or bb_input.gitiles_commit.project)
  if triggered_by_ci_cq and not spec.checkout.use_snapshot:
    if bb_input.gerrit_changes:
      project = bb_input.gerrit_changes[0].project
    else:
      project = bb_input.gitiles_commit.project
    with api.step.nest('validate checkout'), api.context(cwd=checkout.root_dir):
      # Map the buildbucket project name to its path in the jiri checkout.
      repo_path = api.jiri.project(projects=[project]).json.output[0]['path']
      api.python(
          'validate FIDL namespaces',
          checkout.root_dir.join('scripts', 'style',
                                 'verify-fidl-libraries.py'),
          args=['--repo', repo_path],
      )
  # Check for properly formatted JSON in commit message before build
  multipliers_path = None
  if bb_input.gerrit_changes and spec.test.test_in_shards:
    with api.step.nest('test multipliers'):
      # Get project dir for gerrit change from source manifest
      gerrit_change = bb_input.gerrit_changes[0]
      project_dir = checkout.root_dir
      # Gerrit hosts are '<host>-review.googlesource.com'; strip '-review'
      # to recover the gitiles repo URL used in the source manifest.
      repo_url = 'https://%s/%s' % (gerrit_change.host.replace(
          '-review', ''), gerrit_change.project)
      dirs = checkout.source_manifest['directories']
      for d in dirs:
        if dirs[d]['git_checkout']['repo_url'] == repo_url:
          if d != '.':
            project_dir = checkout.root_dir.join(d)
          break
      with api.context(cwd=project_dir):
        commit_msg = api.git.get_commit_message(name='get commit msg')
      # Extract the MULTIPLY key's JSON payload from the commit message;
      # defaults to an empty list when absent.
      multipliers = api.jsonutil.extract_from_text(
          'extract',
          text=commit_msg,
          key=TEST_MULTIPLIER_KEY,
          default_contents=[])
      multipliers_path = api.path.mkstemp()
      api.file.write_json('write', multipliers_path, multipliers, indent=2)
  collect_build_metrics = spec.build.upload_results
  archives_to_build = []
  # If we are not building images, then we need not build package-related
  # targets.
  if not spec.build.exclude_images:
    archives_to_build.extend(['archive', 'package-archive'])
    if spec.build.include_breakpad_symbols:
      archives_to_build.append('breakpad-symbol-archive')
    if spec.build.include_symbol_archive:
      archives_to_build.append('symbol-archive')
  build_dir = checkout.root_dir.join('out')
  # Add infratools to universe packages and ninja_targets to ensure they are built.
  spec.build.universe_packages.append('//bundles:infratools')
  spec.build.ninja_targets.append('bundles:infratools')
  build = api.build.with_options(
      build_dir=build_dir,
      checkout=checkout,
      target=str(spec.build.target),
      build_type=str(spec.build.build_type),
      packages=spec.build.packages,
      universe_packages=spec.build.universe_packages,
      variants=spec.build.variants,
      gn_args=spec.build.gn_args,
      ninja_targets=list(spec.build.ninja_targets),
      board=spec.build.board,
      product=spec.build.product,
      collect_build_metrics=collect_build_metrics,
      build_images=not spec.build.exclude_images,
      archives_to_build=tuple(archives_to_build),
      gcs_bucket=spec.gcs_bucket,
  )
  # Run the size checker over the finished build and capture its JSON output.
  check_sizes_result = api.step(
      'check sizes', [
          build.tool('size_checker'), '--build-dir', build.fuchsia_build_dir,
          '--sizes-json-out',
          api.json.output()
      ],
      step_test_data=lambda: api.json.test_api.output({'some-file': 123}))
  # This property is read by the binary-size Gerrit plugin.
  if check_sizes_result.json.output:
    check_sizes_result.presentation.properties[
        'binary_sizes'] = check_sizes_result.json.output
  if spec.debug_symbol_gcs_bucket:
    build.upload_debug_symbols(
        debug_symbol_gcs_bucket=spec.debug_symbol_gcs_bucket,
        gcs_bucket=spec.gcs_bucket if spec.build.upload_results else None,
    )
  if spec.build.upload_results:
    assert spec.gcs_bucket, 'gcs_bucket must be set if build.upload_results is'
    build.upload_results(gcs_bucket=spec.gcs_bucket)
  shards = []
  if spec.test.test_in_shards:
    # Partition the tests into swarming task shards, applying any multipliers
    # extracted from the commit message above.
    shards = api.testsharder.execute(
        'create test shards',
        testsharder_path=build.tool('testsharder'),
        build_dir=build.fuchsia_build_dir,
        max_shard_size=spec.test.max_shard_size,
        multipliers=multipliers_path,
        tags=spec.build.environment_tags,
    )
  build_artifacts = build.get_artifacts(shards)
  api.artifacts.gcs_bucket = spec.artifact_gcs_bucket
  api.artifacts.uuid = api.buildbucket_util.id
  orchestration_inputs = None
  if spec.build.run_tests:
    if spec.test.test_in_shards:
      shard_requests = api.testing_requests.shard_requests(
          build_artifacts,
          bb_build,
          spec.test.per_test_timeout_secs,
          spec.test.pool,
          spec.test.swarming_expiration_timeout_secs,
          spec.test.swarming_io_timeout_secs,
          spec.test.use_runtests,
          timeout_secs=spec.test.timeout_secs)
    else:
      # Deprecated non-sharded path: a single request built from hardcoded
      # test commands.
      shard_requests = api.testing_requests.deprecated_shard_requests(
          build_artifacts,
          api.testing_requests.deprecated_test_cmds(spec.test),
          spec.test.device_type,
          spec.test.pool,
          spec.test.timeout_secs,
          spec.test.pave,
          requires_secrets=spec.test.requires_secrets,
          swarming_expiration_timeout_secs=spec.test
          .swarming_expiration_timeout_secs,
          swarming_io_timeout_secs=spec.test.swarming_io_timeout_secs,
      )
    orchestration_inputs = api.build.TestOrchestrationInputs(
        build_artifacts.llvm_symbolizer, build_artifacts.minfs,
        build_artifacts.symbolize_tool, shard_requests,
        build_artifacts.tests_file)
  # Must be done after testing.shard_requests() is called, because that
  # modifies the filesystem images. TODO(garymm,joshuaseaton): once legacy_qemu
  # code paths are removed, remove this comment as it will become false.
  api.artifacts.upload('upload artifacts', build=build)
  all_results = []
  if spec.build.run_tests:
    if spec.test.test_in_shards:
      all_results = api.testing.test_in_shards(
          collect_timeout_secs=spec.test.collect_timeout_secs,
          debug_symbol_gcs_bucket=spec.debug_symbol_gcs_bucket,
          orchestration_inputs=orchestration_inputs,
          max_attempts=spec.test.max_attempts)
    else:
      all_results = [
          api.testing.deprecated_test(
              spec.debug_symbol_gcs_bucket,
              spec.test.device_type,
              orchestration_inputs,
              max_attempts=spec.test.max_attempts)
      ]
    if spec.test.upload_results:
      assert spec.gcs_bucket, 'gcs_bucket must be set if test.upload_results is'
      for test_results in all_results:
        test_results.upload_results(
            gcs_bucket=spec.gcs_bucket,
            # Mixing data from and pre-submit and post-submit in catapult results
            # in a confusing UI, so always exclude pre-submit (tryjob). We set
            # upload_to_catapult to True for some tryjobs because it makes
            # generating our configs easier.
            upload_to_catapult=(not tryjob and spec.test.upload_to_catapult),
        )
    if 'profile' in build_artifacts.variants:
      # Coverage builds: post-process profile data into coverage reports.
      api.testing.process_coverage(
          covargs_path=build_artifacts.covargs,
          test_results=[
              # TODO(fxb/27336): coverage is only supported for tests on Fuchsia.
              result for result in all_results if result.from_fuchsia
          ],
          ids_txt=build_artifacts.ids,
          llvm_profdata=build_artifacts.llvm_profdata,
          llvm_cov=build_artifacts.llvm_cov,
          gcs_bucket=spec.gcs_bucket,
      )
    # Raise test failures
    with api.step.defer_results():
      api.testing.raise_failures()
      for test_results in all_results:
        test_results.raise_failures()
  if spec.build.enforce_size_limits:
    build.check_filesystem_sizes()
def set_output_properties_from_spec(step, spec):
  """Publishes selected spec fields as output properties of this recipe.

  Downstream builders consume these output properties and fail when they are
  absent, so they are always emitted here.
  """
  output_props = step.presentation.properties
  output_props['gcs_bucket'] = spec.gcs_bucket
def GenTests(api):
  """Yields recipe-simulation test cases covering the main recipe paths."""
  # Base CI build input shared by all non-tryjob cases.
  ci_build = api.buildbucket.ci_build(
      project='fuchsia',
      git_repo='https://fuchsia.googlesource.com/fuchsia',
  )

  def try_build(project='fuchsia'):
    # Buildbucket try-build input for the given project.
    return api.buildbucket.try_build(
        project=project,
        git_repo='https://fuchsia.googlesource.com/%s' % project,
    )

  # Mock gitiles refs fetched when the buildbucket build input is an unresolved
  # Gerrit change or Gitiles commit.
  default_gitiles_refs_steps = api.gitiles.refs('refs', [
      'refs/heads/master',
      'deadbeef',
  ])

  def source_manifest_for_cq(use_snapshot=False):
    # Mock `jiri source-manifest` output; RunSteps uses it to map a Gerrit
    # change's repo URL to its directory in the checkout.
    # NOTE(review): the use_snapshot parameter is unused here — confirm
    # whether it was meant to alter the mocked data.
    step_name = 'checkout.jiri source-manifest'
    return api.step_data(
        step_name,
        api.json.output(
            {
                'directories': {
                    '.': {
                        'git_checkout': {
                            'repo_url':
                                'https://fuchsia.googlesource.com/fuchsia',
                            'revision':
                                '4c2b0da3c06341db5cebe4d02c78c93c3b2bd78b',
                        }
                    },
                    'garnet': {
                        'git_checkout': {
                            'repo_url':
                                'https://fuchsia.googlesource.com/garnet',
                            'revision':
                                '4c2b0da3c06341db5cebe4d02c78c93c3b2bd78b'
                        }
                    }
                }
            },
            name='source manifest'))

  def test_step_data(deprecated=False):
    # Mock swarming/shard step data. `deprecated` selects data for the
    # non-sharded (deprecated) testing code path.
    if deprecated:
      return api.testing.task_step_data([
          api.swarming.task_result(
              id='610',
              name='QEMU',
              outputs=['out.tar'],
          ),
      ])
    return (
        api.testing.task_retry_step_data(
            [
                api.swarming.task_result(
                    id='610',
                    name='QEMU',
                    outputs=['out.tar'],
                ),
            ],
            iteration=0) +
        api.testing_requests.shards_step_data(
            step_name='create test shards',
            shards=[
                api.testsharder.shard(
                    name='QEMU',
                    tests=api.testing_requests.default_tests(),
                    dimensions=dict(device_type='QEMU'),
                ),
            ]) +
        api.testing.test_step_data(shard_name='QEMU'))  # yapf: disable

  def spec_data(use_snapshot=False,
                variants=(),
                build_target='x64',
                build_type='debug',
                include_symbol_archive=False,
                include_breakpad_symbols=False,
                enforce_size_limits=False,
                max_shard_size=0,
                run_tests=True,
                test_in_shards=True,
                requires_secrets=False,
                gcs_bucket=None,
                debug_symbol_gcs_bucket='debug-symbols'):
    # Builds the mocked Fuchsia spec proto for the 'load spec' step, plus
    # matching test step data when tests are enabled.
    test_spec = None
    if run_tests:
      test_spec = Fuchsia.Test(
          max_shard_size=max_shard_size,
          timeout_secs=30 * 60,
          pool='fuchsia.tests',
          test_in_shards=test_in_shards,
          requires_secrets=requires_secrets,
          # We should never expire a test task. This is currently 5 hours, but
          # should be treated as infinite.
          swarming_expiration_timeout_secs=18000,
          swarming_io_timeout_secs=5 * 60,
          upload_results=bool(gcs_bucket),
          use_runtests='profile' in variants,
          per_test_timeout_secs=5 * 60,
      )
      if not test_in_shards:
        # The deprecated path requires an explicit device type.
        test_spec.device_type = 'QEMU'
    step_data = api.spec.spec_loaded_ok(
        step_name='load spec.build_init',
        message=Fuchsia(
            checkout=Fuchsia.Checkout(
                manifest='manifest',
                remote='remote',
                upload_results=bool(gcs_bucket),
                use_snapshot=use_snapshot,
            ),
            build=Fuchsia.Build(
                variants=variants,
                build_type=build_type,
                run_tests=run_tests,
                board='boards/x64.gni',
                product='products/core.gni',
                target=build_target,
                include_symbol_archive=include_symbol_archive,
                include_breakpad_symbols=include_breakpad_symbols,
                enforce_size_limits=enforce_size_limits,
                upload_results=bool(gcs_bucket),
            ),
            test=test_spec,
            debug_symbol_gcs_bucket=debug_symbol_gcs_bucket,
            gcs_bucket=gcs_bucket,
            artifact_gcs_bucket='fuchsia-infra-artifacts',
        ),
    )
    if run_tests:
      step_data += test_step_data(deprecated=not test_in_shards)
    return step_data

  def test(name, tryjob=False, project='fuchsia', use_snapshot=False):
    # Assembles the base test case: build input plus, for tryjobs, the extra
    # gitiles and source-manifest mocks needed by the CQ code path.
    if tryjob:
      return api.test(name) + try_build(
          project) + default_gitiles_refs_steps + source_manifest_for_cq(
              use_snapshot)
    else:
      return api.test(name) + ci_build

  # ParseError. Should result in a failure.
  yield (test('spec_parse_error') +
         api.spec.spec_parse_error(step_name='load spec.build_init'))
  yield test('ci') + spec_data()
  yield (test('cq', tryjob=True) + spec_data() +
         api.build_input_resolver.set_gerrit_branch())
  yield test('with_uploading') + spec_data(gcs_bucket='fuchsia-build')
  yield test('profile') + spec_data(
      variants=['profile'], gcs_bucket='fuchsia-build')
  yield test('commit_snapshot') + spec_data(use_snapshot=True)
  yield (
      test('patchset_snapshot',
           tryjob=True,
           project='garnet',
           use_snapshot=True) +
      spec_data(use_snapshot=True) +
      api.build_input_resolver.set_gerrit_branch())  # yapf: disable
  # NOTE(review): 'release-buid_only' looks like a typo for 'release-build_only';
  # renaming changes the expectation-file name, so it is left as-is here.
  yield (test('release-buid_only') + spec_data(
      build_type='release', run_tests=False, enforce_size_limits=True))
  yield (test('with_archives') +
         spec_data(include_symbol_archive=True, include_breakpad_symbols=True))
  yield test('deprecated_test') + spec_data(test_in_shards=False)
  yield (test('requires_secrets') +
         spec_data(requires_secrets=True, test_in_shards=False))
  yield test('max_shard_size') + spec_data(max_shard_size=200)