# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia and isolating build artifacts."""
from google.protobuf import text_format
from PB.infra.fuchsia import Fuchsia
from recipe_engine.recipe_api import Property
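# Commit-message key whose value lists test multipliers. A CL description
# might contain, e.g. (illustrative; see testsharder for the exact schema):
#   MULTIPLY: [{"name": "foo-tests", "total_runs": 5}]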
TEST_MULTIPLIER_KEY = 'MULTIPLY'

# These constants identify the isolated hash in this recipe's output when
# building SDK archives. They must be kept in sync with sdk.py.
ISOLATE_STEP_NAME = 'isolate artifacts'
ISOLATED_OUTPUT_KEY = 'isolated_output_hash'
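# (sdk.py is expected to look up the step named ISOLATE_STEP_NAME and read
# its ISOLATED_OUTPUT_KEY property; see sdk.py for the authoritative logic.)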

DEPS = [
'fuchsia/artifacts',
'fuchsia/build',
'fuchsia/buildbucket_util',
'fuchsia/build_input_resolver',
'fuchsia/checkout',
'fuchsia/git',
'fuchsia/gitiles',
'fuchsia/jiri',
'fuchsia/jsonutil',
'fuchsia/spec',
'fuchsia/testing_requests',
'fuchsia/testsharder',
'fuchsia/upload_debug_symbols',
'recipe_engine/buildbucket',
'recipe_engine/cipd',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/isolated',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/step',
]

PROPERTIES = {
'parent_id':
Property(
# This is a string because led IDs are not integers.
kind=str,
help='Parent build\'s buildbucket or led id',
default=None),
'spec_remote':
Property(
kind=str,
help='URL of the specs git repository',
default='https://fuchsia.googlesource.com/integration'),
'spec_revision':
Property(
kind=str,
help='The revision of spec_remote to fetch',
default='HEAD'),
}


def RunSteps(api, parent_id, spec_remote, spec_revision):
# Resolve the build input to always contain a Gitiles commit.
bb_build = api.buildbucket.build
api.build_input_resolver.resolve(
bb_build.input,
default_project_url='https://fuchsia.googlesource.com/fuchsia')
commit_remote = 'https://%s/%s' % (bb_build.input.gitiles_commit.host,
bb_build.input.gitiles_commit.project)
if commit_remote == spec_remote:
# If there was no parent build, then spec_revision may not have been
# resolved, so use the revision that we resolved immediately above.
if spec_revision == 'HEAD':
spec_revision = bb_build.input.gitiles_commit.id
  # Otherwise spec_revision was specified by the parent, and all accesses to
  # that remote should use that revision. In this case
  # build_input_resolver.resolve() is redundant since we overwrite the
  # revision anyway; we still call it to pre-populate the build input's
  # gitiles_commit and to keep the conditional logic simple.
else:
bb_build.input.gitiles_commit.id = spec_revision
with api.step.nest('got_revision') as presentation:
# Read by the binary-size Gerrit plugin
presentation.properties['got_revision'] = bb_build.input.gitiles_commit.id
with api.step.nest('load spec') as presentation:
presentation.step_text = 'loading spec'
try:
spec = api.spec.load_from_build(
build=bb_build,
spec_remote=spec_remote,
Type=Fuchsia,
spec_revision=spec_revision,
)
except api.spec.ParseError as e:
raise api.step.StepFailure('failed to parse spec: %s' % str(e))
presentation.logs['textproto'] = text_format.MessageToString(spec).split(
'\n')
# The artifacts tool relies on this output property.
# This design has been fragile. Please don't add more dependencies on any
# additional output properties.
presentation.properties['gcs_bucket'] = spec.gcs_bucket
checkout = api.checkout.from_spec(spec.checkout)
if spec.checkout.upload_results:
    assert spec.gcs_bucket, (
        'gcs_bucket must be set if checkout.upload_results is set')
checkout.upload_results(spec.gcs_bucket, namespace=parent_id)
  # The repo can only be derived from the buildbucket input when the build was
  # triggered by CI or CQ, so only validate the checkout in those cases.
bb_input = api.buildbucket.build_input
if bb_input.gerrit_changes or bb_input.gitiles_commit.project:
if bb_input.gerrit_changes:
project = bb_input.gerrit_changes[0].project
else:
project = bb_input.gitiles_commit.project
with api.step.nest('validate checkout'), api.context(cwd=checkout.root_dir):
repo_path = api.jiri.project(projects=[project]).json.output[0]['path']
api.python(
'validate FIDL namespaces',
checkout.root_dir.join('scripts', 'style',
'verify-fidl-libraries.py'),
args=['--repo', repo_path],
)
collect_build_metrics = False
if spec.build.upload_results:
    assert spec.gcs_bucket, (
        'gcs_bucket must be set if build.upload_results is set')
collect_build_metrics = True
# Build infratools.
spec.build.universe_packages.append('//bundles:infratools')
spec.build.ninja_targets.append('bundles:infratools')
# If SDK subbuild, set SDK ID to parent ID.
sdk_id = parent_id if spec.build.sdk_subbuild else None
build = api.build.from_spec(
spec.build,
checkout,
collect_build_metrics,
sdk_id=sdk_id,
gcs_bucket=spec.gcs_bucket)
check_sizes_result = api.step(
'check sizes', [
build.tool('size_checker'), '--build-dir', build.fuchsia_build_dir,
'--sizes-json-out',
api.json.output()
],
step_test_data=lambda: api.json.test_api.output({'some-file': 123}))
# This property is read by the binary-size Gerrit plugin.
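  # The output is expected to map file or category names to sizes in bytes,
  # e.g. {'some-file': 123} as in the step_test_data above (illustrative).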
if check_sizes_result.json.output:
check_sizes_result.presentation.properties[
'binary_sizes'] = check_sizes_result.json.output
# In SDK subbuild mode, isolate SDK archive and ninja targets.
if spec.build.sdk_subbuild:
sdk_archive_path = build.fuchsia_build_dir.join('sdk', 'archive')
isolated = api.isolated.isolated(sdk_archive_path)
sdk_ninja_targets = [
target for target in spec.build.ninja_targets
if target.startswith('sdk/archive')
]
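    # A target's path mirrors its output location under the build dir, e.g.
    # 'sdk/archive/core.tar.gz' is added from
    # <fuchsia_build_dir>/sdk/archive/core.tar.gz.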
for ninja_target in sdk_ninja_targets:
isolated.add_file(
path=build.fuchsia_build_dir.join(*ninja_target.split('/')))
sdk_archive_isolated_hash = isolated.archive(ISOLATE_STEP_NAME)
api.step.active_result.presentation.properties[
ISOLATED_OUTPUT_KEY] = sdk_archive_isolated_hash
if spec.debug_symbol_gcs_bucket:
build.upload_debug_symbols(
debug_symbol_gcs_bucket=spec.debug_symbol_gcs_bucket,
gcs_bucket=spec.gcs_bucket if spec.build.upload_results else None,
namespace=parent_id,
)
# TODO(garymm): assert spec.gcs_bucket set if upload_results set.
if spec.gcs_bucket and spec.build.upload_results:
build.upload_results(
gcs_bucket=spec.gcs_bucket,
namespace=parent_id,
)
  # Check the commit message for properly formatted test multiplier JSON.
multipliers_path = None
if bb_input.gerrit_changes and spec.test.test_in_shards:
with api.step.nest('test multipliers'):
# Get project dir for gerrit change from source manifest
gerrit_change = bb_input.gerrit_changes[0]
project_dir = checkout.root_dir
repo_url = 'https://%s/%s' % (gerrit_change.host.replace(
'-review', ''), gerrit_change.project)
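      # The source manifest is assumed to have the shape consumed below:
      #   {'directories': {<path>: {'git_checkout': {'repo_url': <url>}}}}
      # where <path> is relative to the checkout root ('.' for the root).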
dirs = checkout.source_manifest['directories']
for d in dirs:
if dirs[d]['git_checkout']['repo_url'] == repo_url:
if d != '.':
project_dir = checkout.root_dir.join(d)
break
with api.context(cwd=project_dir):
commit_msg = api.git.get_commit_message(name='get commit msg')
multipliers = api.jsonutil.extract_from_text(
'extract',
text=commit_msg,
key=TEST_MULTIPLIER_KEY,
default_contents=[])
multipliers_path = api.path.mkstemp()
api.file.write_json('write', multipliers_path, multipliers, indent=2)
if spec.build.enforce_size_limits:
build.check_filesystem_sizes()
# Run the testsharder to collect test specifications and shard them.
  # Shards are always created regardless of whether test_in_shards is true.
shards = api.testsharder.execute(
'create test shards',
testsharder_path=build.tool('testsharder'),
build_dir=build.fuchsia_build_dir,
max_shard_size=spec.test.max_shard_size,
multipliers=multipliers_path,
tags=spec.build.environment_tags,
)
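  # Each shard groups tests that can run in the same environment (e.g. same
  # device type and tags); the shards drive the test requests built below.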
  # Must be set before testing_requests.shard_requests() is called.
api.artifacts.gcs_bucket = spec.artifact_gcs_bucket
api.artifacts.uuid = parent_id or api.buildbucket_util.id
if spec.build.run_tests:
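    # parent_id is assumed to be set whenever run_tests is true; a numeric ID
    # means a real buildbucket parent, anything else means the parent was
    # launched by led.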
if parent_id.isdigit():
# Use parent build so that testing task requests refer to
# that build, which actually orchestrates testing.
buildbucket_build = api.buildbucket.get(int(parent_id))
else:
# When the parent was launched by led, it's not possible to retrieve
# the parent build, so we fall back to using our own build.
# This is technically incorrect and any tests that rely on having
# correct buildbucket metadata may fail when run via led. Ideally
# we wouldn't have any tests that knew about buildbucket, but
# for now this is OK since none of those tests run in recipes CQ,
# which uses led to test recipes changes.
buildbucket_build = bb_build
build_artifacts = build.get_artifacts(shards)
if spec.test.test_in_shards:
shard_requests = api.testing_requests.shard_requests(
build_artifacts,
buildbucket_build,
spec.test.per_test_timeout_secs,
spec.test.pool,
spec.test.swarming_expiration_timeout_secs,
spec.test.swarming_io_timeout_secs,
spec.test.use_runtests,
timeout_secs=spec.test.timeout_secs)
else:
shard_requests = api.testing_requests.deprecated_shard_requests(
build_artifacts,
api.testing_requests.deprecated_test_cmds(spec.test),
spec.test.device_type,
spec.test.pool,
spec.test.timeout_secs,
spec.test.pave,
requires_secrets=spec.test.requires_secrets,
swarming_expiration_timeout_secs=spec.test
.swarming_expiration_timeout_secs,
swarming_io_timeout_secs=spec.test.swarming_io_timeout_secs,
)
orchestration_inputs = api.build.TestOrchestrationInputs(
build_artifacts.llvm_symbolizer, build_artifacts.minfs,
build_artifacts.symbolize_tool, shard_requests,
build_artifacts.tests_file)
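    # Isolate the orchestration inputs and expose the hash as an output
    # property so that the parent build, which orchestrates testing, can
    # retrieve them.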
orchestration_inputs_hash = orchestration_inputs.isolate(api)
step_result = api.step('logging orchestration_inputs_hash', cmd=None)
step_result.presentation.properties[
orchestration_inputs.HASH_PROPERTY] = orchestration_inputs_hash
  # Must be done after testing_requests.shard_requests() is called, because
  # that modifies the filesystem images. TODO(garymm,joshuaseaton): once
  # legacy_qemu code paths are removed, remove this comment as it will become
  # false.
api.artifacts.upload('upload artifacts', build)


def GenTests(api):
def spec_data(use_snapshot=False,
build_type='debug',
ninja_targets=(),
sdk_subbuild=False,
variants=(),
device_type='QEMU',
enforce_size_limits=False,
run_tests=True,
test_in_shards=True,
requires_secrets=False,
gcs_bucket=None,
debug_symbol_gcs_bucket='debug-symbols'):
test_spec = None
if run_tests:
test_spec = Fuchsia.Test(
device_type=device_type,
max_shard_size=0,
timeout_secs=30 * 60,
pool='fuchsia.tests',
test_in_shards=test_in_shards,
requires_secrets=requires_secrets,
swarming_expiration_timeout_secs=10 * 60,
swarming_io_timeout_secs=5 * 60,
)
spec = Fuchsia(
checkout=Fuchsia.Checkout(
manifest='minimal',
project='integration',
remote='https://fuchsia.googlesource.com/manifest',
upload_results=bool(gcs_bucket),
use_snapshot=use_snapshot,
),
build=Fuchsia.Build(
variants=variants,
build_type=build_type,
run_tests=run_tests,
ninja_targets=ninja_targets,
sdk_subbuild=sdk_subbuild,
board='boards/x64.gni',
product='products/core.gni',
target='x64',
include_symbol_archive=False,
include_breakpad_symbols=False,
enforce_size_limits=enforce_size_limits,
upload_results=bool(gcs_bucket),
),
test=test_spec,
debug_symbol_gcs_bucket=debug_symbol_gcs_bucket,
gcs_bucket=gcs_bucket,
artifact_gcs_bucket='fuchsia-infra-artifacts',
)
return api.spec.spec_loaded_ok(
step_name='load spec.build_init', message=spec)

  default_gitiles_refs_steps = api.gitiles.refs('refs', [
'refs/heads/master',
'deadbeef',
])
spec_remote = 'https://fuchsia.googlesource.com/integration'
properties = {
# We rely on the buildbucket test API using this same
# ID for ci_build_message and the builds returned by get().
'parent_id': str(api.buildbucket.ci_build_message().id),
'spec_remote': spec_remote,
}
  # yapf: disable
yield (
api.checkout.test('default', tryjob=False) +
api.build.test('default') +
spec_data(gcs_bucket='fuchsia-infra', run_tests=True) +
api.properties(**properties)
)
yield (
api.checkout.test('non-numeric-parent-id', tryjob=False) +
api.build.test('default') +
spec_data(gcs_bucket='fuchsia-infra', run_tests=True) +
api.properties(parent_id='not-a-number')
)
yield (
api.checkout.test('default_cq', tryjob=True) +
api.build.test('default_cq', tryjob=True) +
spec_data(run_tests=True) +
api.buildbucket.try_build(
# Values chosen to match the test data in jiri/test_api.py
# example_source_manifest() so that we trigger the test multipliers
# code path.
project='manifest',
git_repo='https://fuchsia.googlesource.com/manifest') +
api.build_input_resolver.set_gerrit_branch('master') +
default_gitiles_refs_steps +
api.properties(**properties)
)
yield (
api.checkout.test('spec_remote_cq', tryjob=True) +
api.build.test('spec_remote_cq', tryjob=True) +
spec_data(run_tests=True) +
api.buildbucket.try_build(git_repo=spec_remote) +
api.build_input_resolver.set_gerrit_branch('master') +
default_gitiles_refs_steps +
api.properties(**properties)
)
yield (
api.checkout.test('spec_remote_cq_with_spec_revision', tryjob=True) +
api.build.test('spec_remote_cq', tryjob=True) +
spec_data(run_tests=True) +
api.buildbucket.try_build(git_repo=spec_remote) +
api.build_input_resolver.set_gerrit_branch('master') +
default_gitiles_refs_steps +
api.properties(spec_revision='deadbeef2', **properties)
)
yield (
api.checkout.test('build_type_release_not_run_tests', tryjob=False) +
api.build.test('default') +
spec_data(build_type='release', gcs_bucket='fuchsia-infra',
run_tests=False, enforce_size_limits=True) +
api.properties(**properties)
)
yield (api.checkout.test('spec_parse_error', tryjob=False, status='failure') +
api.spec.spec_parse_error(step_name='load spec.build_init'))
# yapf: enable
yield (api.checkout.test('sdk', tryjob=False) +
api.build.test('sdk', tryjob=False) + spec_data(
run_tests=False,
ninja_targets=[
'sdk/archive/core.tar.gz', 'sdk/archive/fuchsia_dart.tar.gz'
],
sdk_subbuild=True) + api.properties(parent_id='sdk-id'))
yield (api.checkout.test('not_test_in_shards') + api.build.test('') +
spec_data(run_tests=True, test_in_shards=False) +
api.properties(**properties))