# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia and isolating build artifacts."""

from google.protobuf import text_format
from PB.infra.fuchsia import Fuchsia
from recipe_engine.recipe_api import Property
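
# Key in a CL's commit message whose value lists test multipliers, e.g.
# (illustrative; the exact syntax is defined by jsonutil):
#   MULTIPLY: `[{"name": "foo_tests", "total_runs": 30}]`
# See write_multipliers() below.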
TEST_MULTIPLIER_KEY = 'MULTIPLY'

# These represent the location of the isolated hash in the output of this
# recipe when building SDK archives. Must be kept in sync with sdk.py.
ISOLATE_STEP_NAME = 'isolate artifacts'
ISOLATED_OUTPUT_KEY = 'isolated_output_hash'

DEPS = [
'fuchsia/artifacts',
'fuchsia/build',
'fuchsia/buildbucket_util',
'fuchsia/build_input_resolver',
'fuchsia/checkout',
'fuchsia/fuchsia',
'fuchsia/git',
'fuchsia/gitiles',
'fuchsia/jiri',
'fuchsia/jsonutil',
'fuchsia/spec',
'fuchsia/status_check',
'fuchsia/testing_requests',
'fuchsia/testsharder',
'recipe_engine/buildbucket',
'recipe_engine/cipd',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/isolated',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/step',
]

PROPERTIES = {
'parent_id':
Property(
# This is a string because led IDs are not integers.
kind=str,
help='Parent build\'s buildbucket or led id',
default=None),
'spec_remote':
Property(
kind=str,
help='URL of the specs git repository',
default='https://fuchsia.googlesource.com/integration'),
'spec_revision':
Property(
kind=str,
help='The revision of spec_remote to fetch',
default='HEAD'),
}


def RunSteps(api, parent_id, spec_remote, spec_revision):
spec, spec_revision = api.fuchsia.setup_with_spec(spec_remote, spec_revision)
bb_input = api.buildbucket.build.input
with api.step.nest('got_revision') as presentation:
# Read by the binary-size Gerrit plugin
presentation.properties['got_revision'] = bb_input.gitiles_commit.id
# TODO(fxb/39958): Retrying all failed builds in Gerrit launches subbuilds as
# well, which fail because they're not triggered by a parent build. Once
# that's resolved we can get rid of this check.
if spec.build.run_tests and not parent_id:
raise api.python.infra_failing_step(
'no parent_id', 'subbuilds can only be triggered by parent builds')
checkout = api.checkout.from_spec(spec.checkout)
# Load test multipliers before building so we can fail fast if the JSON is
# malformed, which avoids wasting a build.
multipliers_path = None
if bb_input.gerrit_changes and spec.test.test_in_shards:
multipliers_path = api.path.mkstemp()
with api.step.nest('test multipliers'):
write_multipliers(api, multipliers_path, bb_input, checkout)
if spec.checkout.upload_results:
assert spec.gcs_bucket, (
'gcs_bucket must be set if checkout.upload_results is')
checkout.upload_results(spec.gcs_bucket, namespace=parent_id)
  # We can only derive the repo to validate from the buildbucket input when
  # triggered by CI or CQ, so we only validate the checkout in those cases.
if bb_input.gerrit_changes or bb_input.gitiles_commit.project:
if bb_input.gerrit_changes:
project = bb_input.gerrit_changes[0].project
else:
project = bb_input.gitiles_commit.project
with api.step.nest('validate checkout'), api.context(cwd=checkout.root_dir):
repo_path = api.jiri.project(projects=[project]).json.output[0]['path']
api.python(
'validate FIDL namespaces',
checkout.root_dir.join('scripts', 'style',
'verify-fidl-libraries.py'),
args=['--repo', repo_path],
)
collect_build_metrics = False
if spec.build.upload_results:
assert spec.gcs_bucket, (
        'gcs_bucket must be set if build.upload_results is')
collect_build_metrics = True
# Build infratools.
spec.build.universe_packages.append('//bundles:infratools')
spec.build.ninja_targets.append('bundles:infratools')
# If SDK subbuild, set SDK ID to parent ID.
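  # (This keeps the SDK artifacts produced by all subbuilds of one parent
  # build versioned under a single ID.)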
sdk_id = parent_id if spec.build.sdk_subbuild else None
build = api.build.from_spec(
spec.build,
checkout,
collect_build_metrics,
pave=spec.test.pave,
sdk_id=sdk_id,
gcs_bucket=spec.gcs_bucket)
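  # size_checker emits a JSON map of file name to size. ok_ret='any' keeps
  # the step from raising on a non-zero exit so the JSON output can still be
  # attached to an output property before we fail manually below.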
check_sizes_result = api.step(
'check sizes',
[
build.tool('size_checker'), '--build-dir', build.fuchsia_build_dir,
'--sizes-json-out',
api.json.output()
],
step_test_data=lambda: api.json.test_api.output({'some-file': 123}),
# If the size checks fail, we still want to write the output to
# the output property below.
ok_ret='any')
# This property is read by the binary-size Gerrit plugin.
if check_sizes_result.json.output:
check_sizes_result.presentation.properties[
'binary_sizes'] = check_sizes_result.json.output
if check_sizes_result.exc_result.retcode:
raise api.step.StepFailure('size checks failed')
# In SDK subbuild mode, isolate SDK archive and ninja targets.
if spec.build.sdk_subbuild:
sdk_archive_path = build.fuchsia_build_dir.join('sdk', 'archive')
isolated = api.isolated.isolated(sdk_archive_path)
sdk_ninja_targets = [
target for target in spec.build.ninja_targets
if target.startswith('sdk/archive')
]
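    # A ninja target's path mirrors its output path under the build dir,
    # e.g. 'sdk/archive/core.tar.gz' maps to
    # <fuchsia_build_dir>/sdk/archive/core.tar.gz.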
for ninja_target in sdk_ninja_targets:
isolated.add_file(
path=build.fuchsia_build_dir.join(*ninja_target.split('/')))
sdk_archive_isolated_hash = isolated.archive(ISOLATE_STEP_NAME)
api.step.active_result.presentation.properties[
ISOLATED_OUTPUT_KEY] = sdk_archive_isolated_hash
# TODO(garymm): assert spec.gcs_bucket set if upload_results set.
if spec.gcs_bucket and spec.build.upload_results:
build.upload_results(
gcs_bucket=spec.gcs_bucket,
is_release_version=spec.checkout.is_release_version,
namespace=parent_id,
)
if spec.build.enforce_size_limits:
build.check_filesystem_sizes()
# Must be set before testing_requests.task_requests() is called.
api.artifacts.gcs_bucket = spec.artifact_gcs_bucket
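  # Namespace artifacts under the parent's ID when running as a subbuild so
  # the orchestrating parent can find them; otherwise use our own build ID.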
api.artifacts.uuid = parent_id or api.buildbucket_util.id
if spec.build.run_tests:
if parent_id.isdigit():
# Use parent build so that testing task requests refer to
# that build, which actually orchestrates testing.
buildbucket_build = api.buildbucket.get(int(parent_id))
# If it's a try build, the parent build will not have its gitiles_commit
# populated (it's populated at runtime by `build_input_resolver`, but
# that doesn't change the input values stored in Buildbucket). So we need
# to populate it with the same commit that `build_input_resolver`
# resolved for the subbuild.
buildbucket_build.input.gitiles_commit.CopyFrom(bb_input.gitiles_commit)
else:
# When the parent was launched by led, it's not possible to retrieve
# the parent build, so we fall back to using our own build.
# This is technically incorrect and any tests that rely on having
# correct buildbucket metadata may fail when run via led. Ideally
# we wouldn't have any tests that knew about buildbucket, but
# for now this is OK since none of those tests run in recipes CQ,
# which uses led to test recipes changes.
buildbucket_build = api.buildbucket.build
if spec.test.test_in_shards:
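      # The testsharder groups tests from the build graph into shards, keyed
      # by the environment (e.g. device type) each test must run in.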
shards = api.testsharder.execute(
'create test shards',
testsharder_path=build.tool('testsharder'),
build_dir=build.fuchsia_build_dir,
max_shard_size=spec.test.max_shard_size,
target_duration_secs=spec.test.target_shard_duration_secs,
max_shards_per_env=spec.test.max_shards_per_env,
multipliers=multipliers_path,
tags=spec.build.environment_tags,
)
task_requests = api.testing_requests.task_requests(
build,
buildbucket_build,
spec.test.per_test_timeout_secs,
spec.test.pool,
shards,
spec.test.swarming_expiration_timeout_secs,
spec.test.swarming_io_timeout_secs,
spec.test.use_runtests,
spec.test.timeout_secs,
default_service_account=spec.test.default_service_account,
pave=spec.test.pave,
targets_serial=spec.test.targets_serial)
else:
task_requests = api.testing_requests.deprecated_task_requests(
build,
api.testing_requests.deprecated_test_cmds(spec.test),
spec.test.device_type,
spec.test.pool,
spec.test.timeout_secs,
spec.test.pave,
swarming_expiration_timeout_secs=spec.test
.swarming_expiration_timeout_secs,
swarming_io_timeout_secs=spec.test.swarming_io_timeout_secs,
default_service_account=spec.test.default_service_account,
)
orchestration_inputs = api.build.TestOrchestrationInputs.from_build_results(
build, task_requests)
orchestration_inputs_hash = orchestration_inputs.isolate(api)
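    # Surface the hash as an output property so the parent build can locate
    # and download the isolated test orchestration inputs.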
step_result = api.step('logging orchestration_inputs_hash', cmd=None)
step_result.presentation.properties[
orchestration_inputs.HASH_PROPERTY] = orchestration_inputs_hash
# Must be done after testing_requests.task_requests() is called, because that
# modifies the filesystem images. TODO(garymm,joshuaseaton): once legacy_qemu
# code paths are removed, remove this comment as it will become false.
if spec.artifact_gcs_bucket:
api.artifacts.upload('upload artifacts', build)


def write_multipliers(api, multipliers_path, bb_input, checkout):
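  """Extracts test multipliers from the CL's commit message into a JSON file.

  A multiplier requests extra runs of a particular test. An illustrative
  commit message value (the exact syntax is defined by jsonutil) might be:
    MULTIPLY: `[{"name": "foo_tests", "total_runs": 30}]`
  """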
  # Get the project directory for the Gerrit change from the source manifest.
gerrit_change = bb_input.gerrit_changes[0]
project_dir = checkout.root_dir
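  # Gerrit hosts look like 'fuchsia-review.googlesource.com' while the source
  # manifest records 'fuchsia.googlesource.com', so drop '-review' before
  # comparing repo URLs.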
repo_url = 'https://%s/%s' % (gerrit_change.host.replace(
'-review', ''), gerrit_change.project)
dirs = checkout.source_manifest['directories']
for d in dirs:
if dirs[d]['git_checkout']['repo_url'] == repo_url:
if d != '.':
project_dir = checkout.root_dir.join(d)
break
with api.context(cwd=project_dir):
commit_msg = api.git.get_commit_message(name='get commit msg')
multipliers = api.jsonutil.extract_from_text(
'extract', text=commit_msg, key=TEST_MULTIPLIER_KEY, default_contents=[])
api.file.write_json('write', multipliers_path, multipliers, indent=2)


def GenTests(api):
def spec_data(use_snapshot=False,
build_type='debug',
ninja_targets=(),
sdk_subbuild=False,
variants=(),
device_type='QEMU',
enforce_size_limits=False,
run_tests=True,
test_in_shards=True,
gcs_bucket=None,
pave=True):
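    """Returns step test data simulating a successfully loaded build spec."""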
test_spec = None
if run_tests:
test_spec = Fuchsia.Test(
device_type=device_type,
max_shard_size=0,
target_shard_duration_secs=10 * 60,
max_shards_per_env=8,
timeout_secs=30 * 60,
pool='fuchsia.tests',
test_in_shards=test_in_shards,
swarming_expiration_timeout_secs=10 * 60,
swarming_io_timeout_secs=5 * 60,
default_service_account='service_account',
targets_serial=True,
pave=pave,
)
spec = Fuchsia(
checkout=Fuchsia.Checkout(
manifest='minimal',
project='integration',
remote='https://fuchsia.googlesource.com/manifest',
upload_results=bool(gcs_bucket),
use_snapshot=use_snapshot,
),
build=Fuchsia.Build(
variants=variants,
build_type=build_type,
run_tests=run_tests,
ninja_targets=ninja_targets,
sdk_subbuild=sdk_subbuild,
board='boards/x64.gni',
product='products/core.gni',
target='x64',
include_breakpad_symbols=False,
enforce_size_limits=enforce_size_limits,
upload_results=bool(gcs_bucket),
),
test=test_spec,
gcs_bucket=gcs_bucket,
artifact_gcs_bucket='fuchsia-infra-artifacts',
)
return api.spec.spec_loaded_ok(
step_name='load spec.build_init', message=spec)

  default_gitiles_refs_steps = api.gitiles.refs('refs', [
'refs/heads/master',
'deadbeef',
])
spec_remote = 'https://fuchsia.googlesource.com/integration'
properties = {
# We rely on the buildbucket test API using this same
# ID for ci_build_message and the builds returned by get().
'parent_id': str(api.buildbucket.ci_build_message().id),
'spec_remote': spec_remote,
}
  # yapf: disable
yield (
api.checkout.test('default', tryjob=False) +
api.build.test('default') +
spec_data(gcs_bucket='fuchsia-infra', run_tests=True) +
api.properties(**properties)
)
yield (
api.checkout.test('non_numeric_parent_id', tryjob=False) +
api.build.test('default') +
spec_data(gcs_bucket='fuchsia-infra', run_tests=True) +
api.properties(parent_id='not-a-number')
)
yield (
api.status_check.test('subbuild_no_parent_id', status='infra_failure') +
spec_data(run_tests=True) +
api.buildbucket.try_build() +
api.build_input_resolver.set_gerrit_branch('master') +
default_gitiles_refs_steps +
api.properties(parent_id='')
)
yield (
api.checkout.test('default_cq', tryjob=True) +
api.build.test('default_cq', tryjob=True) +
spec_data(run_tests=True) +
api.buildbucket.try_build(
# Values chosen to match the test data in jiri/test_api.py
# example_source_manifest() so that we trigger the test multipliers
# code path.
project='manifest',
git_repo='https://fuchsia.googlesource.com/manifest') +
api.build_input_resolver.set_gerrit_branch('master') +
default_gitiles_refs_steps +
api.properties(**properties)
)
yield (
api.checkout.test('build_type_release_not_run_tests', tryjob=False) +
api.build.test('default', create_shards=False) +
spec_data(build_type='release', gcs_bucket='fuchsia-infra',
run_tests=False, enforce_size_limits=True) +
api.properties(**properties)
)
# yapf: enable
yield (api.checkout.test('sdk', tryjob=False) +
api.build.test('sdk', tryjob=False, create_shards=False) + spec_data(
run_tests=False,
ninja_targets=[
'sdk/archive/core.tar.gz', 'sdk/archive/fuchsia_dart.tar.gz'
],
sdk_subbuild=True) + api.properties(parent_id='sdk-id'))
yield (api.checkout.test('not_test_in_shards') +
api.build.test('', create_shards=False) +
spec_data(run_tests=True, test_in_shards=False, pave=True) +
api.properties(**properties))
yield (
api.checkout.test('check_sizes_fails', tryjob=False, status='failure') +
api.build.test(
'check_sizes_fails', create_shards=False, status='failure') +
spec_data(gcs_bucket='fuchsia-infra') + api.properties(**properties) +
api.override_step_data('check sizes', retcode=1))