blob: df9c4a1ea4ed497eb030c2e40decef847b150976 [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" fuchsia.py - Builds and optionally tests Fuchsia.
# Execution overview
## Configuration
This recipe uses a protocol buffer message called a spec for most of its
configuration. The only PROPERTIES are those required to acquire the spec.
The recipe fetches the spec from the git repo |spec_remote|. It determines
the correct revision to use from the BuildBucket build input to ensure it
retrieves the correct config for a pending change vs a committed change.
## Checkout + Build
This recipe triggers a child build which runs the fuchsia/build recipe.
That recipe checks out the source code and builds it. This recipe
retrieves the data required to orchestrate tests via Isolate.
## Test
If configured to run tests, this recipe uses the test orchestration data to run tests.
That logic is in the testing recipe module. Under the hood, that module
triggers Swarming tasks that do the actual testing, waits for them, and
reports the results.
"""
from google.protobuf import json_format
from google.protobuf import text_format
from recipe_engine.recipe_api import Property
from PB.go.chromium.org.luci.buildbucket.proto import build as build_pb2
from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2
from PB.go.chromium.org.luci.buildbucket.proto import rpc as rpc_pb2
from PB.infra.fuchsia import Fuchsia
from PB.recipe_modules.recipe_engine.led.properties import InputProperties as LedInputProperties
# Recipe module dependencies: 'fuchsia/*' modules live in this repo;
# 'recipe_engine/*' modules are provided by the recipe engine itself.
DEPS = [
    'fuchsia/artifacts',
    'fuchsia/build',
    'fuchsia/build_input_resolver',
    'fuchsia/buildbucket_util',
    'fuchsia/checkout',
    'fuchsia/fuchsia',
    'fuchsia/gitiles',
    'fuchsia/spec',
    'fuchsia/testing',
    'fuchsia/testing_requests',
    'fuchsia/testsharder',
    'recipe_engine/buildbucket',
    'recipe_engine/file',
    'recipe_engine/isolated',
    'recipe_engine/json',
    'recipe_engine/led',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/step',
    'recipe_engine/swarming',
]
# Input properties. Only what is required to locate and fetch the spec; all
# other configuration comes from the spec itself (see module docstring).
PROPERTIES = {
    'spec_remote':
        Property(
            kind=str,
            help='URL of the specs git repository',
            # https, not http: googlesource.com requires TLS, and this matches
            # every other googlesource URL in this recipe.
            default='https://fuchsia.googlesource.com/integration'),
    # TODO(ihuh): Remove once the experimental feature is working.
    'experimental':
        Property(
            kind=bool,
            help='Whether to use experimental codepath',
            default=False),
}
def RunSteps(api, spec_remote, experimental):
  """Loads the spec, triggers a child build, then orchestrates its tests.

  Args:
    api: The recipe API object.
    spec_remote (str): URL of the git repo holding the spec textprotos.
    experimental (bool): Whether to use the experimental codepath that
      forwards the parent's properties to the child build.
  """
  # Resolve the build input to always contain a Gitiles commit.
  bb_build = api.buildbucket.build
  api.build_input_resolver.resolve(
      bb_build.input,
      default_project_url='https://fuchsia.googlesource.com/fuchsia')

  with api.step.nest('load spec') as presentation:
    presentation.step_text = 'loading spec'
    try:
      # spec_revision is forwarded to the child so parent and child agree on
      # the exact spec version.
      spec, spec_revision = api.spec.get_spec_revision_from_build(
          build=bb_build,
          spec_remote=spec_remote,
          Type=Fuchsia,
      )
    except api.spec.ParseError as e:
      raise api.step.StepFailure('failed to parse spec: %s' % str(e))
    presentation.logs['textproto'] = text_format.MessageToString(spec).split(
        '\n')
    # The artifacts tool relies on this output property.
    # This design has been fragile. Please don't add more dependencies on any
    # additional output properties.
    presentation.properties['gcs_bucket'] = spec.gcs_bucket

  orchestrator_id = api.buildbucket_util.id

  # This recipe only adds value over fuchsia/build when it runs tests.
  if not spec.build.run_tests:
    raise api.step.InfraFailure(
        'if not running tests, use the fuchsia/build recipe directly')

  with api.step.nest('build') as presentation:
    child_build = run_build_steps(api, presentation, spec_remote, spec_revision,
                                  orchestrator_id, experimental)
    child_props = json_format.MessageToDict(child_build.output.properties)
    orchestration_inputs = collect_test_orchestration_inputs(api, child_props)
    # Copy to our own properties so the results uploader in google3 can find
    # it without knowing about the child.
    rev_count_prop = api.checkout.REVISION_COUNT_PROPERTY
    if rev_count_prop in child_props:
      presentation.properties[rev_count_prop] = child_props[rev_count_prop]

  # Configure context of uploaded artifacts for test task construction.
  api.artifacts.gcs_bucket = spec.artifact_gcs_bucket
  api.artifacts.uuid = orchestrator_id

  run_test_steps(api, orchestration_inputs, spec)
def _BuildWithLed(api, builder_name, properties, presentation):
  """Launches the child build via led and waits for it to finish.

  Args:
    api: The recipe API object.
    builder_name (str): Child builder name; looked up in the parent's bucket.
    properties (dict): Input properties to set on the child build.
    presentation: Step presentation to attach the child's build.proto log to.

  Returns:
    (build_pb2.Build, str): The child's final Build message and a URL to the
    child's swarming task.
  """
  parent = api.buildbucket.build.builder
  led_data = api.led(
      'get-builder',
      'luci.%s.%s:%s' % (parent.project, parent.bucket, builder_name))
  # Each property becomes one '-p key=<json value>' pair for 'led edit'.
  edit_args = []
  for k, v in properties.items():
    edit_args.extend(['-p', '%s=%s' % (k, api.json.dumps(v))])
  led_data = led_data.then('edit', *edit_args)
  # Forward the triggering CL (if any) so the child builds the same change.
  bb_input = api.buildbucket.build_input
  if bb_input.gerrit_changes:
    gerrit_change = bb_input.gerrit_changes[0]
    led_data = led_data.then(
        'edit-cr-cl', 'https://%s/c/%s/+/%d' %
        (gerrit_change.host, gerrit_change.project, gerrit_change.change))
  led_data = api.led.inject_input_recipes(led_data)
  launch_res = led_data.then('launch')
  task_id = launch_res.result['swarming']['task_id']
  swarming_result = api.swarming.collect(
      'collect', [task_id], output_dir=api.path['cleanup'])[0]
  # Led launch ensures this file is present in the task root dir.
  build_proto_path = swarming_result.output_dir.join('build.proto.json')
  build_proto_json = api.file.read_text('read build.proto.json',
                                        build_proto_path)
  build_proto = build_pb2.Build()
  presentation.logs['build.proto.json'] = build_proto_json.splitlines()
  try:
    json_format.Parse(build_proto_json, build_proto)
  except json_format.ParseError as e:
    # Synthesize a failed Build so the caller reports a failure instead of
    # crashing on an unparseable result file.
    build_proto.status = common_pb2.INFRA_FAILURE
    build_proto.summary_markdown = 'Failed to parse build.proto.json: %s' % e
  url = 'https://ci.chromium.org/swarming/task/%s?server=%s' % (
      task_id, launch_res.result['swarming']['host_name'])
  return build_proto, url
def _BuildWithBuildBucket(api, builder_name, properties):
  """Launches the child build via Buildbucket and waits for it to finish.

  Args:
    api: The recipe API object.
    builder_name (str): Name of the child builder to schedule.
    properties (dict): Input properties to set on the child build.

  Returns:
    (build_pb2.Build, str): The child's final Build message and a URL to the
    child's build page.
  """
  request = api.buildbucket.schedule_request(
      builder=builder_name,
      properties=properties,
      swarming_parent_run_id=api.swarming.task_id,
      priority=None,  # Leave unset to avoid overriding priority from configs.
  )
  scheduled = api.buildbucket.schedule([request], step_name='schedule')
  build_id = scheduled[0].id
  # As of 2019-11-18, timeout defaults to something too short.
  # We never want this step to time out. We'd rather the whole build time out.
  one_day_secs = 24 * 60 * 60
  collected = api.buildbucket.collect_builds(
      [build_id], timeout=one_day_secs, step_name='collect')
  return collected[build_id], 'https://ci.chromium.org/b/%s' % build_id
def run_build_steps(api, presentation, spec_remote, spec_revision,
                    orchestrator_id, experimental):
  """Triggers the child fuchsia/build build and waits for it to complete.

  Args:
    api: The recipe API object.
    presentation: Step presentation to attach the child build link to.
    spec_remote (str): URL of the specs git repository.
    spec_revision (str): Spec revision to forward to the child build.
    orchestrator_id (str): Buildbucket ID of this (parent) build.
    experimental (bool): Whether to forward the parent's properties.

  Returns:
    The child's final build_pb2.Build message.

  Raises:
    StepFailure: If the child build did not succeed.
  """
  if experimental:
    # These are reserved by kitchen and swarming. See
    # https://chromium.googlesource.com/infra/infra/+/2c2389a00fcdb93d90a628f941814f2abd34428e/go/src/infra/tools/kitchen/cook.go#266
    # and https://chromium.googlesource.com/infra/infra/+/7fcd559afa7a866a5ad039019e6ef6a91922e09c/appengine/cr-buildbucket/validation.py#36.
    # We also should not override the 'recipe' of the child builder.
    reject_keys = {
        '$recipe_engine/path', '$recipe_engine/step', 'bot_id', 'path_config',
        'buildbucket', '$recipe_engine/buildbucket', 'buildername', 'branch',
        'repository', '$recipe_engine/runtime', 'recipe'
    }
    # Forward all non-reserved parent properties to the child.
    properties = {}
    for key, val in api.properties.thaw().items():
      if key and key not in reject_keys:
        properties[key] = val
    properties['spec_revision'] = spec_revision
    properties['parent_id'] = orchestrator_id
  else:
    properties = {
        'spec_remote': spec_remote,
        'spec_revision': spec_revision,
        'parent_id': orchestrator_id,
    }
  builder_name = '{}-subbuild'.format(api.buildbucket.build.builder.builder)
  # If this task was launched by led, we launch the child with led as well.
  # This lets us ensure that the parent and child use the same version of
  # the recipes code, which is needed for testing.
  if api.led.launched_by_led:
    output_build, build_url = _BuildWithLed(api, builder_name, properties,
                                            presentation)
  else:
    output_build, build_url = _BuildWithBuildBucket(api, builder_name,
                                                    properties)
  presentation.links[builder_name] = build_url
  if output_build.status != common_pb2.SUCCESS:
    raise api.step.StepFailure('build failed')
  return output_build
def run_test_steps(api, orchestration_inputs, spec):
  """Runs tests using the orchestration data produced by the child build.

  Args:
    api: The recipe API object.
    orchestration_inputs: TestOrchestrationInputs downloaded from the child.
    spec (Fuchsia): The spec proto configuring this build.

  Raises:
    InfraFailure: If secrets are requested in an unsupported configuration.
  """
  tryjob = api.buildbucket_util.is_tryjob

  # Handle illegal settings around secrets.
  if spec.test.requires_secrets and (tryjob or
                                     spec.test.device_type != 'QEMU'):
    raise api.step.InfraFailure(
        'the secrets pipeline is only supported in CI and on QEMU')

  if spec.test.test_in_shards:
    all_results = api.testing.test_in_shards(
        collect_timeout_secs=spec.test.collect_timeout_secs,
        debug_symbol_gcs_bucket=spec.debug_symbol_gcs_bucket,
        orchestration_inputs=orchestration_inputs,
        max_attempts=spec.test.max_attempts)
  else:
    deprecated_result = api.testing.deprecated_test(
        spec.debug_symbol_gcs_bucket,
        spec.test.device_type,
        orchestration_inputs,
        max_attempts=spec.test.max_attempts)
    all_results = [deprecated_result]

  # Upload test results
  if spec.test.upload_results:
    assert spec.gcs_bucket, 'gcs_bucket must be set if test.upload_results is'
    # Catapult uploads only make sense for CI builds.
    upload_to_catapult = not tryjob and spec.test.upload_to_catapult
    for results in all_results:
      results.upload_results(
          gcs_bucket=spec.gcs_bucket,
          upload_to_catapult=upload_to_catapult,
      )

  # TODO(41752): Support processing test coverage.

  # Raise test failures
  with api.step.defer_results():
    api.testing.raise_failures()
    for results in all_results:
      results.raise_failures()
def collect_test_orchestration_inputs(api, build_props):
  """Downloads isolated orchestration inputs from a build.

  Args:
    build_props (dict): The properties of the build that produced the test
      orchestration inputs.

  Returns:
    FuchsiaBuildApi.TestOrchestrationInputs

  Raises:
    A StepFailure if the required HASH_PROPERTY is not found.
  """
  hash_prop = api.build.TestOrchestrationInputs.HASH_PROPERTY
  inputs_hash = build_props.get(hash_prop)
  if inputs_hash:
    return api.build.TestOrchestrationInputs.download(api, inputs_hash)
  raise api.step.StepFailure('no `%s` property found' % hash_prop)
def GenTests(api):
  """Recipe simulation test cases."""

  def ci_build_message(api, output_props=None, **kwargs):
    """Generates a Buildbucket Build message.

    Args:
      output_props (Dict): output properties to set on the build.
      kwargs: Forwarded to BuildbucketApi.ci_build_message.

    See BuildBucketTestApi.ci_build_message for full parameter documentation.
    """
    msg = api.buildbucket.ci_build_message(**kwargs)
    msg.output.properties.update(output_props if output_props else {})
    return msg

  def child_build_steps(api, build):
    """Generates step data to schedule and collect from a child build

    Args:
      build (build_pb2.Build): The build to schedule and collect from.
    """
    mock_schedule_data = api.buildbucket.simulated_schedule_output(
        step_name='build.schedule',
        batch_response=rpc_pb2.BatchResponse(
            responses=[dict(schedule_build=dict(id=build.id))],),
    )
    mock_collect_data = api.buildbucket.simulated_collect_output(
        step_name='build.collect',
        builds=[build],
    )
    return mock_schedule_data + mock_collect_data

  def child_led_steps(api, build, tryjob=False):
    """Generates step data to schedule and collect from a child build

    Args:
      build (build_pb2.Build): The build to schedule and collect from.
      tryjob (bool): Whether to also simulate forwarding a Gerrit CL.
    """
    props = {
        'gcs_bucket': api.fuchsia.DEFAULT_GCS_BUCKET,
        'parent_id': build.id
    }
    change_prop = {
        '$recipe_engine/buildbucket': {
            'build': {
                'input': {
                    'gerritChanges': {
                        'change': '12345',
                        'host': 'fuchsia-review.googlesource.com',
                        'project': 'fuchsia'
                    }
                }
            }
        }
    }
    led_data = (
        api.led.get_builder(api, 'build.led get-builder')
        .edit_properties('build.led edit', **props)
    )  # yapf: disable
    if tryjob:
      led_data.edit_properties('build.led edit-cr-cl', **change_prop)
    led_data.edit_input_recipes(
        'build.led edit (2)',
        isolated_hash='new hash').launch('build.led launch')
    return led_data.step_data + api.step_data(
        'build.read build.proto.json',
        stdout=api.json.output(json_format.MessageToDict(build)))

  def download_step_data():
    """Step data for downloading test orchestration inputs from the child."""
    task_request_jsonish = api.testing.task_request_jsonish(legacy_qemu=True)
    return api.testing_requests.shards_step_data(
        step_name='build.download test orchestration inputs.load test shards',
        shards=[
            api.testsharder.shard(
                name='QEMU',
                tests=api.testing_requests.default_tests(),
                dimensions=dict(device_type='QEMU'),
            ),
        ]) + api.testing.task_requests_step_data(
            [task_request_jsonish],
            'build.download test orchestration inputs.load task requests',
        )

  def test_step_data(test_in_shards=True):
    """Step data for the test phase; shape depends on the sharding mode."""
    if test_in_shards:
      return download_step_data() + (
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='610',
                      name='QEMU',
                      outputs=['out.tar'],
                  ),
              ]) +
          api.testing.test_step_data(shard_name='QEMU'))  # yapf: disable
    return download_step_data() + (
        api.testing.task_step_data([
            api.swarming.task_result(
                id='610',
                name='QEMU',
                outputs=['out.tar'],
            ),
        ]) +
        api.testing.test_step_data())  # yapf: disable

  def spec_data(use_snapshot=False,
                variants=(),
                device_type='QEMU',
                run_tests=True,
                test_in_shards=True,
                requires_secrets=False,
                gcs_bucket=None,
                debug_symbol_gcs_bucket='debug-symbols'):
    """Mocks the 'load spec' step with a Fuchsia spec built from the args."""
    test_spec = None
    if run_tests:
      test_spec = Fuchsia.Test(
          device_type=device_type,
          max_shard_size=0,
          timeout_secs=30 * 60,
          pool='fuchsia.tests',
          test_in_shards=test_in_shards,
          requires_secrets=requires_secrets,
          swarming_expiration_timeout_secs=10 * 60,
          swarming_io_timeout_secs=5 * 60,
          upload_results=bool(gcs_bucket),
          use_runtests=True,
      )
    return api.spec.spec_loaded_ok(
        step_name='load spec.build_init',
        message=Fuchsia(
            checkout=Fuchsia.Checkout(
                manifest='manifest',
                remote='remote',
                upload_results=bool(gcs_bucket),
                use_snapshot=use_snapshot,
            ),
            build=Fuchsia.Build(
                variants=variants,
                build_type='debug',
                run_tests=run_tests,
                board='boards/x64.gni',
                product='products/core.gni',
                target='x64',
                include_symbol_archive=False,
                include_breakpad_symbols=False,
                upload_results=bool(gcs_bucket),
            ),
            test=test_spec,
            debug_symbol_gcs_bucket=debug_symbol_gcs_bucket,
            gcs_bucket=gcs_bucket,
            artifact_gcs_bucket=gcs_bucket,
        ),
    )

  ci_build = api.buildbucket.ci_build(
      project='fuchsia', git_repo='https://fuchsia.googlesource.com/fuchsia')

  # Happy path: sharded testing, experimental property-forwarding codepath.
  yield api.fuchsia.test(
      'successful_build_and_test',
      clear_default_steps=True,
      properties={'experimental': True},
      steps=[
          child_build_steps(
              api=api,
              build=ci_build_message(
                  api=api,
                  output_props={
                      'integration revision count': 1,
                      'test_orchestration_inputs_hash': 'abc',
                  },
                  status='SUCCESS',
              ),
          ),
          api.override_step_data(
              'launch/collect.0.collect',
              api.swarming.collect([
                  api.swarming.task_result(
                      id='610',
                      name='QEMU',
                      outputs=['out.tar'],
                  ),
              ])),
      ]) + spec_data(gcs_bucket='gcs-bucket') + test_step_data()

  # Happy path using the deprecated non-sharded testing codepath.
  yield api.fuchsia.test(
      'successful_build_and_test_not_in_shards',
      clear_default_steps=True,
      steps=[
          child_build_steps(
              api=api,
              build=ci_build_message(
                  api=api,
                  output_props={
                      'integration revision count': 1,
                      'test_orchestration_inputs_hash': 'abc',
                  },
                  status='SUCCESS',
              ),
          ),
          api.override_step_data(
              'run tests.attempt 0.collect',
              api.swarming.collect([
                  api.swarming.task_result(
                      id='610',
                      name='QEMU',
                      outputs=['out.tar'],
                  ),
              ])),
      ]) + spec_data(
          gcs_bucket='gcs-bucket',
          test_in_shards=False) + test_step_data(test_in_shards=False)

  # A spec with run_tests=False is rejected as an infra failure.
  yield api.fuchsia.test(
      'build_only_failed',
      status='infra_failure',
      clear_default_steps=True,
  ) + spec_data(run_tests=False)

  # Parent launched by led: child is launched via led too, and fails.
  yield api.fuchsia.test(
      'build_with_led',
      status='failure',
      clear_default_steps=True,
      properties={
          '$recipe_engine/led':
              LedInputProperties(
                  led_run_id='led/user_example.com/abc123',
                  isolated_input=LedInputProperties.IsolatedInput(
                      hash='abc123',
                      namespace='default-gzip',
                      server='isolateserver.appspot.com')),
      },
      steps=[
          child_led_steps(
              api=api,
              build=ci_build_message(
                  api=api,
                  output_props={'test_orchestration_inputs_hash': 'abc'},
                  status='FAILURE',
              ),
          ),
      ]) + spec_data()

  # Led tryjob: the triggering CL is forwarded to the child.
  yield api.fuchsia.test(
      'build_with_led_tryjob',
      status='failure',
      clear_default_steps=True,
      properties={
          '$recipe_engine/led':
              LedInputProperties(
                  led_run_id='led/user_example.com/abc123',
                  isolated_input=LedInputProperties.IsolatedInput(
                      hash='abc123',
                      namespace='default-gzip',
                      server='isolateserver.appspot.com')),
      },
      tryjob=True,
      steps=[
          child_led_steps(
              api=api,
              tryjob=True,
              build=ci_build_message(
                  api=api,
                  output_props={'test_orchestration_inputs_hash': 'abc'},
                  status='FAILURE',
              ),
          ),
      ]) + api.build_input_resolver.set_gerrit_branch() + api.gitiles.refs(
          'refs', [
              'refs/heads/master',
              'deadbeef',
          ]) + spec_data()

  # Child succeeded but did not emit the orchestration-inputs hash property.
  yield api.fuchsia.test(
      'build_passed_but_hash_is_missing',
      status='failure',
      clear_default_steps=True,
      steps=[
          child_build_steps(
              api=api,
              build=ci_build_message(
                  api=api,
                  status='SUCCESS',
              ),
          )
      ]) + spec_data()

  # Secrets requested on a non-QEMU device type is an infra failure.
  yield api.fuchsia.test(
      'requires_secrets_illegally',
      status='infra_failure',
      clear_default_steps=True,
      steps=[
          child_build_steps(
              api=api,
              build=ci_build_message(
                  api=api,
                  status='SUCCESS',
                  output_props={'test_orchestration_inputs_hash': 'abc'},
              ),
          ),
      ]) + download_step_data() + spec_data(
          device_type='Not QEMU', requires_secrets=True)

  # A malformed spec surfaces as a parse-error failure in 'load spec'.
  yield (api.test('spec_parse_error') + ci_build +
         api.spec.spec_parse_error(step_name='load spec.build_init'))