blob: 9177d846bc9e6ff5bb58b775c4cfcd21db848132 [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from PB.infra.fuchsia import Fuchsia
from recipe_engine.recipe_api import Property
# Recipe module dependencies. Keep each group ('fuchsia/...' then
# 'recipe_engine/...') sorted alphabetically.
DEPS = [
    'fuchsia/artifacts',
    'fuchsia/build',
    'fuchsia/buildbucket_util',
    'fuchsia/checkout',
    'fuchsia/experimental',
    'fuchsia/fuchsia',
    'fuchsia/testing',
    'fuchsia/testing_requests',
    'fuchsia/testsharder',
    'recipe_engine/buildbucket',
    'recipe_engine/file',
    'recipe_engine/isolated',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/swarming',
]

# Input properties for this recipe. Most are passed straight through to the
# Fuchsia / Fuchsia.Test spec proto fields of the same name.
PROPERTIES = {
    'gcs_bucket':
        Property(
            kind=str,
            help='GCS bucket for uploading checkout, build, and test results',
            default='fuchsia_infra'),
    'build_artifact_hash':
        Property(
            kind=str,
            help='Hash for which build artifact may be downloaded',
            default='abc'),
    'device_type':
        Property(
            kind=str,
            help='Passed through to spec field Fuchsia.Test.device_type',
            default='QEMU'),
    'pave':
        Property(
            kind=bool,
            help='Passed through to spec field Fuchsia.Test.pave',
            default=True),
    'requires_secrets':
        Property(
            kind=bool,
            help='Passed through to spec field Fuchsia.Test.requires_secrets',
            default=True),
    'test_in_shards':
        Property(
            kind=bool,
            help='Passed through to spec field Fuchsia.Test.test_in_shards',
            default=False),
    'upload_to_catapult':
        Property(
            kind=bool,
            help='Passed through to spec field Fuchsia.Test.upload_to_catapult',
            default=False),
    'collect_timeout_secs':
        Property(
            kind=int,
            help=('Passed through to spec field '
                  'Fuchsia.Test.collect_timeout_secs'),
            default=0),
    'debug_symbol_gcs_bucket':
        Property(
            kind=str,
            help='Passed through to spec field Fuchsia.debug_symbol_gcs_bucket',
            default='debug-symbols'),
    'test_async':
        Property(
            kind=bool,
            help='Whether to call the deprecated_test_async method',
            default=False),
    'per_test_timeout_secs':
        Property(
            kind=int,
            help='Passed through to spec field Fuchsia.Test.per_test_timeout_secs',
            default=0),
    'use_runtests':
        Property(kind=bool, help='Whether to use runtests', default=False),
}
def _construct_orchestration_inputs(api, build_artifacts, shard_requests):
  """Bundles build outputs and shard requests for the testing module.

  Both the sharded and non-sharded paths build this object identically, so
  the construction is shared here rather than duplicated in each branch.
  """
  return api.build.TestOrchestrationInputs(
      build_artifacts.llvm_symbolizer, build_artifacts.minfs,
      build_artifacts.symbolize_tool, shard_requests,
      build_artifacts.tests_file)


def RunSteps(api, gcs_bucket, build_artifact_hash, device_type, pave,
             requires_secrets, test_in_shards, upload_to_catapult,
             collect_timeout_secs, debug_symbol_gcs_bucket, test_async,
             per_test_timeout_secs, use_runtests):
  """Downloads build artifacts and runs Fuchsia tests against them.

  Downloads/isolates the build artifacts identified by build_artifact_hash,
  constructs a Fuchsia test spec from the recipe properties, runs tests
  either in shards or via the deprecated single-task path, optionally
  uploads results (and coverage, when a 'profile' variant is present), and
  finally raises any test failures.
  """
  # Results are only uploaded when a destination bucket was provided.
  upload_results = bool(gcs_bucket)

  build_artifacts = api.build.BuildArtifacts.download(api, build_artifact_hash)
  build_artifacts.isolate(api)

  # Configure context of uploaded artifacts for test task construction.
  api.artifacts.gcs_bucket = 'fuchsia-artifacts'
  api.artifacts.uuid = api.buildbucket_util.id

  # Mirror the recipe properties into the spec protos that the testing
  # modules consume.
  test_spec = Fuchsia.Test(
      device_type=device_type,
      pave=pave,
      pool='fuchsia.tests',
      requires_secrets=requires_secrets,
      test_in_shards=test_in_shards,
      upload_to_catapult=upload_to_catapult,
      collect_timeout_secs=collect_timeout_secs,
      per_test_timeout_secs=per_test_timeout_secs,
      use_runtests=use_runtests,
  )
  spec = Fuchsia(
      test=test_spec,
      debug_symbol_gcs_bucket=debug_symbol_gcs_bucket,
  )

  if test_in_shards:
    shard_requests = api.testing_requests.shard_requests(
        build_artifacts,
        api.buildbucket.build,
        spec.test.per_test_timeout_secs,
        spec.test.pool,
        spec.test.swarming_expiration_timeout_secs,
        spec.test.swarming_io_timeout_secs,
        spec.test.use_runtests,
        timeout_secs=spec.test.timeout_secs)
    orchestration_inputs = _construct_orchestration_inputs(
        api, build_artifacts, shard_requests)
    all_results = api.testing.test_in_shards(
        collect_timeout_secs=spec.test.collect_timeout_secs,
        debug_symbol_gcs_bucket=spec.debug_symbol_gcs_bucket,
        orchestration_inputs=orchestration_inputs,
        max_attempts=spec.test.max_attempts)
  else:
    shard_requests = api.testing_requests.deprecated_shard_requests(
        build_artifacts,
        api.testing_requests.deprecated_test_cmds(test_spec),
        test_spec.device_type,
        test_spec.pool,
        test_spec.timeout_secs,
        test_spec.pave,
        requires_secrets=test_spec.requires_secrets,
        swarming_expiration_timeout_secs=spec.test
        .swarming_expiration_timeout_secs,
        swarming_io_timeout_secs=spec.test.swarming_io_timeout_secs)
    orchestration_inputs = _construct_orchestration_inputs(
        api, build_artifacts, shard_requests)
    if test_async:
      # deprecated_test_async returns a callable that produces the result;
      # it is invoked immediately here.
      result = api.testing.deprecated_test_async(
          debug_symbol_gcs_bucket,
          device_type,
          orchestration_inputs,
      )()
    else:
      result = api.testing.deprecated_test(
          debug_symbol_gcs_bucket,
          device_type,
          orchestration_inputs,
      )
    # Wrap in a list so the upload/failure handling below is uniform with
    # the sharded path.
    all_results = [result]

  # Upload test results
  if upload_results:
    for test_results in all_results:
      test_results.upload_results(
          gcs_bucket=gcs_bucket,
          upload_to_catapult=upload_to_catapult,
      )

  # Process coverage when the build used a 'profile' variant. Only results
  # that came from Fuchsia itself are fed to covargs.
  if 'profile' in build_artifacts.variants:
    api.testing.process_coverage(
        covargs_path=build_artifacts.covargs,
        test_results=[
            result for result in all_results if result.from_fuchsia
        ],
        ids_txt=build_artifacts.ids,
        llvm_profdata=build_artifacts.llvm_profdata,
        llvm_cov=build_artifacts.llvm_cov,
        gcs_bucket=gcs_bucket)

  # Raise test failures. defer_results() lets every raise_failures call run
  # before any raised failure propagates.
  with api.step.defer_results():
    api.testing.raise_failures()
    for test_results in all_results:
      test_results.raise_failures()
def GenTests(api):
  """Test cases for this recipe.

  Covers both the deprecated single-task path (the 'isolated_*' and 'async'
  cases) and the sharded path ('test_in_shards' property set to True).
  """
  # For coverage
  api.testing.task_requests_step_data([api.testing.task_request_jsonish(False)],
                                      '')
  # Test cases for running Fuchsia tests as a swarming task.
  yield api.testing.test(
      'isolated_tests_no_json',
      status='failure',
      # Test a missing summary.json file. Clear the default steps and manage
      # them manually to avoid providing the file, which is usually done by the
      # auto-included test_step_data step.
      clear_default_steps=True,
      steps=[
          api.testing.task_step_data([
              api.swarming.task_result(
                  id='1', name='test', outputs=['out.tar']),
          ]),
          api.testing_requests.args_test_data(),
      ])
  # Non-default device type with paving disabled.
  yield api.testing.test(
      'isolated_test_device_no_pave',
      properties={
          'device_type': 'Intel NUC Kit NUC6i3SYK',
          'pave': False,
      },
  )
  # Tests fail on both attempts; symbolize-logs output is injected for each.
  yield api.testing.test(
      'isolated_tests_test_failure',
      expect_failure=True,  # Failure steps injected below.
      steps=[
          api.testing.task_step_data(
              [
                  api.swarming.task_result(
                      id='1', name='test', outputs=['out.tar']),
              ],
              iteration=0,
          ),
          api.testing.task_step_data(
              [
                  api.swarming.task_result(
                      id='2', name='test', outputs=['out.tar']),
              ],
              iteration=1,
          ),
          api.testing.tests_json_data(iteration=0),
          api.testing.tests_json_data(iteration=1),
          api.testing.test_step_data(failure=True, iteration=0),
          api.testing.test_step_data(failure=True, iteration=1),
          api.step_data('run tests.attempt 0.task results.symbolize logs',
                        api.raw_io.stream_output('bt1\nbt2\n')),
          api.step_data('run tests.attempt 1.task results.symbolize logs',
                        api.raw_io.stream_output('bt1\nbt2\n')),
      ])
  # TODO(garymm): Remove retries to simplify this test.
  yield api.testing.test(
      'isolated_tests_no_resource',
      status='infra_failure',
      expect_failure=True,  # Failure step injected below.
      steps=[
          api.testing.task_step_data(
              [
                  api.swarming.task_result(
                      id='1',
                      name='test',
                      state=api.swarming.TaskState.NO_RESOURCE,
                  ),
              ],
              iteration=0,
          ),
          api.testing.task_step_data(
              [
                  api.swarming.task_result(
                      id='2',
                      name='test',
                      state=api.swarming.TaskState.NO_RESOURCE,
                  ),
              ],
              iteration=1,
          ),
      ])
  # Both attempts time out with a kernel panic in the output.
  yield api.testing.test(
      'isolated_tests_kernel_panic',
      expect_failure=True,  # Failure step injected below.
      steps=[
          api.testing.task_step_data(
              [
                  api.swarming.task_result(
                      id='1',
                      name='test',
                      state=api.swarming.TaskState.TIMED_OUT,
                      output='KERNEL PANIC',
                  ),
              ],
              iteration=0,
          ),
          api.testing.task_step_data(
              [
                  api.swarming.task_result(
                      id='2',
                      name='test',
                      state=api.swarming.TaskState.TIMED_OUT,
                      output='KERNEL PANIC',
                  ),
              ],
              iteration=1,
          ),
      ],
  )
  # Test case for generating test coverage
  yield api.testing.test(
      'upload_test_coverage',
      properties={
          'gcs_bucket': 'fuchsia-build',
      },
      steps=[
          api.testing_requests.args_test_data('x64', ['profile']),
      ])
  # Test case for ASan.
  yield api.testing.test(
      'asan_tests',
      steps=[
          api.testing_requests.args_test_data('x64', ['asan']),
      ])
  # Test cases for testing in shards.
  # TODO(fxb/9784): during mass clean-up, move into into api.testing.test_api.
  test_task_outputs = [
      'syslog.txt',
      'serial.txt',
      'out.tar',
      'benchmark.catapult_json',
  ]

  def test_task_data(*shard_names, **kwargs):  # pylint: disable=invalid-name
    # Builds combined step data for one collect iteration: one swarming task
    # result per shard plus the matching per-shard test step data.
    iteration = kwargs.pop('iteration', 0)
    assert not kwargs
    results = []
    step_data = api.step_data(None)
    for idx, name in enumerate(shard_names):
      results.append(
          api.swarming.task_result(
              id=str(idx), name=name, outputs=test_task_outputs))
      # Shards whose name mentions 'EMU' are treated as emulator shards.
      step_data += api.testing.test_step_data(
          shard_name=name, qemu='EMU' in name)
    step_data += api.testing.task_retry_step_data(results, iteration=iteration)
    return step_data

  # TODO(garymm): Remove retries to simplify this test.
  yield api.testing.test(
      'sharded_kernel_panic',
      expect_failure=True,  # Failure step injected below.
      properties={'test_in_shards': True},
      steps=[
          api.testing_requests.shards_step_data(shards=[
              api.testsharder.shard(
                  name='Vim2',
                  tests=[
                      api.testsharder.test(
                          name='test',
                          label='//path/to/test:test(//toolchain)',
                          os='linux',
                          path='/path/to/test',
                      )
                  ],
                  dimensions=dict(device_type='Khadas Vim2 Max'),
              ),
          ]),
          test_task_data('Vim2'),
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='1',
                      name='Vim2',
                      outputs=test_task_outputs,
                      output='KERNEL PANIC',
                  ),
              ],
              iteration=0),
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='2',
                      name='Vim2',
                      outputs=test_task_outputs,
                      output='KERNEL PANIC',
                  ),
              ],
              iteration=1),
      ],
  )
  # TODO(garymm): Remove retries to simplify this test.
  yield api.testing.test(
      'sharded_failure_string',
      expect_failure=True,  # Failure step injected below.
      properties={'test_in_shards': True},
      steps=[
          api.testing_requests.shards_step_data(shards=[
              api.testsharder.shard(
                  name='Vim2',
                  tests=[
                      api.testsharder.test(
                          name='test',
                          label='//path/to/test:test(//toolchain)',
                          os='linux',
                          path='/path/to/test',
                      )
                  ],
                  dimensions=dict(device_type='Khadas Vim2 Max'),
              ),
          ]),
          test_task_data('Vim2'),
          api.testing.task_retry_step_data(
              task_results=[
                  api.swarming.task_result(
                      id='1',
                      name='Vim2',
                      outputs=test_task_outputs,
                  ),
              ],
              iteration=0,
          ),
          # Attempt 0 fails via a failure string in the serial log.
          api.testing.task_retry_log_data(
              iteration=0,
              task_name='Vim2',
              log_name='serial.txt',
              log_contents='ASSERT FAILED'),
          api.testing.task_retry_step_data(
              task_results=[
                  api.swarming.task_result(
                      id='2',
                      name='Vim2',
                      outputs=test_task_outputs,
                  ),
              ],
              iteration=1,
          ),
          # Attempt 1 fails via a different failure string.
          api.testing.task_retry_log_data(
              iteration=1,
              task_name='Vim2',
              log_name='serial.txt',
              log_contents='DEVICE SUSPEND TIMED OUT'),
      ],
  )
  # Sharded mode with the testsharder producing no shards at all.
  yield (api.testing.test(
      'test_with_no_shards',
      clear_default_steps=True,
      properties={
          'test_in_shards': True,
      }))
  yield (api.testing.test(
      'test_with_shards_arm64_serial_failure',
      status='failure',
      clear_default_steps=True,
      properties={
          'test_in_shards': True,
      },
      steps=[
          api.testing_requests.shards_step_data(shards=[
              api.testsharder.shard(
                  name='Vim2',
                  tests=[
                      api.testsharder.test(
                          name='test',
                          label='//path/to/test:test(//toolchain)',
                          os='linux',
                          path='/path/to/test',
                      )
                  ],
                  dimensions=dict(device_type='Khadas Vim2 Max'),
              ),
          ]),
          test_task_data('Vim2'),
          api.testing_requests.args_test_data('arm64'),
          api.step_data('check log Vim2:serial.txt.read serial.txt',
                        api.raw_io.output_text('...DEVICE SUSPEND TIMED OUT\n'))
      ]))
  # fuchsia-0000 passes the first time.
  # fuchsia-0001 has tests that always fail.
  # fuchsia-0002 always times out.
  # fuchsia-0003 has tests that fail the first time but pass the second time.
  yield api.testing.test(
      'test_in_shards_mixed_failure',
      status='failure',
      clear_default_steps=True,
      properties={
          'test_in_shards': True,
          # Here to get coverage for this path without adding another test.
          'per_test_timeout_secs': 1,
      },
      steps=[
          api.testing_requests.shards_step_data(shards=[
              api.testsharder.shard(
                  name='fuchsia-0000',
                  tests=api.testing_requests.default_tests(),
                  dimensions=dict(device_type='QEMU'),
              ),
              api.testsharder.shard(
                  name='fuchsia-0001',
                  tests=[
                      api.testsharder.test(
                          name='test1',
                          label='//path/to/test1:test1(//toolchain)',
                          os='fuchsia',
                          path='/path/to/test1',
                      )
                  ],
                  dimensions=dict(device_type='NUC'),
              ),
              api.testsharder.shard(
                  name='fuchsia-0002',
                  tests=api.testing_requests.default_tests(),
                  dimensions=dict(device_type='QEMU'),
              ),
              api.testsharder.shard(
                  name='fuchsia-0003',
                  tests=[
                      api.testsharder.test(
                          name='test3',
                          label='//path/to/test3:test3(//toolchain)',
                          os='fuchsia',
                          path='/path/to/test3',
                      )
                  ],
                  dimensions=dict(device_type='NUC'),
              ),
          ]),
          # Attempt 0: all four shards run; fuchsia-0002 times out.
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='610',
                      name='fuchsia-0000',
                      outputs=test_task_outputs,
                  ),
                  api.swarming.task_result(
                      id='710',
                      name='fuchsia-0001',
                      outputs=test_task_outputs,
                  ),
                  api.swarming.task_result(
                      id='810',
                      name='fuchsia-0002',
                      state=api.swarming.TaskState.TIMED_OUT,
                      outputs=['serial.txt', 'syslog.txt'],
                  ),
                  api.swarming.task_result(
                      id='910',
                      name='fuchsia-0003',
                      outputs=test_task_outputs,
                  ),
              ],
              iteration=0),
          # Attempt 1: only the failed shards are retried.
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='711',
                      name='fuchsia-0001',
                      outputs=test_task_outputs,
                  ),
                  api.swarming.task_result(
                      id='811',
                      name='fuchsia-0002',
                      state=api.swarming.TaskState.TIMED_OUT,
                      outputs=['serial.txt', 'syslog.txt'],
                  ),
                  api.swarming.task_result(
                      id='911',
                      name='fuchsia-0003',
                      outputs=test_task_outputs,
                  ),
              ],
              iteration=1),
          api.testing.test_step_data(
              shard_name='fuchsia-0000', iteration=0),
          api.testing.test_step_data(
              shard_name='fuchsia-0001', qemu=False, failure=True, iteration=0),
          api.testing.test_step_data(
              shard_name='fuchsia-0001', failure=True, iteration=1),
          api.testing.test_step_data(
              shard_name='fuchsia-0003', qemu=False, failure=True, iteration=0),
          api.testing.test_step_data(
              shard_name='fuchsia-0003', qemu=False, iteration=1),
          api.testing_requests.args_test_data(),
      ])  # yapf: disable
  # Multiplied shards (name prefixed 'multiplied:') get only one attempt.
  yield api.testing.test(
      'test_in_shards_single_attempt',
      status='failure',
      clear_default_steps=True,
      properties={
          'test_in_shards': True,
          'per_test_timeout_secs': 1,
      },
      steps=[
          api.testing_requests.shards_step_data(shards=[
              api.testsharder.shard(
                  name='multiplied:fuchsia-0000',
                  tests=api.testing_requests.default_tests(),
                  dimensions=dict(device_type='QEMU'),
              ),
          ]),
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='610',
                      name='multiplied:fuchsia-0000',
                      outputs=test_task_outputs,
                  ),
              ],
              iteration=0),
          api.testing.test_step_data(
              shard_name='multiplied:fuchsia-0000', failure=True, iteration=0),
          api.testing_requests.args_test_data(),
      ])  # yapf: disable
  # TODO(garymm): combine this with another test. Just set collect_timeout_secs on some
  # other test's properties to get the same coverage.
  yield api.testing.test(
      'fail_then_timeout',
      status='failure',
      clear_default_steps=True,
      properties={
          'test_in_shards': True,
          'collect_timeout_secs': 2,
      },
      steps=[
          api.testing_requests.shards_step_data(shards=[
              api.testsharder.shard(
                  name='fuchsia-0000',
                  tests=api.testing_requests.default_tests(),
                  dimensions=dict(device_type='QEMU'),
              ),
          ]),
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='610',
                      name='fuchsia-0000',
                      outputs=test_task_outputs,
                  ),
              ],
              iteration=0),
          api.testing.task_retry_step_data(
              [
                  api.swarming.task_result(
                      id='611',
                      name='fuchsia-0000',
                      state=api.swarming.TaskState.TIMED_OUT,
                      outputs=['serial.txt', 'syslog.txt'],
                  ),
              ],
              iteration=1),
          api.testing.test_step_data(
              shard_name='fuchsia-0000', iteration=0, failure=True),
          api.testing_requests.args_test_data(),
      ])  # yapf: disable
  # TODO(garymm): combine this with another test.
  yield (api.testing.test(
      'upload_to_catapult',
      clear_default_steps=True,
      properties={
          'test_in_shards': True,
          'upload_to_catapult': True,
      },
      steps=[
          api.testing_requests.shards_step_data(shards=[
              api.testsharder.shard(
                  name='QEMU',
                  tests=api.testing_requests.default_tests(),
                  dimensions=dict(device_type='QEMU'),
              ),
          ]),
          test_task_data('QEMU'),
          api.testing_requests.args_test_data('arm64'),
      ]))
  # Exercises the deprecated_test_async path.
  yield api.testing.test(
      'async',
      # TODO(garymm): remove requires_secrets.
      # It's only here to minimize diffs during an unrelated change.
      properties={
          'requires_secrets': False,
          'test_async': True
      },
      enable_retries=False,
  )