| # Copyright 2019 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for testing Recipes.""" |
| |
| import os |
| |
| import attr |
| from PB.go.chromium.org.luci.buildbucket.proto import build as build_pb2 |
| from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2 |
| from PB.go.chromium.org.luci.buildbucket.proto import rpc as rpc_pb2 |
| from recipe_engine.recipe_api import Property |
| |
# Recipe-engine and infra modules this recipe depends on.
DEPS = [
    'infra/build_input_resolver',
    'infra/checkout',
    'infra/commit_queue',
    'infra/git',
    'infra/gitiles',
    'infra/spec',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/json',
    'recipe_engine/led',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'recipe_engine/step',
    'recipe_engine/swarming',
]

PROPERTIES = {
    'project':
        Property(kind=str, help='Jiri remote manifest project', default=None),
    'manifest':
        Property(kind=str, help='Jiri manifest to use'),
    'remote':
        Property(kind=str, help='Remote manifest repository'),
    'unittest_only':
        Property(kind=bool, help='Finish after unit tests', default=False),
}

# Tryjobs that are known to be broken and must never be launched as child
# builds; full builder names in 'project/bucket/builder' form.
BROKEN_BUILDERS = {
    # TODO(SCN-1286) Re-enable when working.
    'fuchsia/try/topaz-x64-perf-dawson_canyon',
    # TODO(LE-740) Re-enable when working.
    'fuchsia/try/peridot-x64-perf-dawson_canyon',
}

# Maximum number of launches per child build (i.e. at most one retry).
MAX_ATTEMPTS = 2
# How long each swarming collect call waits before returning control so
# progress can be summarized in a step.
COLLECT_TIMEOUT = '5m'
| |
| |
def get_affected_recipes(api):
  """Collect affected recipes. For now assume we care about all recipes."""

  with api.step.nest('get_affected_recipes') as parent_step:
    recipes_root = api.path['start_dir'].join('infra').join('recipes')
    recipes_dir = recipes_root.join('recipes')
    entries = api.file.listdir('ls-recipes', recipes_dir, recursive=True)

    def recipe_name(entry):
      """Return the recipe name for a file, or None if it is not a recipe."""
      rel = os.path.relpath(
          api.path.realpath(entry), api.path.realpath(recipes_dir))
      # Files inside folders that end in ".resources" are never recipes.
      if os.path.dirname(rel).endswith('.resources'):
        return None
      base, ext = os.path.splitext(rel)
      return base if ext == '.py' else None

    recipes = [r for r in (recipe_name(e) for e in entries) if r is not None]
    parent_step.presentation.logs['all recipes'] = recipes

    # Note that this command looks at the differences present in the most
    # recent commit. It ignores any local changes.
    # TODO(IN-1239) do something intelligent if local changes are present
    with api.context(cwd=recipes_root):
      changed_files = api.git.get_changed_files(commit='HEAD')
    parent_step.presentation.logs['changed files (raw)'] = changed_files

    def is_expected_json(path):
      # Expected-output JSON never affects how recipes run, but some JSON
      # files are data consumed by recipes, so match only files that live
      # directly under a ".expected" directory.
      return (os.path.dirname(path).endswith('.expected') and
              os.path.splitext(path)[1] == '.json')

    filtered_changed_files = [
        f for f in changed_files if not is_expected_json(f)
    ]
    parent_step.presentation.logs['changed files (filtered)'] = (
        filtered_changed_files or ['no changed files'])

    # Ask the recipe engine which recipes are affected by these files.
    res = api.step('recipes-analyze', [
        recipes_root.join('recipes.py'),
        'analyze',
        api.json.input({
            'recipes': recipes,
            'files': filtered_changed_files
        }),
        api.json.output(),
    ])

    affected_recipes = res.json.output['recipes']

    # A recipes.cfg change can affect any recipe, so be conservative.
    if 'infra/config/recipes.cfg' in changed_files:
      step = api.step('recipes.cfg in changed files', None)
      step.presentation.step_summary_text = 'marking all recipes as affected'
      affected_recipes = recipes

    parent_step.presentation.logs['affected recipes'] = affected_recipes

  return affected_recipes
| |
| |
@attr.s
class Attempt(object):
  """A single swarming-task attempt at running one child build."""

  index = attr.ib()  # 1-based attempt number within the parent Build.
  swarming_host = attr.ib()
  swarming_id = attr.ib()
  result = attr.ib(default=None)  # collect result; None while still running.

  @property
  def name(self):
    """Human-readable label for this attempt."""
    return 'attempt {}'.format(self.index)

  @property
  def success(self):
    """Truthy once a result has been collected and that result passed."""
    return self.result and self.result.success

  @property
  def in_progress(self):
    """True until a completed swarming result has been recorded."""
    return self.result is None

  @property
  def swarming_link(self):
    # Grabbing link from substep to show in unexpanded main step. The led
    # module has very similar code, but getting to that link requires
    # expanding this step in the UI.
    # https://chromium.googlesource.com/infra/luci/recipes-py/+/c311cdcc/recipe_modules/led/api.py#83
    return 'https://{}/task?id={}'.format(self.swarming_host, self.swarming_id)
| |
@attr.s
class Build(object):
  """A child build driven through led, possibly across several attempts."""

  name = attr.ib()  # full builder name, 'project/bucket/builder'.
  _data = attr.ib()  # led job definition; rewritten by each edit below.
  recipe = attr.ib()  # name of the recipe this builder runs.
  attempts = attr.ib(default=attr.Factory(list))  # Attempt objects, oldest first.

  def include_cl(self, cl):
    """Point the led job at the given Gerrit CL."""
    self._data = self._data.then('edit-cr-cl', cl)

  def include_recipe_bundle(self):
    """Make the led job use the locally bundled recipes."""
    self._data = self._data.then('edit-recipe-bundle')

  def launch(self):
    """Launch a swarming task for this build and record a new Attempt."""
    launched = self._data.then('launch')
    swarming = launched.result['swarming']
    attempt = Attempt(
        # TODO(mohrr) Remove "+ 1", only there to minimize diff in this CL.
        index=1 + len(self.attempts),
        swarming_host=swarming['host_name'],
        swarming_id=swarming['task_id'],
    )
    self.attempts.append(attempt)
    return attempt

  @property
  def in_progress(self):
    """Truthy when the most recent attempt is still running."""
    return self.attempts and self.attempts[-1].in_progress

  @property
  def success(self):
    """Truthy when the most recent attempt passed."""
    return self.attempts and self.attempts[-1].success
| |
| |
def create_build(api, name):
  """Return a Build for `name` based on its most recent successful run.

  Searches buildbucket for the latest passing build of the named builder,
  fetches its definition via `led get-build`, and wraps it in a Build.

  Args:
    api: Recipe API object.
    name (str): Full builder name, 'project/bucket/builder'.
  """
  project, bucket, builder = name.split('/')
  predicate = rpc_pb2.BuildPredicate()
  predicate.builder.project = project
  predicate.builder.bucket = bucket
  predicate.builder.builder = builder
  predicate.status = common_pb2.SUCCESS

  builds = api.buildbucket.search(predicate, limit=1)
  assert len(builds) == 1

  data = api.led('get-build', builds[0].id)
  data.result['top_level']['name'] = name

  # Every job slice should agree on which recipe this builder runs.
  recipes = {
      slice_['userland']['recipe_properties']['recipe']
      for slice_ in data.result['job_slices']
  }
  assert len(recipes) == 1

  build = Build(name=name, data=data, recipe=recipes.pop())

  # If this build is being triggered from a change to this recipe, we need
  # to explicitly pass a CL. The most recent passing run of
  # fuchsia.try/recipes could take anywhere from about three to 75 minutes,
  # and the three minute version does not test much of what this recipe
  # actually does. In that case alter the build to run on a specific CL
  # that modifies the cobalt recipe alone. (The cobalt build usually takes
  # less than 10 minutes.)
  if build.recipe == 'recipes':
    build.include_cl('https://fuchsia-review.googlesource.com/c/303171')

  return build
| |
| |
def RunSteps(api, project, manifest, remote, unittest_only):
  """Check out the recipes, run their unit tests, then launch child builds.

  Unless `unittest_only` is set, every commit-queue tryjob (minus
  BROKEN_BUILDERS) whose recipe is affected by the current change is
  launched via led with the bundled recipes; failed builds are retried
  until MAX_ATTEMPTS launches, and any build that never passes fails the
  overall run.
  """
  # Resolve the build input to always contain a Gitiles commit.
  bb_build = api.buildbucket.build
  api.build_input_resolver.resolve(bb_build.input)

  with api.context(infra_steps=True):
    api.checkout.with_options(
        path=api.path['start_dir'],
        manifest=manifest,
        remote=remote,
        project=project,
        build_input=bb_build.input,
    )

  # Run the recipe unit tests.
  recipes_path = api.path['start_dir'].join('infra', 'recipes')
  with api.context(cwd=recipes_path):
    api.python('test', api.context.cwd.join('recipes.py'), args=['test', 'run'])

  if unittest_only:
    return

  # Candidate tryjobs: everything the CQ watches except known-broken ones.
  builders = sorted(
      x for x in api.commit_queue.all_tryjobs() if x not in BROKEN_BUILDERS)
  step = api.step('normalized_tryjobs', None)
  step.presentation.logs['tryjobs'] = builders

  affected_recipes = get_affected_recipes(api)

  # Builds selected for launching, keyed by full builder name.
  builds = {}
  with api.step.nest('get builders') as nest:
    with api.context(cwd=recipes_path):
      for b in builders:
        with api.step.nest(b) as parent_step:
          build = create_build(api, b)
          parent_step.presentation.logs['recipe_used'] = [build.recipe]

          if build.recipe in affected_recipes:
            # NOTE(review): step_summary_text is assigned directly on the
            # nest step here (and below) but via `.presentation` at other
            # call sites in this file -- confirm both spellings take effect.
            parent_step.step_summary_text = 'SELECTED'
            builds[build.name] = build
          else:
            parent_step.step_summary_text = 'skipped'
      nest.step_summary_text = 'selected {} builds'.format(len(builds))

  # Configure child builds.
  with api.step.nest('configure builds') as nest:
    for _, build in sorted(builds.iteritems()):
      with api.step.nest(build.name) as parent_step:
        with api.context(cwd=recipes_path):
          build.include_recipe_bundle()

  def is_complete(result):
    """Return True when a swarming collect result represents a finished task."""
    # At the moment results have a bunch of fields set to None if incomplete.
    # On the assumption this will be changed at some point I'm also checking
    # the state explicitly.
    if result.name is None:
      return False

    return result.state not in {
        api.swarming.TaskState.RUNNING,
        api.swarming.TaskState.PENDING,
    }

  def add_result_link(step, result):
    """Add a swarming link with the build name to the given step."""
    # builds_by_id is assigned under 'launch/collect' below; this closure
    # is only invoked after that assignment.
    build = builds_by_id[result.id]
    attempt = build.attempts[-1]
    name = '{} ({})'.format(build.name, attempt.name)
    step.presentation.links[name] = attempt.swarming_link
    if is_complete(result):
      attempt.result = result

  def add_result_step(build):
    """Create a new step with the swarming link."""
    step = api.step(build.name, None)
    for attempt in build.attempts:
      name = '{} ({})'.format(
          attempt.name, 'pass' if attempt.success else 'fail')
      step.presentation.links[name] = attempt.swarming_link
    if build.success:
      step.presentation.status = api.step.SUCCESS
    else:
      step.presentation.status = api.step.FAILURE

  def launch_collect(n):
    """Launch necessary builds and process the ones that complete.

    Launch any builds that are not currently running, have not passed,
    and have not exceeded MAX_ATTEMPTS.

    After launching builds, wait for all builds to complete (those just
    launched as well as those that have been running for awhile), but
    timeout after COLLECT_TIMEOUT. Summarize the jobs that have just
    passed or failed as well as those still running (with swarming
    links).

    Args:
      n (int): Number of times launch_collect() has previously been
        called.

    Returns:
      Number of jobs still running or to be relaunched. As long as this
      is positive the caller should continue calling launch_collect().
    """

    with api.step.nest(str(n)) as n_step:
      summary = []
      # Launch builds
      with api.step.nest('launch'):
        num_launches = 0
        for _, build in sorted(builds.iteritems()):
          # Skip anything already running, already passed, or out of retries.
          if build.in_progress:
            continue

          if build.success:
            continue

          if len(build.attempts) >= MAX_ATTEMPTS:
            continue

          with api.step.nest(build.name) as build_step:
            attempt = build.launch()
            builds_by_id[attempt.swarming_id] = build
            num_launches += 1

            build_step.presentation.links['Swarming task'] = (
                attempt.swarming_link)

        if num_launches:
          summary.append('{} launched'.format(num_launches))

      # TODO(IN-1239) Create build steps and use api.cq to collect results.
      # For now, faking the results of swarming.collect into looking like steps.
      # Can't use cq.record_triggered_build_ids because the swarming hash is not
      # a build id. Eventually should be able to create builds on the fly that
      # then launch swarming jobs.
      with api.step.nest('collect'):
        results = []
        task_ids = [
            x.attempts[-1].swarming_id
            for x in builds.itervalues()
            if x.in_progress]
        if task_ids:
          results = api.swarming.collect(
              'collect', task_ids, timeout=COLLECT_TIMEOUT)

        completed = [result for result in results if is_complete(result)]
        incomplete = [result for result in results if not is_complete(result)]
        failed = [result for result in completed if not result.success]
        passed = [result for result in completed if result.success]

        # The string below is a commented-out local-debugging aid (a no-op
        # expression statement), deliberately left in place.
        """
        with open('/dev/tty', 'w') as outs:
          import pprint
          outs.write('''
        completed: {}
        incomplete: {}
        failed: {}
        passed: {}
        '''.format(
            pprint.pformat(completed),
            pprint.pformat(incomplete),
            pprint.pformat(failed),
            pprint.pformat(passed),
        ))
        """

        def process_build_list(name, build_list):
          """Create a step with swarming links to each build."""
          if build_list:
            with api.step.nest('{} builds'.format(name)) as step:
              for result in build_list:
                add_result_link(step, result)
            summary.append('{} {}'.format(len(build_list), name))

        process_build_list('passed', passed)
        process_build_list('failed', failed)
        process_build_list('incomplete', incomplete)

      n_step.presentation.step_summary_text = ', '.join(summary)

      to_be_relaunched = [
          x for x in builds.itervalues()
          if not x.success and len(x.attempts) < MAX_ATTEMPTS
      ]
      return len(to_be_relaunched) + len(incomplete)

  with api.step.nest('launch/collect'):
    # Maps swarming task id -> Build; shared with the closures above.
    builds_by_id = {}
    n = 0
    while launch_collect(n):
      n += 1

  passed = [x for x in builds.itervalues() if x.success]
  failed = [x for x in builds.itervalues() if not x.success]

  with api.step.nest('passes') as step:
    for build in passed:
      add_result_step(build)
    step.presentation.step_summary_text = '{} passed'.format(len(passed))

  with api.step.nest('failures') as step:
    for build in failed:
      add_result_step(build)
    step.presentation.step_summary_text = '{} failed'.format(len(failed))

  if failed:
    raise api.step.StepFailure('subbuild(s) failed: {}'.format(', '.join(
        x.name for x in failed)))

  else:
    api.step('all builds passed', None)
| |
| |
def GenTests(api):
  """Simulation tests for selection, retries, flakes, and skip paths."""
  # yapf:disable
  def build_data(name, recipe):
    """Mock the buildbucket search and `led get-build` for one builder."""
    result = api.buildbucket.simulated_search_results(
        [build_pb2.Build(id=37, status=common_pb2.SUCCESS)],
        'get builders.{}.buildbucket.search'.format(name))

    builder_data = {
        'top_level': {
            'name': 'led: bb-1-{}'.format(name),
        },
        'job_slices': [
            {
                'userland': {
                    'recipe_properties': {
                        'recipe': recipe,
                    },
                },
            },
        ],
    }

    result += api.step_data(
        'get builders.{}.led get-build'.format(name),
        stdout=api.json.output(builder_data))

    return result

  def led_data(name, task_id=0, n=0):
    """Mock `led launch` output for round `n` of builder `name`."""
    launch_data = {
        'swarming': {
            'host_name': 'chromium-swarm.appspot.com',
            'task_id': task_id or 0,
        }
    }

    return api.step_data(
        'launch/collect.{}.launch.{}.led launch'.format(n, name),
        stdout=api.json.output(launch_data))

  def affected_recipes_data(affected_recipes,
                            recipe_files=None,
                            changed_files=None,
                            error=None,
                            invalid_recipes=(),
                            step_name='get_affected_recipes.recipes-analyze'):
    """Mock the steps inside get_affected_recipes (ls, diff, analyze)."""
    if not recipe_files:
      recipe_files = ['foo', 'fuchsia.py', 'recipes.py', 'sdk.expected']
    res = api.step_data(
        'get_affected_recipes.ls-recipes',
        stdout=api.raw_io.output(
            ''.join('{}\n'.format(x) for x in recipe_files)))

    if not changed_files:
      changed_files = [
          'recipes/fuchsia.py',
          'recipes/foo',
          'recipes/non_expected_json_file.json',
          'recipe_modules/foo/full.expected/bar.json',
      ]
    res += api.step_data(
        'get_affected_recipes.git diff-tree',
        stdout=api.raw_io.output(
            ''.join('{}\0'.format(x) for x in changed_files)))

    output = {
        'recipes': list(affected_recipes),
        'error': error or '',
        'invalidRecipes': list(invalid_recipes),
    }
    retcode = -1 if error else 0
    res += api.step_data(step_name, api.json.output(output), retcode=retcode)

    return res

  # Tryserver run where no watched recipe is affected.
  yield (api.test('cq_try') +
         api.gitiles.refs('refs', ['refs/heads/master', 'c' * 40]) +
         api.commit_queue.test_data() +
         affected_recipes_data(['none']) +
         build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia') +
         build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia') +
         build_data('fuchsia/try/cobalt-x64-linux', 'cobalt') +
         api.buildbucket.try_build(
             git_repo='https://fuchsia.googlesource.com/infra/recipes') +
         api.properties.tryserver(
             manifest='manifest/minimal',
             remote='https://fuchsia.googlesource.com/infra/recipes',
         ))

  def props(unittest_only=False):
    """Standard input properties shared by the CI-build test cases."""
    return api.properties(
        project='garnet',
        manifest='manifest/garnet',
        remote='https://fuchsia.googlesource.com/garnet',
        import_in='manifest/third_party',
        import_from='zircon',
        unittest_only=unittest_only)

  ci_build = api.buildbucket.ci_build(
      project='infra/recipes',
      git_repo='https://fuchsia.googlesource.com/infra/recipes',
  )

  def incomplete(task_id):
    """A collect result for a task that is still running."""
    return task_result(task_id, None)

  def task_result(task_id, name, failed=False):
    """A completed (or, with name=None, still-pending) collect result."""
    return api.swarming.task_result(
        id=task_id,
        name=name,
        state=None if not name else api.swarming.TaskState.COMPLETED,
        failure=failed,
    )

  def collect_step_data(results, n=0):
    """Mock the swarming collect step for launch/collect round `n`."""
    return api.override_step_data(
        'launch/collect.{}.collect.collect'.format(n),
        api.swarming.collect(results))

  # ".resources" directories and non-.py files must not be treated as recipes.
  yield (
      api.test('recursive_ls') +
      props() +
      ci_build +
      api.commit_queue.test_data('empty') +
      affected_recipes_data(
          affected_recipes=[],
          recipe_files=['fuchsia/fuchsia.py', 'abc.resources/bar.py', 'abc.py'],
      ))

  # Changing recipes.cfg marks every recipe as affected.
  yield (
      api.test('recipes_cfg') +
      props() +
      ci_build +
      api.commit_queue.test_data('empty') +
      affected_recipes_data(
          affected_recipes=[],
          recipe_files=['a.py', 'b.py', 'c.py', 'd.py', 'e.py'],
          changed_files=['infra/config/recipes.cfg'],
      ))

  yield (
      api.test('two_pass_one_skip') +
      props() +
      ci_build +
      api.commit_queue.test_data() +
      affected_recipes_data(['fuchsia']) +
      build_data('fuchsia/try/cobalt-x64-linux', 'cobalt') +
      build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-x64-debug', task_id='deadbeef') +
      build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-arm64-debug', task_id='defaced') +
      collect_step_data([
          task_result('deadbeef', 'fuchsia/try/fuchsia-x64-debug'),
          task_result('defaced', 'fuchsia/try/fuchsia-arm64-debug'),
      ])
  )

  # One build fails twice (exhausting MAX_ATTEMPTS) -> overall failure.
  yield (
      api.test('one_pass_one_fail_one_skip') +
      props() +
      ci_build +
      api.commit_queue.test_data() +
      affected_recipes_data(['fuchsia']) +
      build_data('fuchsia/try/cobalt-x64-linux', 'cobalt') +
      build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-x64-debug', task_id='x1') +
      build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-arm64-debug', task_id='a1', n=0) +
      collect_step_data([
          task_result('x1', 'fuchsia/try/fuchsia-x64-debug'),
          task_result('a1', 'fuchsia/try/fuchsia-arm64-debug', failed=True),
      ]) +
      led_data('fuchsia/try/fuchsia-arm64-debug', task_id='a2', n=1) +
      collect_step_data([
          task_result('a2', 'fuchsia/try/fuchsia-arm64-debug', failed=True),
      ], n=1)
  )

  # One build fails once then passes on retry -> overall success.
  yield (
      api.test('one_pass_one_flake_one_skip') +
      props() +
      ci_build +
      api.commit_queue.test_data() +
      affected_recipes_data(['fuchsia']) +
      build_data('fuchsia/try/cobalt-x64-linux', 'cobalt') +
      build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-x64-debug', task_id='x1') +
      build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-arm64-debug', task_id='a1', n=0) +
      collect_step_data([
          task_result('x1', 'fuchsia/try/fuchsia-x64-debug'),
          task_result('a1', 'fuchsia/try/fuchsia-arm64-debug', failed=True),
      ]) +
      led_data('fuchsia/try/fuchsia-arm64-debug', task_id='a2', n=1) +
      collect_step_data([
          task_result('a2', 'fuchsia/try/fuchsia-arm64-debug'),
      ], n=1)
  )

  # No selected builders -> nothing launched.
  yield (
      api.test('fuchsia_recipe_unaffected') +
      props() +
      ci_build +
      api.commit_queue.test_data() +
      affected_recipes_data(['qemu']) +
      build_data('fuchsia/try/cobalt-x64-linux', 'cobalt') +
      build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia') +
      build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia')
  )

  # The 'recipes' recipe itself is affected -> CL is injected (create_build).
  yield (
      api.test('recipes') +
      props() +
      ci_build +
      api.commit_queue.test_data('recipes-only') +
      affected_recipes_data(['recipes']) +
      build_data('fuchsia/try/recipes', 'recipes') +
      led_data('fuchsia/try/recipes', task_id='deadbeef') +
      collect_step_data([task_result('deadbeef', 'fuchsia/try/recipes')])
  )

  yield (
      api.test('unittest_only') +
      props(unittest_only=True) +
      ci_build
  )

  # Several empty/partial collect rounds exercise the relaunch loop.
  yield (
      api.test('long_test_with_flakes') +
      props() +
      ci_build +
      api.commit_queue.test_data('only-fuchsia-debug') +
      affected_recipes_data(['fuchsia']) +
      build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-x64-debug', task_id='x1', n=0) +
      build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia') +
      led_data('fuchsia/try/fuchsia-arm64-debug', task_id='a1', n=0) +
      collect_step_data([], n=0) +
      collect_step_data([], n=1) +
      collect_step_data([incomplete('a1'), incomplete('x1')], n=2) +
      collect_step_data([], n=3) +
      collect_step_data([
          task_result('x1', 'fuchsia/try/fuchsia-x64-debug', failed=True)
      ], n=4) +
      led_data('fuchsia/try/fuchsia-x64-debug', task_id='x2', n=5) +
      collect_step_data([], n=5) +
      collect_step_data([
          task_result('x2', 'fuchsia/try/fuchsia-x64-debug', failed=True),
      ], n=6) +
      collect_step_data([], n=7) +
      collect_step_data([
          task_result('a1', 'fuchsia/try/fuchsia-arm64-debug', failed=True),
      ], n=8) +
      led_data('fuchsia/try/fuchsia-arm64-debug', task_id='a2', n=9) +
      collect_step_data([
          task_result('a2', 'fuchsia/try/fuchsia-arm64-debug'),
      ], n=9)
  )


  # yapf:enable