# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for testing Recipes."""
import functools
import os
from PB.go.chromium.org.luci.buildbucket.proto import build as build_pb2
from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2
from PB.go.chromium.org.luci.buildbucket.proto import rpc as rpc_pb2
from recipe_engine.recipe_api import Property
DEPS = [
'fuchsia/build_input_resolver',
'fuchsia/checkout',
'fuchsia/commit_queue',
'fuchsia/gerrit',
'fuchsia/git',
'fuchsia/gitiles',
'fuchsia/status_check',
'fuchsia/spec',
'fuchsia/swarming_retry',
'recipe_engine/buildbucket',
'recipe_engine/cipd',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/led',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'recipe_engine/swarming',
'recipe_engine/time',
]
PROPERTIES = {
'project':
Property(kind=str, help='Jiri remote manifest project', default=None),
'manifest':
Property(kind=str, help='Jiri manifest to use'),
'remote':
Property(kind=str, help='Remote manifest repository'),
# If this build is being triggered by a change to this recipe, we need to
# explicitly pass a CL. The most recent passing run of fuchsia.try/recipes
# could take anywhere from about three to 200 minutes, and the three-minute
# version does not exercise much of what this recipe actually does. So in
# that case the child 'recipes' build is pointed at a specific CL that
# modifies only the cobalt recipe. (The cobalt build usually takes less
# than 10 minutes.)
'selftest_cl':
Property(
kind=str,
help='CL to test with when this file changes',
default='https://fuchsia-review.googlesource.com/c/303171'),
'unittest_only':
Property(kind=bool, help='Finish after unit tests', default=False),
}
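# A builder's most recent green build is only reused as a led template if it
# finished within MAX_BUILD_AGE_SECONDS (see get_last_green_build below).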
ONE_DAY = 24 * 60 * 60
MAX_BUILD_AGE_SECONDS = 4 * 7 * ONE_DAY
def get_affected_recipes(api):
"""Collect affected recipes. For now assume we care about all recipes."""
with api.step.nest('get_affected_recipes') as parent_step:
recipes_root = api.path['start_dir'].join('infra').join('recipes')
recipes_dir = recipes_root.join('recipes')
recipe_files = api.file.listdir('ls-recipes', recipes_dir, recursive=True)
recipes = []
for recipe_file in recipe_files:
path = os.path.relpath(
api.path.realpath(recipe_file), api.path.realpath(recipes_dir))
# Files inside folders that end in ".resources" are never recipes.
if os.path.dirname(path).endswith('.resources'):
continue
name, ext = os.path.splitext(path)
if ext == '.py':
recipes.append(name)
parent_step.presentation.logs['all recipes'] = recipes
# Note that this command looks at the differences present in the most
# recent commit. It ignores any local changes.
# TODO(IN-1239): do something intelligent if local changes are present.
with api.context(cwd=recipes_root):
changed_files = api.git.get_changed_files(commit='HEAD')
parent_step.presentation.logs['changed files (raw)'] = changed_files
def is_expected_json(path):
# We want to ignore expected JSON files--they won't affect how recipes
# run. It's possible there are JSON files used as data for recipes
# instead of as expected test outputs, so determine which files to
# ignore very narrowly.
return (os.path.splitext(path)[1] == '.json' and
os.path.dirname(path).endswith('.expected'))
def is_python_test_file(path):
# We want to ignore test_api.py files: they won't affect how recipes run
# under led; they only affect how recipes run in './recipes test run',
# which we already run whenever any recipe is changed.
if (os.path.basename(path) == 'test_api.py' and
os.path.dirname(os.path.dirname(path)) == 'recipe_modules'):
return True
# Also ignore test definitions themselves. By convention these are
# given the filename 'full.py' in Fuchsia, but there is no
# guarantee this will remain the case.
if (os.path.splitext(path)[1] == '.py' and
os.path.basename(os.path.dirname(path)) in ('tests', 'examples')):
return True
return False
def is_ignored_file(path):
return is_expected_json(path) or is_python_test_file(path)
filtered_changed_files = [
x for x in changed_files if not is_ignored_file(x)
]
parent_step.presentation.logs['changed files (filtered)'] = (
filtered_changed_files or ['no changed files'])
res = api.step('recipes-analyze', [
recipes_root.join('recipes.py'), 'analyze',
api.json.input({
'recipes': recipes,
'files': filtered_changed_files
}),
api.json.output()
])
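# The analyze output is JSON with at least the fields consumed here and
# mocked in GenTests below, e.g.
# {'recipes': [...], 'error': '', 'invalidRecipes': [...]}.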
affected_recipes = res.json.output['recipes']
if 'infra/config/recipes.cfg' in changed_files:
step = api.step('recipes.cfg in changed files', None)
step.presentation.step_summary_text = 'marking all recipes as affected'
affected_recipes = recipes
parent_step.presentation.logs['affected recipes'] = affected_recipes
return affected_recipes
def get_last_green_build(api, builder):
"""Returns the build proto for a builder's most recent successful build.
If no build younger than `MAX_BUILD_AGE_SECONDS` is found, returns None.
"""
predicate = rpc_pb2.BuildPredicate()
(predicate.builder.project,
predicate.builder.bucket,
predicate.builder.builder) = builder.split('/') # yapf:disable
predicate.status = common_pb2.SUCCESS
# "infra.recipe" is not returned by default, so we have to specify it.
required_fields = {'infra.recipe'}.union(api.buildbucket.DEFAULT_FIELDS)
builds = api.buildbucket.search(predicate, limit=1, fields=required_fields)
if not builds:
return None
assert len(builds) == 1
build = builds[0]
age_seconds = api.time.time() - build.end_time.seconds
if age_seconds > MAX_BUILD_AGE_SECONDS:
return None
return build
def create_led_build(api, orig_build, selftest_cl):
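"""Returns a led-backed swarming_retry task that re-runs `orig_build`.

If the original build ran the 'recipes' recipe, the task is pointed at
`selftest_cl`; otherwise it is pointed at the most recently landed CL of
the project from the original build's gerrit change, if any.
"""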
class Build(api.swarming_retry.LedTask):
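"""led task that supports attaching a CL and a recipe bundle before launch."""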
def include_cl(self, cl):
self._led_data = self._led_data.then('edit-cr-cl', cl)
def include_recipe_bundle(self):
self._led_data = self._led_data.then('edit-recipe-bundle')
builder = orig_build.builder.builder
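# 'led get-build' fetches the original build's job definition; the edits
# below (edit-cr-cl, edit-recipe-bundle) are applied to it before
# swarming_retry launches the task.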
led_data = api.led('get-build', orig_build.id)
led_data.result['top_level']['name'] = 'recipes-cq:%s' % builder
build = Build(api=api, name=builder, led_data=led_data)
if orig_build.infra.recipe.name == 'recipes':
build.include_cl(selftest_cl)
elif orig_build.input.gerrit_changes:
orig_cl = orig_build.input.gerrit_changes[0]
cl_id = get_latest_cl(api, orig_cl.host, orig_cl.project)
build.include_cl('https://%s/c/%d' % (orig_cl.host, cl_id))
return build
def memoize(func):
"""A decorator to cache the return values of a function by args/kwargs."""
cache = {}
@functools.wraps(func)
def wrapper(*args, **kwargs):
key = (args, frozenset(kwargs.items()))
if key not in cache:
cache[key] = func(*args, **kwargs)
return cache[key]
return wrapper
@memoize
def get_latest_cl(api, gerrit_host, project):
"""Returns the integer change number for a project's most recently landed CL.
Args:
gerrit_host (str): E.g., fuchsia-review.googlesource.com
project (str): The name of the project in gerrit, e.g. "fuchsia"
Returns:
The integer change number for the CL corresponding to the commit at the
tip of the master branch.
"""
gitiles_host = gerrit_host.replace('-review', '')
remote = 'https://%s/%s' % (gitiles_host, project)
log = api.gitiles.log(
remote, 'refs/heads/master', limit=1, step_name='log %s' % project)
assert len(log) == 1
commit_hash = log[0]['id']
change = api.gerrit.change_details(
'latest change details for %s' % project,
commit_hash,
test_data=api.json.test_api.output({'_number': 12345}),
ok_ret='any')
return change['_number']
def RunSteps(api, project, manifest, remote, selftest_cl, unittest_only):
# Resolve the build input to always contain a Gitiles commit.
bb_build = api.buildbucket.build
api.build_input_resolver.resolve(bb_build.input)
with api.context(infra_steps=True):
api.checkout.with_options(
path=api.path['start_dir'],
manifest=manifest,
remote=remote,
project=project,
build_input=bb_build.input,
)
# Run the recipe unit tests.
recipes_path = api.path['start_dir'].join('infra', 'recipes')
with api.context(cwd=recipes_path):
api.python('test', api.context.cwd.join('recipes.py'), args=['test', 'run'])
if unittest_only:
return
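# Ask CQ which tryjobs it would run, determine which recipes this change
# affects, then launch a led copy of each affected builder's last green
# build.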
builders = sorted(api.commit_queue.all_tryjobs())
step = api.step('normalized_tryjobs', None)
step.presentation.logs['tryjobs'] = builders
affected_recipes = get_affected_recipes(api)
builds = []
with api.step.nest('get builders') as nest:
with api.context(cwd=recipes_path):
for builder in builders:
with api.step.nest(builder) as parent_step:
orig_build = get_last_green_build(api, builder)
if not orig_build:
parent_step.step_summary_text = 'no recent builds found'
continue
recipe = orig_build.infra.recipe.name
assert recipe
if recipe in affected_recipes:
parent_step.step_summary_text = 'SELECTED'
builds.append(create_led_build(api, orig_build, selftest_cl))
else:
parent_step.step_summary_text = 'skipped'
parent_step.presentation.logs['recipe_used'] = recipe
nest.step_summary_text = 'selected {} builds'.format(len(builds))
# Configure each child build to run the recipes from the local checkout
# (via led's edit-recipe-bundle).
with api.step.nest('configure builds') as nest:
for build in builds:
with api.step.nest(build.name) as parent_step:
with api.context(cwd=recipes_path):
build.include_recipe_bundle()
if builds:
with api.swarming_retry.retry(tasks=builds) as retry:
retry.run_tasks()
retry.present_tasks()
def GenTests(api):
# yapf:disable
def build_data(name, recipe, age_seconds=ONE_DAY, cl_cached=False,
skip=False):
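"""Returns mock data for one builder: a buildbucket search result plus,
unless the builder is expected to be skipped, the 'led get-build' and
(when needed) gitiles log step data used while creating the led build."""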
# This matches the default mock time provided by the recipe_engine 'time'
# module in tests; there appears to be no way to obtain it
# programmatically.
curr_time = 1337000000
end_time = curr_time - age_seconds
build = build_pb2.Build(id=37, status=common_pb2.SUCCESS)
build.end_time.seconds = end_time
build.builder.builder = name
build.infra.recipe.name = recipe
cl = build.input.gerrit_changes.add()
cl.host = 'fuchsia-review.googlesource.com'
cl.project = 'fuchsia'
result = api.buildbucket.simulated_search_results(
[build],
'get builders.{}.buildbucket.search'.format(name))
if skip or age_seconds > MAX_BUILD_AGE_SECONDS:
return result
builder_data = {
'top_level': {
'name': 'led: bb-1-{}'.format(name),
},
'job_slices': [
{
'userland': {
'recipe_properties': {
'recipe': recipe,
},
},
},
],
}
result += api.step_data(
'get builders.{}.led get-build'.format(name),
stdout=api.json.output(builder_data))
if recipe != 'recipes' and not cl_cached:
result += api.gitiles.log(
'get builders.{}.log {}'.format(name, cl.project), 'A', n=1)
return result
def no_build(name):
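"""Returns an empty buildbucket search result for builder `name`."""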
return api.buildbucket.simulated_search_results(
[],
'get builders.{}.buildbucket.search'.format(name))
def affected_recipes_data(affected_recipes,
recipe_files=None,
changed_files=None,
error=None,
invalid_recipes=(),
step_name='get_affected_recipes.recipes-analyze'):
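"""Returns mock data for the get_affected_recipes steps: the recipe
listing, the changed-files diff, and the recipes-analyze result."""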
if not recipe_files:
recipe_files = ['foo', 'fuchsia.py', 'recipes.py', 'sdk.expected']
res = api.step_data(
'get_affected_recipes.ls-recipes',
stdout=api.raw_io.output(
''.join('{}\n'.format(x) for x in recipe_files)))
if not changed_files:
changed_files = [
'recipes/fuchsia.py',
'recipes/foo',
'recipes/non_expected_json_file.json',
'recipe_modules/foo/examples/full.expected/bar.json',
'recipe_modules/foo/examples/full.py',
'recipe_modules/foo/test_api.py',
]
res += api.step_data(
'get_affected_recipes.git diff-tree',
stdout=api.raw_io.output(
''.join('{}\0'.format(x) for x in changed_files)))
output = {
'recipes': list(affected_recipes),
'error': error or '',
'invalidRecipes': list(invalid_recipes),
}
retcode = -1 if error else 0
res += api.step_data(step_name, api.json.output(output), retcode=retcode)
return res
yield (api.status_check.test('cq_try') +
api.build_input_resolver.set_gerrit_branch() +
api.gitiles.refs('refs', ['refs/heads/master', 'c' * 40]) +
api.commit_queue.test_data() +
affected_recipes_data(['none']) +
build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia', skip=True) +
build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia', skip=True) +
build_data('fuchsia/try/cobalt-x64-linux', 'cobalt', skip=True) +
api.buildbucket.try_build(
git_repo='https://fuchsia.googlesource.com/infra/recipes') +
api.properties(
manifest='manifest/minimal',
remote='https://fuchsia.googlesource.com/infra/recipes',
))
def props(unittest_only=False):
return api.properties(
project='garnet',
manifest='manifest/garnet',
remote='https://fuchsia.googlesource.com/garnet',
import_in='manifest/third_party',
import_from='zircon',
unittest_only=unittest_only)
ci_build = api.buildbucket.ci_build(
project='infra/recipes',
git_repo='https://fuchsia.googlesource.com/infra/recipes',
)
def task_result(task_id, name, failed=False):
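"""Returns a mock swarming task result for a led child build."""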
return api.swarming.task_result(
id=task_id,
name='recipes-cq:%s' % name,
state=None if not name else api.swarming.TaskState.COMPLETED,
failure=failed,
)
yield (
api.status_check.test('recursive_ls') +
props() +
ci_build +
api.commit_queue.test_data('empty') +
affected_recipes_data(
affected_recipes=[],
recipe_files=['fuchsia/fuchsia.py', 'abc.resources/bar.py', 'abc.py'],
)
)
yield (
api.status_check.test('recipes_cfg') +
props() +
ci_build +
api.commit_queue.test_data('empty') +
affected_recipes_data(
affected_recipes=[],
recipe_files=['a.py', 'b.py', 'c.py', 'd.py', 'e.py'],
changed_files=['infra/config/recipes.cfg'],
)
)
yield (
api.status_check.test('no_build_old_build_ignored_build') +
props() +
ci_build +
api.commit_queue.test_data() +
affected_recipes_data(['fuchsia']) +
build_data('fuchsia/try/cobalt-x64-linux', 'cobalt',
age_seconds=MAX_BUILD_AGE_SECONDS - ONE_DAY, skip=True) +
build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia',
age_seconds=MAX_BUILD_AGE_SECONDS + ONE_DAY) +
no_build('fuchsia/try/fuchsia-arm64-debug')
)
yield (
api.status_check.test('two_pass_one_skip') +
props() +
ci_build +
api.commit_queue.test_data() +
affected_recipes_data(['fuchsia']) +
build_data('fuchsia/try/cobalt-x64-linux', 'cobalt', skip=True) +
build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia') +
api.swarming_retry.led_data('fuchsia/try/fuchsia-arm64-debug',
task_id=200) +
build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia', cl_cached=True) +
api.swarming_retry.led_data('fuchsia/try/fuchsia-x64-debug',
task_id=100) +
api.swarming_retry.collect_data([
task_result(100, 'fuchsia/try/fuchsia-x64-debug'),
task_result(200, 'fuchsia/try/fuchsia-arm64-debug'),
])
)
yield (
api.status_check.test('fuchsia_recipe_unaffected') +
props() +
ci_build +
api.commit_queue.test_data() +
affected_recipes_data(['qemu']) +
build_data('fuchsia/try/cobalt-x64-linux', 'cobalt', skip=True) +
build_data('fuchsia/try/fuchsia-x64-debug', 'fuchsia', skip=True) +
build_data('fuchsia/try/fuchsia-arm64-debug', 'fuchsia', skip=True)
)
yield (
api.status_check.test('recipes') +
props() +
ci_build +
api.commit_queue.test_data('recipes-only') +
affected_recipes_data(['recipes']) +
build_data('fuchsia/try/recipes', 'recipes') +
api.swarming_retry.led_data('fuchsia/try/recipes', task_id=100) +
api.swarming_retry.collect_data(
[task_result(100, 'fuchsia/try/recipes')])
)
yield (
api.status_check.test('unittest_only') +
props(unittest_only=True) +
ci_build
)
# yapf:enable