blob: 253319ebbc968a14191a9734a666ab7440c55fc3 [file] [log] [blame]
# Copyright 2020 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code for testing recipes."""
import datetime
import fnmatch
from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2
from recipe_engine import recipe_api
from RECIPE_MODULES.fuchsia.swarming_retry import api as swarming_retry_api
from RECIPE_MODULES.fuchsia.utils.api import memoize
class Build(swarming_retry_api.LedTask):
  """A led task for a single tryjob, with helpers to edit the job."""

  def include_cl(self, cl):
    """Points the led job at the given Gerrit CL."""
    edited = self._led_data.then('edit-cr-cl', cl)
    self._led_data = edited

  def include_recipe_bundle(self):
    """Bundles the local recipes into the led job."""
    edited = self._led_data.then('edit-recipe-bundle')
    self._led_data = edited
class RecipeTestingApi(recipe_api.RecipeApi):
  """API for running tests and processing test results."""

  def __init__(self, *args, **kwargs):
    super(RecipeTestingApi, self).__init__(*args, **kwargs)
    # Successful builds older than this are considered too stale to be
    # representative of a builder's current behavior.
    self.max_build_age_seconds = int(
        datetime.timedelta(days=28).total_seconds())
    # LUCI project whose commit queue config supplies the tryjobs to test.
    self.project = 'fuchsia'

  def _get_affected_recipes(self):
    """Collect affected recipes. For now assume we care about all recipes.

    Runs `recipes.py analyze` over the files changed at HEAD to compute which
    recipes are affected; certain config/proto changes mark ALL recipes as
    affected.

    Returns:
      List of recipe names (paths relative to the recipes dir, minus the
      '.py' extension) affected by the current change.
    """
    with self.m.step.nest('get_affected_recipes') as parent_step:
      recipes_root = self.m.path['start_dir'].join('infra').join('recipes')
      recipes_dir = recipes_root.join('recipes')
      recipe_files = self.m.file.listdir(
          'ls-recipes', recipes_dir, recursive=True)
      all_recipes = []
      for recipe_file in recipe_files:
        # Normalize to a path relative to the recipes dir so names match what
        # `recipes.py analyze` expects.
        path = self.m.path.relpath(
            self.m.path.realpath(recipe_file),
            self.m.path.realpath(recipes_dir))
        # Files inside folders that end in ".resources" are never recipes.
        if self.m.path.dirname(path).endswith('.resources'):
          continue
        name, ext = self.m.path.splitext(path)
        if ext == '.py':
          all_recipes.append(name)
      parent_step.logs['all recipes'] = all_recipes
      with self.m.context(cwd=recipes_root):
        changed_files = self.m.git.get_changed_files(commit='HEAD')
      parent_step.logs['changed files (raw)'] = changed_files

      def is_expected_json(path):
        # We want to ignore expected JSON files--they won't affect how recipes
        # run. It's possible there are JSON files used as data for recipes
        # instead of as expected test outputs, so determine which files to
        # ignore very narrowly.
        return (self.m.path.splitext(path)[1] == '.json' and
                self.m.path.dirname(path).endswith('.expected'))

      def is_python_test_file(path):
        """Return True if this is a test file that we should ignore."""
        # We want to ignore test_api.py files--they won't affect how recipes
        # run in led, they only affect how recipes run in
        # './recipes test run', and we test that every time any recipe is
        # changed.
        if (self.m.path.basename(path) == 'test_api.py' and
            self.m.path.dirname(self.m.path.dirname(path)) == 'recipe_modules'):
          return True
        # Also ignore test definitions themselves. By convention these are
        # given the filename 'full.py' in Fuchsia, but there is no
        # guarantee this will remain the case.
        test_dir_names = ('tests', 'examples')
        if (self.m.path.splitext(path)[1] == '.py' and
            self.m.path.basename(self.m.path.dirname(path)) in test_dir_names):
          return True
        return False

      def is_ignored_file(path):
        return is_expected_json(path) or is_python_test_file(path)

      filtered_changed_files = [
          x for x in changed_files if not is_ignored_file(x)
      ]
      parent_step.logs['changed files (filtered)'] = (
          filtered_changed_files or ['no changed files'])
      # Ask the recipe engine which recipes are affected by the changed files.
      res = self.m.step('recipes-analyze', [
          recipes_root.join('recipes.py'), 'analyze',
          self.m.json.input({
              'recipes': all_recipes,
              'files': filtered_changed_files
          }),
          self.m.json.output()
      ])
      affected_recipes = res.json.output['recipes']

      def should_test_all_recipes(path):
        # Changes to these files can affect any recipe, so fall back to
        # treating every recipe as affected.
        globs = (
            'infra/config/recipes.cfg',
            # We particularly care about running CQ for fuchsia.proto changes.
            'recipe_proto/*.proto',
        )
        return any(fnmatch.fnmatch(path, glob) for glob in globs)

      special_changed_files = [
          f for f in changed_files if should_test_all_recipes(f)
      ]
      if special_changed_files:
        step = self.m.step('mark all recipes as affected', None)
        step.presentation.step_summary_text = (
            'because these files were changed:')
        step.presentation.step_text = '\n' + '\n'.join(special_changed_files)
        affected_recipes = all_recipes
      parent_step.logs['affected recipes'] = affected_recipes
      return affected_recipes

  def _get_last_green_build(self, builder):
    """Returns the build proto for a builder's most recent successful build.

    If no build younger than `self.max_build_age_seconds` is found, returns
    None.

    Args:
      builder (str): fully-qualified builder name in
        '<project>/<bucket>/<builder>' form (it is split on '/').
    """
    project, bucket, builder = builder.split('/')
    # "infra.recipe" is not returned by default, so we have to specify it.
    required_fields = {'infra.recipe'}.union(self.m.buildbucket.DEFAULT_FIELDS)
    build = self.m.buildbucket_util.last_build(
        project,
        bucket,
        builder,
        fields=required_fields,
        status=common_pb2.SUCCESS)
    if not build:
      return None
    # Discard builds that finished too long ago; they may no longer reflect
    # the builder's current configuration.
    age_seconds = self.m.time.time() - build.end_time.seconds
    if age_seconds > self.max_build_age_seconds:
      return None
    return build

  def _create_led_build(self, orig_build, selftest_cl):
    """Creates an editable led task modeled on a previous real build.

    Args:
      orig_build: buildbucket Build proto to base the led job on.
      selftest_cl: URL of the CL used when the recipe under test is the
        'recipes' recipe itself.

    Returns:
      A Build wrapping the led job, with its CL edited as appropriate.
    """
    builder = orig_build.builder.builder
    led_data = self.m.led('get-build', orig_build.id)
    # Rename the task so results are clearly attributable to recipes-cq.
    led_data.result['top_level']['name'] = 'recipes-cq:%s' % builder
    build = Build(api=self.m, name=builder, led_data=led_data)
    if orig_build.infra.recipe.name == 'recipes':
      # The recipes recipe is tested against a dedicated self-test CL.
      build.include_cl(selftest_cl)
    elif orig_build.input.gerrit_changes:
      orig_cl = orig_build.input.gerrit_changes[0]
      cl_id = self._get_latest_cl(orig_cl.host, orig_cl.project)
      # Setting the CL to a more recent CL helps avoid rebase errors, but if
      # unable to find a recent CL, keep the original. It usually works.
      if cl_id:
        build.include_cl('https://%s/c/%d' % (orig_cl.host, cl_id))
    return build

  @memoize
  def _get_latest_cl(self, gerrit_host, project):
    """Returns the integer number for a project's most recently landed CL.

    Args:
      gerrit_host (str): E.g., fuchsia-review.googlesource.com
      project (str): The name of the project in gerrit, e.g. "fuchsia"

    Returns:
      The integer change number for the CL corresponding to the commit at the
      tip of the master branch.
    """
    gitiles_host = gerrit_host.replace('-review', '')
    remote = 'https://%s/%s' % (gitiles_host, project)
    # Walk the most recent commits until one with Gerrit change details is
    # found (limit=10 bounds the number of Gerrit lookups).
    log = self.m.gitiles.log(
        remote, 'refs/heads/master', limit=10, step_name='log %s' % project)
    for log_entry in log:
      commit_hash = log_entry['id']
      change = self.m.gerrit.change_details(
          'latest change details for %s' % project,
          commit_hash,
          test_data=self.m.json.test_api.output({'_number': 12345}),
          ok_ret='any')
      # Commits that are committed directly without code review don't have
      # Gerrit change details.
      if change:
        return change['_number']
    return None

  def run_unit_tests(self, recipes_path):
    """Run the recipe unit tests."""
    with self.m.context(cwd=recipes_path):
      self.m.python(
          'test', self.m.context.cwd.join('recipes.py'), args=['test', 'run'])

  def run_led_tests(self, recipes_path, selftest_cl):
    """Launch led jobs for CQ builders.

    For each tryjob in the commit queue config, finds its last green build;
    if that build ran an affected recipe, relaunches it via led with the
    current recipe bundle and waits on the results.

    Args:
      recipes_path: Path to the recipes repo checkout.
      selftest_cl: URL of the CL used to test the 'recipes' recipe itself.
    """
    builders = sorted(self.m.commit_queue.all_tryjobs(project=self.project))
    step = self.m.step('normalized_tryjobs', None)
    step.presentation.logs['tryjobs'] = builders
    affected_recipes = self._get_affected_recipes()
    builds = []
    with self.m.step.nest('get builders') as nest:
      with self.m.context(cwd=recipes_path):
        for builder in builders:
          with self.m.step.nest(builder) as parent_step:
            orig_build = self._get_last_green_build(builder)
            if not orig_build:
              parent_step.step_summary_text = 'no recent builds found'
              continue
            recipe = orig_build.infra.recipe.name
            assert recipe
            if recipe in affected_recipes:
              parent_step.step_summary_text = 'SELECTED'
              builds.append(self._create_led_build(orig_build, selftest_cl))
            else:
              parent_step.step_summary_text = 'skipped'
              parent_step.logs['recipe_used'] = recipe
      nest.step_summary_text = 'selected {} builds'.format(len(builds))
    if not builds:
      return
    # Configure child builds.
    with self.m.step.nest('configure builds') as nest:
      for build in builds:
        with self.m.step.nest(build.name) as parent_step:
          with self.m.context(cwd=recipes_path):
            # Bundle the locally-checked-out recipes into each led job so the
            # relaunched build runs the recipes under test.
            build.include_recipe_bundle()
    with self.m.swarming_retry.retry(tasks=builds) as retry:
      retry.run_tasks()
      retry.present_tasks()