blob: 0859faa6c97a17995d6f3ec9229ba5e6821d9f03 [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import attr
from recipe_engine.config_types import Path
# The path to the generated sources dir within an isolate archive.
GENERATED_SOURCES_DIR = "generated_sources"
# The path to the serialized task requests within an isolate archive.
TASK_REQUESTS_JSON = "task_requests.json"
# The path to the serialized triage sources list within an isolate archive.
TRIAGE_SOURCES_JSON = "triage_sources.json"
def tool_attrib(tool, linux_x64=False, isolate_path=None, **kwargs):
    """Declare a tool to include in the test orchestration inputs.

    Args:
        tool (str): Will be passed to build_results.tool() to get the path to
            the tool when uploading to isolate.
        linux_x64 (bool): Whether the tool is only run by the orchestrator
            builder, which always runs on a Linux x64 bot.
        isolate_path (str): The relative path to the tool in an Isolate
            archive. Defaults to the value of `tool`.
        **kwargs (dict): Passed through to attr.ib().
    """
    swarming_kwargs = {"name": tool}
    if linux_x64:
        # Orchestrator builders always run on x64 Linux, regardless of the
        # OS/arch that the subbuild runs on.
        swarming_kwargs["cpu"] = "x64"
        swarming_kwargs["os"] = "linux"
    return attr.ib(
        metadata={
            "tool_kwargs": swarming_kwargs,
            # An empty or None isolate_path falls back to the tool name.
            "isolate_path": isolate_path or tool,
        },
        **kwargs
    )
@attr.s
class _TestOrchestrationInputs(object):
    """Data used to orchestrate testing.

    Bundles everything a test orchestrator builder needs: the swarming task
    requests to launch, listings of generated/triage sources, and host tool
    paths. Instances round-trip through Isolate via isolate()/download().
    """

    # Recipe API object used to run steps (file, json, isolated, path, ...).
    _api = attr.ib()
    # A list of swarming.TaskRequests that the orchestrator should launch.
    # It will be serialized to a JSON file for transfer via Isolate.
    task_requests = attr.ib(type=list)
    # generated_sources should be a list of file paths relative to the
    # generated_sources_root.
    generated_sources = attr.ib(type=list)
    generated_sources_root = attr.ib(type=Path)
    # triage_sources entries are joined against triage_sources_root when
    # building the symlink tree in _symlink().
    triage_sources = attr.ib(type=list)
    triage_sources_root = attr.ib(type=Path)

    # Tools that will be copied to the orchestrator via Isolate.
    covargs = tool_attrib(tool="covargs")
    llvm_cov = tool_attrib(tool="llvm-cov", isolate_path="llvm_cov")
    llvm_profdata = tool_attrib(tool="llvm-profdata", isolate_path="llvm_profdata")
    llvm_symbolizer = tool_attrib(tool="llvm-symbolizer")
    minfs = tool_attrib(tool="minfs")
    perfcompare = tool_attrib(tool="perfcompare")
    symbolize_tool = tool_attrib(tool="symbolize", isolate_path="symbolize_tool")
    resultdb = tool_attrib(tool="resultdb", linux_x64=True)
    tefmocheck = tool_attrib(tool="tefmocheck", linux_x64=True)
    triage = tool_attrib(tool="triage", linux_x64=True)

    # Constant shared between recipes that produce and consume.
    # Property that holds the isolate hash of the test orchestration inputs.
    HASH_PROPERTY = "test_orchestration_inputs_hash"
    # Same as HASH_PROPERTY, but this is used for the "without CL" build of
    # Fuchsia (the build without the CL under test being applied) when
    # perfcompare mode is enabled.
    HASH_PROPERTY_WITHOUT_CL = "test_orchestration_inputs_hash_without_cl"

    @classmethod
    def from_build_results(
        cls, api, build_results, task_requests, include_generated_sources=False
    ):
        """Constructs orchestration inputs from local build results.

        Args:
            api (RecipeApi): Recipe API object.
            build_results: Build results exposing `tool()`, `checkout`,
                `generated_sources`, and `triage_sources`.
            task_requests (list): swarming.TaskRequests for the orchestrator
                to launch.
            include_generated_sources (bool): Whether to include the build's
                generated sources; if False an empty list is recorded.

        Returns:
            A _TestOrchestrationInputs object.
        """
        kwargs = dict(
            api=api,
            generated_sources=(
                build_results.generated_sources if include_generated_sources else []
            ),
            task_requests=task_requests,
            # Both source listings are relative to the checkout root.
            generated_sources_root=build_results.checkout.root_dir,
            triage_sources=build_results.triage_sources,
            triage_sources_root=build_results.checkout.root_dir,
        )
        # Resolve each declared tool attrib to the corresponding tool path
        # from the build results.
        for attrib in attr.fields(cls):
            tool_kwargs = attrib.metadata.get("tool_kwargs")
            if tool_kwargs is None:
                # Not a tool attrib (e.g. task_requests); already populated.
                continue
            kwargs[attrib.name] = build_results.tool(**tool_kwargs)
        return cls(**kwargs)

    @classmethod
    def download(cls, api, isolated_hash):
        """Downloads an isolated containing TestOrchestrationInputs.

        Args:
            api (RecipeApi): The api to use for downloading the inputs.
            isolated_hash (string): The isolated hash to fetch.

        Returns:
            A TestOrchestrationInputs object.
        """
        kwargs = dict(api=api)
        with api.step.nest("download test orchestration inputs"):
            download_dir = api.path.mkdtemp("test-orchestration-inputs")
            api.isolated.download(
                "download test orchestration inputs",
                isolated_hash=isolated_hash,
                output_dir=download_dir,
            )
            # Deserialize the task requests written by _symlink().
            task_requests_json = api.json.read(
                "load task requests", download_dir.join(TASK_REQUESTS_JSON),
            ).json.output
            # Tolerate a missing/empty file by treating it as no tasks.
            task_requests_json = task_requests_json or []
            kwargs["task_requests"] = [
                api.swarming.task_request_from_jsonish(r) for r in task_requests_json
            ]
            # Enumerate everything archived under GENERATED_SOURCES_DIR,
            # recording each path relative to that root.
            generated_sources_root = download_dir.join(GENERATED_SOURCES_DIR)
            generated_sources = []
            for path in api.file.listdir(
                "load generated sources",
                generated_sources_root,
                recursive=True,
                test_data=["out/default/hi.txt", "out/default/bye.txt"],
            ):
                # Register the path so api.path.exists() sees it in tests.
                api.path.mock_add_paths(path)
                generated_sources.append(api.path.relpath(path, generated_sources_root))
            kwargs.update(
                generated_sources=generated_sources,
                generated_sources_root=generated_sources_root,
            )
            # Load the triage sources list, tolerating a missing/empty file.
            triage_sources_json = (
                api.json.read(
                    "load triage sources", download_dir.join(TRIAGE_SOURCES_JSON),
                ).json.output
                or []
            )
            # NOTE(review): unlike from_build_results(), these entries become
            # absolute Paths rather than root-relative strings, so calling
            # isolate() on a downloaded instance would likely misbehave in
            # _symlink() — confirm before relying on that round trip.
            kwargs.update(
                triage_sources=[
                    download_dir.join(path) for path in triage_sources_json
                ],
                triage_sources_root=download_dir,
            )
            # Resolve each declared tool attrib to its location inside the
            # downloaded archive.
            for attrib in attr.fields(cls):
                isolate_path = attrib.metadata.get("isolate_path")
                if not isolate_path:
                    # Not a tool attrib; already populated above.
                    continue
                # attr.s strips leading underscores when naming __init__
                # arguments, so normalize the field name to match.
                kwargs[attrib.name.lstrip("_")] = download_dir.join(isolate_path)
        return cls(**kwargs)

    def isolate(self):
        """Uploads to isolate.

        Returns:
            (str) Isolated hash.
        """
        with self._api.step.nest("isolate test orchestration inputs"):
            root = self._api.path.mkdtemp("test_orchestration_inputs")
            # Lay out the archive as a tree of symlinks to avoid copying.
            symlink_tree = self._api.file.symlink_tree(root)
            self._symlink(symlink_tree, root)
            symlink_tree.create_links("create_links")
            isolated = self._api.isolated.isolated(root)
            isolated.add_dir(root)
            return isolated.archive("isolate")

    def _symlink(self, symlink_tree, root):
        """Adds TestOrchestrationInputs to a SymlinkTree.

        Args:
            symlink_tree (SymlinkTree): The tree of symlinks to write to.
            root (Path): Root of symlink_tree
        """
        # Mirror generated sources under GENERATED_SOURCES_DIR so download()
        # can recover them relative to that directory.
        for path in self.generated_sources:
            symlink_tree.register_link(
                linkname=root.join(GENERATED_SOURCES_DIR, path),
                target=self.generated_sources_root.join(path),
            )
        # Triage sources keep their root-relative layout inside the archive.
        for path in self.triage_sources:
            symlink_tree.register_link(
                linkname=root.join(path), target=self.triage_sources_root.join(path),
            )
        # Serialize the task requests to JSON so download() can rebuild them.
        temp_dir = self._api.path.mkdtemp("test-orchestration-inputs")
        task_requests_file = temp_dir.join(TASK_REQUESTS_JSON)
        self._api.file.write_json(
            "write %s" % TASK_REQUESTS_JSON,
            data=[r.to_jsonish() for r in self.task_requests],
            dest=task_requests_file,
            indent=2,
        )
        symlink_tree.register_link(
            linkname=root.join(TASK_REQUESTS_JSON), target=task_requests_file
        )
        # Record the triage sources list so download() knows what to load.
        triage_sources_json = temp_dir.join(TRIAGE_SOURCES_JSON)
        self._api.file.write_json(
            "write %s" % TRIAGE_SOURCES_JSON,
            data=self.triage_sources,
            dest=triage_sources_json,
            indent=2,
        )
        symlink_tree.register_link(
            linkname=root.join(TRIAGE_SOURCES_JSON), target=triage_sources_json
        )
        # Link each tool into the archive at its declared isolate_path.
        for attrib in attr.fields(type(self)):
            isolate_path = attrib.metadata.get("isolate_path")
            target_path = getattr(self, attrib.name)
            if not target_path or not isolate_path:
                continue
            # Register the path so the exists() check passes in tests.
            self._api.path.mock_add_paths(target_path)
            # Only link tools that were actually produced.
            if self._api.path.exists(target_path):
                symlink_tree.register_link(
                    linkname=root.join(isolate_path), target=target_path
                )