# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import attr
from recipe_engine.config_types import Path
from PB.go.fuchsia.dev.fuchsia.tools.integration.fint.proto import (
set_artifacts as fint_set_artifacts_pb2,
)

# The path to the generated sources dir within a CAS archive.
GENERATED_SOURCES_DIR = "generated_sources"
# The path to the serialized task requests within a CAS archive.
TASK_REQUESTS_JSON = "task_requests.json"
# The path to skipped test shards within a CAS archive.
SKIPPED_SHARDS_JSON = "skipped_shards.json"
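# The path to the serialized fint set metadata within a CAS archive.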
FINT_SET_METADATA_JSON = "fint_set_metadata.json"
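# The path to the list of triage source paths within a CAS archive.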
TRIAGE_SOURCES_JSON = "triage_sources.json"


def tool_attrib(tool, linux_x64=False, **kwargs):
"""Declare a tool to include in the test orchestration inputs.
Args:
tool (str): Will be passed to build_results.tool() to get the path to
the tool when uploading to CAS.
linux_x64 (bool): Whether the tool is only run by the orchestrator
builder, which always runs on a Linux x64 bot.
**kwargs (dict): Passed through to attr.ib().
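
    Example (hypothetical attrs-class attribute declaration):
        my_tool = tool_attrib(tool="my-tool", linux_x64=True)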
"""
tool_kwargs = {"name": tool}
if linux_x64:
# Orchestrator builders always run on x64 Linux, regardless of the
# OS/arch that the subbuild runs on.
tool_kwargs.update(cpu="x64", os="linux")
metadata = {"tool_kwargs": tool_kwargs}
return attr.ib(metadata=metadata, **kwargs)


@attr.s
class SkippedShard:
"""Represents a skipped Fuchsia test shard.
Attributes:
name (str): The name of the shard.
env_name (str): The test environment the shard would have run on.
from_fuchsia (bool): Whether the shard would have run on Fuchsia.
        summary (dict): A test summary containing the results of the shard.
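
    Example JSON form (illustrative values only):
        {
            "name": "fuchsia-x64-shard",
            "env_name": "AEMU-x64",
            "from_fuchsia": true,
            "summary": {"tests": []}
        }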
"""
name = attr.ib(type=str)
env_name = attr.ib(type=str)
from_fuchsia = attr.ib(type=bool)
summary = attr.ib(type=dict)

    @classmethod
def from_jsonish(cls, jsond):
"""Creates a SkippedShard from a JSON-compatible dictionary."""
return cls(
name=jsond["name"],
env_name=jsond["env_name"],
from_fuchsia=jsond["from_fuchsia"],
summary=jsond["summary"],
)

    def to_jsonish(self):
"""Generates a JSON-compatible dictionary from this SkippedShard."""
return attr.asdict(self)


@attr.s
class _TestOrchestrationInputs:
"""Data used to orchestrate testing."""
_api = attr.ib()
# A list of swarming.TaskRequests that the orchestrator should launch.
# It will be serialized to a JSON file for transfer via CAS.
task_requests = attr.ib(type=list)
fint_set_metadata = attr.ib(type=fint_set_artifacts_pb2.SetArtifacts.Metadata)
# generated_sources should be a list of file paths relative to the
# generated_sources_root.
generated_sources = attr.ib(type=list)
generated_sources_root = attr.ib(type=Path)
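    # Paths to triage configuration sources, relative to triage_sources_root
    # when built via from_build_results (download() stores absolute paths).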
triage_sources = attr.ib(type=list)
triage_sources_root = attr.ib(type=Path)
# A list of SkippedShards.
skipped_shards = attr.ib(type=list)
# Tools that will be copied to the orchestrator via CAS.
# These should really all be linux_x64=True, but on Mac builders we don't have
# access to linux_x64 versions of some of the tools, and we get away with it
# because the tools aren't actually needed by the orchestrator.
covargs = tool_attrib(tool="covargs", linux_x64=True)
llvm_cov = tool_attrib(tool="llvm-cov")
llvm_profdata = tool_attrib(tool="llvm-profdata")
llvm_symbolizer = tool_attrib(tool="llvm-symbolizer")
perfcompare = tool_attrib(tool="perfcompare")
symbolizer_tool = tool_attrib(tool="symbolizer")
resultdb = tool_attrib(tool="resultdb", linux_x64=True)
tefmocheck = tool_attrib(tool="tefmocheck", linux_x64=True)
triage = tool_attrib(tool="triage", linux_x64=True)

    # Constants shared between the recipes that produce and consume these
    # inputs.
# Property that holds the CAS digest of the test orchestration inputs.
DIGEST_PROPERTY = "test_orchestration_inputs_digest"
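    # A producer recipe sets this output property to the digest returned by
    # upload(); a consumer reads it and passes it to download().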
# Same as DIGEST_PROPERTY, but this is used for the "without CL" build of
# Fuchsia (the build without the CL under test being applied) when
# perfcompare mode is enabled.
DIGEST_PROPERTY_WITHOUT_CL = "test_orchestration_inputs_digest_without_cl"

    @classmethod
def from_build_results(
cls,
api,
build_results,
task_requests,
shards,
include_generated_sources=False,
):
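        """Constructs test orchestration inputs from Fuchsia build results.

        Args:
            api (RecipeApi): The recipe API to use.
            build_results (FuchsiaBuildResults): The build results from which
                to take tools, metadata, and sources.
            task_requests (list(swarming.TaskRequest)): Task requests for the
                orchestrator to launch.
            shards (list): Test shards; any shard marked `should_skip` is
                recorded as a SkippedShard instead of being run.
            include_generated_sources (bool): Whether to include the build's
                generated sources in the inputs.
        """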
skipped_shards = [
SkippedShard(
name=shard.name,
env_name=f"{shard.device_type or shard.os}-{build_results.set_metadata.target_arch}",
from_fuchsia=shard.targets_fuchsia,
summary=shard.summary,
)
for shard in shards
if shard.should_skip
]
kwargs = dict(
api=api,
generated_sources=(
build_results.generated_sources if include_generated_sources else []
),
task_requests=task_requests,
skipped_shards=skipped_shards,
fint_set_metadata=build_results.set_metadata,
generated_sources_root=build_results.checkout.root_dir,
triage_sources=build_results.triage_sources,
triage_sources_root=build_results.checkout.root_dir,
)
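        # Resolve a host path for each tool attribute declared via
        # tool_attrib(), using the OS/CPU stored in its attr metadata.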
for attrib in attr.fields(cls):
tool_kwargs = attrib.metadata.get("tool_kwargs")
if tool_kwargs is None:
continue
kwargs[attrib.name] = build_results.tool(**tool_kwargs)
return cls(**kwargs)

    @classmethod
def download(cls, api, digest):
"""Downloads an archive containing TestOrchestrationInputs.
Args:
api (RecipeApi): The api to use for downloading the inputs.
digest (string): The CAS digest to fetch.
Returns:
A TestOrchestrationInputs object.
"""
kwargs = dict(api=api)
with api.step.nest("download test orchestration inputs"):
download_dir = api.path.mkdtemp("test-orchestration-inputs")
api.cas_util.download(
step_name="download test orchestration inputs",
digest=digest,
output_dir=download_dir,
)
skipped_shards_json = (
api.file.read_json(
"load skipped shards",
download_dir.join(SKIPPED_SHARDS_JSON),
)
or []
)
kwargs["skipped_shards"] = [
SkippedShard.from_jsonish(s) for s in skipped_shards_json
]
task_requests_json = (
api.file.read_json(
"load task requests",
download_dir.join(TASK_REQUESTS_JSON),
)
or []
)
kwargs["task_requests"] = [
api.swarming.task_request_from_jsonish(r) for r in task_requests_json
]
kwargs["fint_set_metadata"] = api.file.read_proto(
"load fint set metadata",
download_dir.join(FINT_SET_METADATA_JSON),
fint_set_artifacts_pb2.SetArtifacts.Metadata,
codec="JSONPB",
test_proto=fint_set_artifacts_pb2.SetArtifacts.Metadata(
target_arch="x64",
board="board.gni",
product="product.gni",
optimize="debug",
),
)
generated_sources_root = download_dir.join(GENERATED_SOURCES_DIR)
generated_sources = []
for path in api.file.listdir(
"load generated sources",
generated_sources_root,
recursive=True,
test_data=["out/default/hi.txt", "out/default/bye.txt"],
):
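                # Register the path with the mock filesystem so it exists
                # during simulation tests.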
api.path.mock_add_paths(path)
generated_sources.append(api.path.relpath(path, generated_sources_root))
kwargs.update(
generated_sources=generated_sources,
generated_sources_root=generated_sources_root,
)
triage_sources_json = (
api.file.read_json(
"load triage sources",
download_dir.join(TRIAGE_SOURCES_JSON),
)
or []
)
kwargs.update(
triage_sources=[
download_dir.join(path) for path in triage_sources_json
],
triage_sources_root=download_dir,
)
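            # Tools live at the archive root under their tool names. Strip any
            # leading underscore so the attr name matches its __init__ kwarg.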
for attrib in attr.fields(cls):
tool_kwargs = attrib.metadata.get("tool_kwargs")
if not tool_kwargs:
continue
kwargs[attrib.name.lstrip("_")] = download_dir.join(tool_kwargs["name"])
return cls(**kwargs)

    def upload(self):
"""Uploads to CAS.
Returns:
(str) CAS digest.
"""
with self._api.step.nest("upload test orchestration inputs"):
root = self._construct_file_tree()
digest = self._api.cas_util.upload(root)
# When using CAS, files are copied into the tree instead of
# linked, so remove the tree after uploading it to save disk
# space.
self._api.file.rmtree("remove archive tree", root)
return digest

    def _construct_file_tree(self):
"""Writes orchestration inputs to disk.
Returns the path to the directory to which all the orchestration inputs
were written. This directory will be uploaded to CAS and eventually
downloaded and "deserialized" by `download()`.
"""
root = self._api.path.mkdtemp("test_orchestration_inputs")
symlink_tree = self._api.file.symlink_tree(root)
for path in self.generated_sources:
symlink_tree.register_link(
linkname=root.join(GENERATED_SOURCES_DIR, path),
target=self.generated_sources_root.join(path),
)
for path in self.triage_sources:
symlink_tree.register_link(
linkname=root.join(path),
target=self.triage_sources_root.join(path),
)
self._api.file.write_json(
f"write {SKIPPED_SHARDS_JSON}",
data=[s.to_jsonish() for s in self.skipped_shards],
dest=root.join(SKIPPED_SHARDS_JSON),
indent=2,
)
self._api.file.write_json(
f"write {TASK_REQUESTS_JSON}",
data=[r.to_jsonish() for r in self.task_requests],
dest=root.join(TASK_REQUESTS_JSON),
indent=2,
)
self._api.file.write_proto(
f"write {FINT_SET_METADATA_JSON}",
proto_msg=self.fint_set_metadata,
dest=root.join(FINT_SET_METADATA_JSON),
codec="JSONPB",
)
self._api.file.write_json(
f"write {TRIAGE_SOURCES_JSON}",
data=self.triage_sources,
dest=root.join(TRIAGE_SOURCES_JSON),
indent=2,
)
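        # Link in each tool declared via tool_attrib(), skipping any tool that
        # wasn't built (see the note on the tool attribute declarations).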
for attrib in attr.fields(type(self)):
tool_kwargs = attrib.metadata.get("tool_kwargs")
target_path = getattr(self, attrib.name)
if not target_path or not tool_kwargs:
continue
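            # mock_add_paths makes the tool path exist in simulation tests; in
            # production, the exists() check filters out tools that weren't
            # built.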
self._api.path.mock_add_paths(target_path)
if self._api.path.exists(target_path):
symlink_tree.register_link(
linkname=root.join(tool_kwargs["name"]), target=target_path
)
symlink_tree.create_links("create links")
return root