# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Pylint is not smart enough to infer the return type of methods with a custom
# property decorator like @cached_property, so we have to disable some spurious
# warnings from cached property accesses. See
# https://github.com/PyCQA/pylint/issues/3484
#
# pylint: disable=no-member, not-an-iterable, not-context-manager
# pylint: disable=unsubscriptable-object
import contextlib
import copy
import attr
from google.protobuf import json_format as jsonpb
from recipe_engine import recipe_api
from .orchestration_inputs import _TestOrchestrationInputs
from PB.go.chromium.org.luci.buildbucket.proto import builder as builder_pb2
from PB.go.fuchsia.dev.fuchsia.tools.integration.fint.proto import (
context as context_pb2,
set_artifacts as fint_set_artifacts_pb2,
build_artifacts as fint_build_artifacts_pb2,
)
from RECIPE_MODULES.fuchsia.utils import cached_property, memoize
# `fint set` and `fint build`, respectively, will write these files to the
# artifact directory specified by the context spec.
FINT_SET_ARTIFACTS = "set_artifacts.json"
FINT_BUILD_ARTIFACTS = "build_artifacts.json"
# Manifests produced by the build.
CIPD_ASSEMBLY_ARTIFACTS_JSON = "cipd_assembly_artifacts.json"
CTS_ARTIFACTS_JSON = "cts_artifacts.json"
RBE_CONFIG_JSON = "rbe_config.json"
TOOL_PATHS_JSON = "tool_paths.json"
TRIAGE_SOURCES_JSON = "triage_sources.json"
# Name of BigQuery project and table for uploading artifacts.
BIGQUERY_PROJECT = "fuchsia-infra"
BIGQUERY_ARTIFACTS_DATASET = "artifacts"
# The private and authorized SSH keys pulled down in the checkout, relative to
# the fuchsia root.
CHECKOUT_AUTHORIZED_KEY = ".ssh/authorized_keys"
CHECKOUT_PRIVATE_KEY = ".ssh/pkey"
# The path to the public key corresponding to the private key used to sign
# release builds. Only present on release builders.
RELEASE_PUBKEY_PATH = "/etc/release_keys/release_key_pub.pem"
# The name of the public key file uploaded in release builds.
RELEASE_PUBKEY_FILENAME = "publickey.pem"
# Name of the compdb generated by GN; it is expected in the root of the build
# directory.
GN_COMPDB_FILENAME = "compile_commands.json"
# Name of the log containing the build failure summary.
FAILURE_SUMMARY_LOG = "failure summary"
# Set as an output property and consumed by the
# go/cq-incremental-builder-monitor_dev dashboard.
BUILD_FAILED_PROPERTY = "build_failed"
class NoSuchTool(Exception):
def __init__(self, name, cpu, os):
super(NoSuchTool, self).__init__(
"no such tool in %s: (name=%r, cpu=%r, os=%r)"
% (TOOL_PATHS_JSON, name, cpu, os)
)
@attr.s
class _FuchsiaBuildResults(object):
"""Represents the outputs of a completed build of Fuchsia."""
_api = attr.ib()
checkout = attr.ib()
build_dir = attr.ib()
gn_results = attr.ib()
images = attr.ib(factory=list)
archives = attr.ib(factory=list)
_size_check_failed = attr.ib(init=False, default=False)
ninja_targets = attr.ib(factory=list)
zedboot_images = attr.ib(factory=list)
zbi_test_qemu_kernel_images = attr.ib(factory=dict)
fint_build_artifacts = attr.ib(
type=fint_build_artifacts_pb2.BuildArtifacts, default=None
)
def __deepcopy__(self, memodict):
# Shallow copy first.
new = copy.copy(self)
        # Only `images` needs a real deep copy.
new.images = copy.deepcopy(new.images, memodict)
return new
@property
def set_metadata(self):
return self.gn_results.fint_set_artifacts.metadata
@property
def affected_tests(self):
"""Returns a list of names of tests affected by the current gerrit change."""
if self._api.build.can_exit_early_if_unaffected(self.checkout):
return self.fint_build_artifacts.affected_tests
return []
@property
def no_work(self):
"""Returns whether all testing can be skipped."""
if self._api.build.can_exit_early_if_unaffected(self.checkout):
return self.fint_build_artifacts.build_not_affected
return False
@property
def authorized_key(self):
return self.checkout.root_dir.join(CHECKOUT_AUTHORIZED_KEY)
@property
def private_key(self):
return self.checkout.root_dir.join(CHECKOUT_PRIVATE_KEY)
@cached_property
def _binary_sizes(self):
"""The binary sizes data produced by the build.
Returns:
            Tuple of (sizes dict, stderr str).
"""
cmd = [
self.tool("size_checker"),
"--build-dir",
self.build_dir,
"--sizes-json-out",
self._api.json.output(),
]
check_sizes_result = self._api.step(
"size_checker",
cmd,
step_test_data=lambda: self._api.json.test_api.output({"component": 1}),
ok_ret="any",
stderr=self._api.raw_io.output_text(),
)
self._size_check_failed = bool(check_sizes_result.exc_result.retcode)
if self._size_check_failed:
check_sizes_result.presentation.status = self._api.step.FAILURE
return check_sizes_result.json.output, check_sizes_result.stderr
@cached_property
def _codesize_report(self):
"""The codesize report produced by the build.
Returns:
            Tuple of (sizes dict, stderr str).
"""
cmd = [
self.tool("codesize"),
"--checkout-dir",
self.checkout.root_dir,
"--build-dir",
self.build_dir,
"--output",
self._api.json.output(),
"--format",
"json",
]
codesize_result = self._api.step(
"codesize",
cmd,
step_test_data=lambda: self._api.json.test_api.output({"foo": 1}),
stderr=self._api.raw_io.output_text(),
)
return codesize_result.json.output, codesize_result.stderr
@cached_property
def cts_artifacts(self):
"""The paths to CTS artifacts relative to the checkout root."""
relpaths = self._api.file.read_json(
"read cts artifacts manifest",
self.build_dir.join(CTS_ARTIFACTS_JSON),
test_data=["foo.far", "bar.far"],
)
return [
self._api.path.abs_to_path(
self._api.path.realpath(self.build_dir.join(relpath))
)
for relpath in relpaths
]
@cached_property
def cipd_assembly_artifacts(self):
"""Paths to files to include in an assembly artifacts CIPD package.
cipd_assembly_artifacts.json contains a list of paths to manifests
produced by the build, where each manifest itself contains a list of
dictionaries corresponding to files required for product assembly.
Returns a dictionary mapping from relative path within the CIPD package
to the absolute path to the file that should be copied into that
destination. This dictionary is the result of merging all the manifests
        referenced by cipd_assembly_artifacts.json.
"""
def abspath(relpath_in_build_dir):
return self._api.path.abs_to_path(
self._api.path.realpath(self.build_dir.join(relpath_in_build_dir))
)
assembly_manifests = [
m["path"]
for m in self._api.file.read_json(
"read %s" % CIPD_ASSEMBLY_ARTIFACTS_JSON,
abspath(CIPD_ASSEMBLY_ARTIFACTS_JSON),
test_data=[{"path": "obj/assembly_inputs.json"}],
)
]
copy_mapping = {}
for manifest in assembly_manifests:
inputs = self._api.file.read_json(
"read %s" % manifest,
abspath(manifest),
test_data=[
{
"source": "host_x64/tool",
"destination": "host_x64/tool",
},
{
"source": "../../prebuilt/other-tool",
"destination": "prebuilt/other-tool",
},
],
)
for inp in inputs:
source, dest = abspath(inp["source"]), inp["destination"]
if dest in copy_mapping: # pragma: no cover
                    raise self._api.step.StepFailure(
"Multiple files have the same destination %s: %s, %s"
% (dest, copy_mapping[dest], source)
)
copy_mapping[dest] = source
return copy_mapping
@cached_property
def generated_sources(self):
"""The paths to the generated source files relative to the checkout root."""
generated_sources = []
for path in self.gn_results.generated_sources:
try:
abspath = self._api.path.abs_to_path(
self._api.path.realpath(self.build_dir.join(path.lstrip("/")))
)
except ValueError: # pragma: no cover
raise self._api.step.StepFailure(
"Invalid path in generated_sources.json: %s" % path
)
self._api.path.mock_add_paths(abspath)
if self._api.path.exists(abspath):
generated_sources.append(
self._api.path.relpath(abspath, self.checkout.root_dir)
)
return generated_sources
@cached_property
def triage_sources(self):
"""The paths to the triage sources relative to the checkout root."""
return [
self._api.path.relpath(f, self.checkout.root_dir)
for f in self.gn_results.triage_sources
]
@property
def compdb_path(self):
return self.gn_results.compdb_path
def filtered_compdb(self, *args, **kwargs):
return self.gn_results.filtered_compdb(*args, **kwargs)
def tool(self, name, cpu="x64", os=None, **kwargs):
"""The path to a tool of the given name and cpu."""
return self.gn_results.tool(name, cpu, os, **kwargs)
def check_binary_sizes(self):
"""Checks that binary sizes are less than the maximum allowed.
Raises:
StepFailure if the size exceeds the maximum.
"""
with self._api.step.nest("check binary sizes") as presentation:
sizes_json, size_checker_logs = self._binary_sizes
presentation.logs["size_checker logs"] = size_checker_logs
if sizes_json:
# This property is read by the binary-size Gerrit plugin.
presentation.properties["binary_sizes"] = sizes_json
if self._size_check_failed:
presentation.logs["size_checker JSON output"] = self._api.json.dumps(
sizes_json, indent=2
)
raise self._api.step.StepFailure("binary size checks failed")
return sizes_json
def compute_size_diff(
self,
gitiles_remote,
base_commit,
ci_bucket,
binary_sizes,
ci_builder=None,
):
"""Attempts to compute the size diff against the matching CI builder.
Args:
gitiles_remote (str): Gitiles remote for base commit.
base_commit (str): Base commit as sha1.
ci_bucket (str): Bucket name of the CI builder to inspect.
binary_sizes (dict): Input binary sizes object.
ci_builder (str): Builder name of the CI builder to inspect.
Defaults to the current builder's name.
Returns:
dict: Binary size diff.
"""
with self._api.step.nest("compute size diff"):
return self._api.size_diff.ci(
"diff ci",
gitiles_remote,
base_commit,
builder_pb2.BuilderID(
project=self._api.buildbucket.build.builder.project,
bucket=ci_bucket,
builder=ci_builder or self._api.buildbucket.build.builder.builder,
),
self._api.json.input(binary_sizes),
)
def generate_codesize_report(self):
"""Generates a codesize report and presents the output JSON."""
with self._api.step.nest("generate codesize report") as presentation:
codesize_report, codesize_logs = self._codesize_report
presentation.logs["codesize logs"] = codesize_logs
# TODO(fxbug.dev/84202): Merge this report into the size_checker
# output once we confirm that codesize is working properly.
presentation.properties["codesize report"] = codesize_report
def upload(self, gcs_bucket, is_release_version=False, namespace=None):
"""Uploads artifacts from the build to Google Cloud Storage.
Args:
gcs_bucket (str): GCS bucket name to upload build results to.
is_release_version (bool): True if checkout is a release version.
namespace (str|None): A unique namespace for the GCS upload
location; if None, the current build's ID is used.
"""
assert gcs_bucket
with self._api.step.nest("upload build results"):
self._api.build._upload_build_results(
self, gcs_bucket, is_release_version, namespace
)
self._api.build._upload_package_snapshot(self, gcs_bucket, namespace)
def upload_tracing_data(self, gcs_bucket, namespace):
"""Uploads GN and ninja tracing results for this build to GCS.
Args:
gcs_bucket (str): GCS bucket name to upload build results to.
namespace (str|None): A unique namespace for the GCS upload
location; if None, the current build's ID is used.
"""
assert gcs_bucket
self._api.build._upload_tracing_data(self, gcs_bucket, namespace)
@attr.s(frozen=True)
class _GNResults(object):
"""_GNResults represents the result of a `gn gen` invocation in the fuchsia build.
It exposes the API of the build, which defines how one can invoke
ninja.
"""
_api = attr.ib()
build_dir = attr.ib()
fint_set_artifacts = attr.ib(
# Optional for convenience when writing tests. Production recipe code
# should always populate this field.
default=None,
# Proto objects are not hashable.
hash=False,
)
_compdb_cache = attr.ib(factory=dict, init=False)
# The following attributes are private because they are only intended to be
# used from within this recipe module, not by any recipes that use this
# recipe module.
_fint_path = attr.ib(default=None)
_fint_params_path = attr.ib(default=None)
_fint_context = attr.ib(default=None)
def __attrs_post_init__(self):
# Eagerly read in the tools manifest so that it always has the same
# step name regardless of when the caller first accesses the manifest.
self._tools # pylint: disable=pointless-statement
@property
def skip_build(self):
"""Whether it's safe to skip doing a full build."""
return self.fint_set_artifacts.skip_build
@property
def gn_trace_path(self):
"""The path to a GN trace file produced by `fint set`."""
return self.fint_set_artifacts.gn_trace_path
@cached_property
def generated_sources(self):
"""Returns the generated source files (list(str)) from the fuchsia build.
The returned paths are relative to the fuchsia build directory.
"""
return self._api.file.read_json(
"read generated sources",
self.build_dir.join("generated_sources.json"),
test_data=["generated_header.h"],
)
def tool(self, name, cpu="x64", os=None, mock_for_tests=True):
"""Returns the path to the specified tool provided from the tool_paths
manifest.
Args:
name (str): The short name of the tool, as it appears in
tool_paths.json (usually the same as the basename of the
executable).
cpu (str): The arch of the machine the tool will run on.
os (str): The OS of the machine the tool will run on.
            mock_for_tests (bool): Whether to mock the tool info if it's not
                found in tool_paths.json. Ignored in production; only useful
                in recipe unit test mode for getting code coverage of the
                missing-tool code path.
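
        Example (illustrative only; the tool name and resolved path below are
        hypothetical and depend on the contents of tool_paths.json):

            gn_results.tool("ninja")
            # -> Path to <build_dir>/linux_x64/ninja, assuming an entry with
            #    name="ninja", cpu="x64", os="linux".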
"""
os = os or self._api.platform.name
try:
tool = self._tools[name, cpu, os]
except KeyError:
# If we're in recipe unit testing mode, just return some mock info
# for the tool if it's not in tool_paths.json. Requiring that every
# tool used by the recipe shows up in the mock tool_paths.json is a
# maintenance burden, since adding a dependency on a new tool also
# requires modifying the mock tool_paths.json. It would also
# create much more noise in expectation files.
if self._api.build._test_data.enabled and mock_for_tests:
tool = {"path": "%s_%s/%s" % (os, cpu, name)}
else:
raise NoSuchTool(name, cpu, os)
relpath = tool["path"]
try:
return self._api.path.abs_to_path(
self._api.path.realpath(self.build_dir.join(relpath))
)
except ValueError: # pragma: no cover
raise self._api.step.StepFailure(
"Invalid path in tool_paths.json: %s" % relpath
)
@cached_property
def _tools(self):
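        """Maps (name, cpu, os) tuples to their entries in tool_paths.json."""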
tools = {}
tool_paths_manifest = self._api.file.read_json(
"read tool_paths manifest",
self.build_dir.join(TOOL_PATHS_JSON),
test_data=[
{"name": "foo", "cpu": "x64", "os": "linux", "path": "linux_x64/foo"}
],
)
for tool in tool_paths_manifest:
key = (tool["name"], tool["cpu"], tool["os"])
assert key not in tools, (
"only one tool with (name, cpu, os) == (%s, %s, %s) is allowed" % key
)
tools[key] = tool
return tools
@cached_property
def triage_sources(self):
"""Returns the absolute paths of the files defined in the triage_sources
manifest."""
triage_sources_manifest = self._api.file.read_json(
"read triage_sources manifest",
self.build_dir.join(TRIAGE_SOURCES_JSON),
test_data=self._api.build.test_api.mock_triage_sources_manifest(),
)
return [
self._api.path.abs_to_path(
self._api.path.realpath(self.build_dir.join(source))
)
for source in triage_sources_manifest
]
@cached_property
def rbe_config_path(self):
"""Returns the checkout relative path to the RBE config specified in
the Fuchsia source tree."""
rbe_config_manifest = self._api.file.read_json(
"read rbe_config manifest",
self.build_dir.join(RBE_CONFIG_JSON),
test_data=[{"path": "../../path/to/rbe/config.cfg"}],
)
assert len(rbe_config_manifest) == 1
rbe_config_path = self._api.path.abs_to_path(
self._api.path.realpath(self.build_dir.join(rbe_config_manifest[0]["path"]))
)
return rbe_config_path
@property
def compdb_path(self):
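        """The path to the GN-generated compilation database in the build dir."""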
return self.build_dir.join(GN_COMPDB_FILENAME)
def filtered_compdb(self, filters=()):
"""The path to a merged compilation database, filtered via the passed
filters."""
filters = tuple(filters) # Ensure hashability.
if filters not in self._compdb_cache:
self._compdb_cache[filters] = self._filtered_compdb(filters)
return self._compdb_cache[filters]
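    # Illustrative usage (the filter value is hypothetical): calling
    # filtered_compdb(filters=["third_party"]) writes a copy of the compdb that
    # omits any entry whose file path contains a "third_party" segment.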
def _filtered_compdb(self, filters):
def keep_in_compdb(entry):
# Filenames are relative to the build directory, and the build
# directory is absolute.
build_dir = self._api.path.abs_to_path(entry["directory"])
abspath = self._api.path.realpath(
self._api.path.join(build_dir, entry["file"])
)
try:
path = self._api.path.abs_to_path(abspath)
except ValueError:
# This happens if `abspath` is not rooted in one of the path
# module's known paths, which is the case for paths in `/bin`,
# for example. That also implies that the path isn't in the
# build directory, so skip it.
return False
if build_dir.is_parent_of(path):
return False
segments = entry["file"].split(self._api.path.sep)
            if any(bad_segment in segments for bad_segment in filters):
return False
return True
compdb_filtered = [entry for entry in self._compdb if keep_in_compdb(entry)]
compdb_path = self._api.path["cleanup"].join(GN_COMPDB_FILENAME)
self._api.file.write_json(
"write filtered compdb",
compdb_path,
compdb_filtered,
# This file is generally massive, so we don't want to upload it to
# Logdog because that would take ages.
include_log=False,
)
return compdb_path
@cached_property
def _compdb(self):
# Assumes that the current builder is configured to generate compdbs.
return self._api.file.read_json(
"read compdb",
self.compdb_path,
test_data=[
{
"directory": "[START_DIR]/out/not-default",
"file": "../../foo.cpp",
"command": "clang++ foo.cpp",
},
{
"directory": "[START_DIR]/out/not-default",
"file": "../../third_party/foo.cpp",
"command": "clang++ third_party/foo.cpp",
},
{
"directory": "[START_DIR]/out/not-default",
"file": "../../out/not-default/foo.cpp",
"command": "clang++ foo.cpp",
},
{
"directory": "[START_DIR]/out/not-default",
"file": "/bin/ln",
"command": "/bin/ln -s foo bar",
},
],
include_log=False,
)
@cached_property
def zbi_tests(self):
"""Returns the ZBI tests from the Fuchsia build directory."""
return self._api.file.read_json(
"read zbi test manifest",
self.build_dir.join("zbi_tests.json"),
test_data=[],
)
class FuchsiaBuildApi(recipe_api.RecipeApi):
"""APIs for building Fuchsia."""
FINT_PARAMS_PROPERTY = "fint_params"
FINT_PARAMS_PATH_PROPERTY = "fint_params_path"
NoSuchTool = NoSuchTool
def __init__(self, props, *args, **kwargs):
super(FuchsiaBuildApi, self).__init__(*args, **kwargs)
self._clang_toolchain = props.clang_toolchain
self._clang_toolchain_dir = ""
self._gcc_toolchain = props.gcc_toolchain
self._gcc_toolchain_dir = ""
self._rust_toolchain = props.rust_toolchain
self._rust_toolchain_dir = ""
self._emitted_ninja_duration = False
@property
def clang_toolchain_dir(self):
if not self._clang_toolchain_dir and self._clang_toolchain.version:
self._clang_toolchain_dir = self._download_toolchain(
"clang", self._clang_toolchain, "third_party/clang"
)
return self._clang_toolchain_dir
@clang_toolchain_dir.setter
def clang_toolchain_dir(self, value):
self._clang_toolchain_dir = value
@property
def gcc_toolchain_dir(self):
if not self._gcc_toolchain_dir and self._gcc_toolchain.version:
self._gcc_toolchain_dir = self._download_toolchain(
"gcc", self._gcc_toolchain, "third_party/gcc"
)
return self._gcc_toolchain_dir
@gcc_toolchain_dir.setter
def gcc_toolchain_dir(self, value):
self._gcc_toolchain_dir = value
@property
def rust_toolchain_dir(self):
if not self._rust_toolchain_dir and self._rust_toolchain.version:
self._rust_toolchain_dir = self._download_toolchain(
"rust", self._rust_toolchain, "third_party/rust"
)
return self._rust_toolchain_dir
@rust_toolchain_dir.setter
def rust_toolchain_dir(self, value):
self._rust_toolchain_dir = value
def with_options(
self,
checkout,
fint_params_path,
build_dir=None,
collect_coverage=False,
incremental=False,
sdk_id=None,
**kwargs
):
"""Builds Fuchsia from a Jiri checkout.
Depending on the fint parameters, may or may not build
Fuchsia-the-operating-system (i.e. the Fuchsia images).
Args:
checkout (CheckoutResult): The Fuchsia checkout result.
fint_params_path (str): The path, relative to the checkout root,
of a platform spec textproto to pass to fint.
build_dir (Path): The build output directory.
collect_coverage (bool): Whether to build for collecting
coverage.
incremental (bool): Whether or not to build incrementally.
sdk_id (str): If specified, set sdk_id in GN.
**kwargs (dict): Passed through to _build().
Returns:
A FuchsiaBuildResults, representing the build.
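
        Example (illustrative sketch; the fint params path and GCS bucket are
        hypothetical):

            build_results = api.build.with_options(
                checkout=checkout,
                fint_params_path="fint_params/core.x64.textproto",
            )
            if build_results:
                build_results.upload(gcs_bucket="my-artifact-bucket")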
"""
if incremental:
cache_ctx = self.m.cache.guard("incremental")
else:
cache_ctx = contextlib.nullcontext()
with self.m.step.nest("build") as presentation, self.m.macos_sdk(), cache_ctx:
fint_params = self.m.file.read_text(
"read fint params",
checkout.root_dir.join(fint_params_path),
test_data='field: "value"',
)
presentation.logs["fint_params"] = fint_params
# Write the fint params file to output properties so it can be
# logged by the orchestrator build if we're running in subbuild
# mode.
presentation.properties[self.FINT_PARAMS_PROPERTY] = fint_params
# Write the fint params path to output properties so `fx repro`
# knows which file to use.
presentation.properties[self.FINT_PARAMS_PATH_PROPERTY] = fint_params_path
gn_results = self.gen(
checkout=checkout,
fint_params_path=fint_params_path,
build_dir=build_dir,
sdk_id=sdk_id,
collect_coverage=collect_coverage,
presentation=presentation,
)
# Upload tests.json to Logdog so it's available to aid in debugging.
self.m.file.read_text(
"read tests.json", gn_results.build_dir.join("tests.json")
)
if self.can_exit_early_if_unaffected(checkout) and gn_results.skip_build:
return None
return self._build(
checkout=checkout,
gn_results=gn_results,
presentation=presentation,
**kwargs
)
def gen(
self,
checkout,
fint_params_path,
build_dir=None,
sdk_id=None,
collect_coverage=False,
presentation=None,
):
"""Sets up and calls `gn gen`.
Args:
checkout (CheckoutApi.CheckoutResults): The checkout results.
fint_params_path (str): The path to a build params textproto to
pass to fint, relative to the checkout directory.
            build_dir (Path): The build directory; defaults to "out/not-default".
                Note that changing the build dir invalidates Goma caches, so a
                non-default build dir should ideally only be used when Goma is
                disabled.
            sdk_id (str): The current SDK ID.
collect_coverage (bool): Whether to build for collecting coverage.
presentation (StepPresentation): Step to attach logs to.
Returns:
A _GNResults object.
"""
assert fint_params_path, "fint_params_path must be set"
if not build_dir:
# Some parts of the build require the build dir to be two
# directories nested underneath the checkout dir.
# We choose the path to be intentionally different from
# "out/default" because that is what most developers use locally and we
# want to prevent the build from relying on those directory names.
build_dir = checkout.root_dir.join("out", "not-default")
# with_options() normally handles setting up the macOS SDK, but we need
# to ensure it's set up here to support recipes that call `gen()`
# directly.
with self.m.macos_sdk(), self.m.context(cwd=checkout.root_dir):
context = self._fint_context(
checkout=checkout,
build_dir=build_dir,
sdk_id=sdk_id,
collect_coverage=collect_coverage,
)
fint_path = self._fint_path(checkout)
# We call this step `gn gen` even though it runs `fint set`
# to avoid confusion about when the recipe runs `gn gen`.
step = self._run_fint(
"gn gen",
"set",
fint_path,
checkout.root_dir.join(fint_params_path),
context,
)
# Artifacts should be produced even if some `fint set` steps
# failed, as the artifacts may contain useful logs.
artifacts = self.m.file.read_proto(
"read fint set artifacts",
self.m.path.join(context.artifact_dir, FINT_SET_ARTIFACTS),
fint_set_artifacts_pb2.SetArtifacts,
"JSONPB",
test_proto=fint_set_artifacts_pb2.SetArtifacts(
gn_trace_path=self.m.path.join(
context.artifact_dir, "mock-gn-trace.json"
),
use_goma=True,
enable_rbe=True,
metadata=dict(
board="boards/x64.gni",
optimize="debug",
product="products/core.gni",
target_arch="x64",
variants=["asan"],
),
),
)
# Log the full failure summary so the entire text is visible even if
# it's truncated in the summary markdown.
if artifacts.failure_summary and presentation:
presentation.logs[
FAILURE_SUMMARY_LOG
] = artifacts.failure_summary.splitlines()
if step.retcode:
if artifacts.failure_summary:
msg = self.m.buildbucket_util.summary_message(
artifacts.failure_summary,
"(failure summary truncated, see the '%s' log for "
"full failure details)" % FAILURE_SUMMARY_LOG,
)
else:
msg = "Unrecognized fint set failure, see stdout for details"
raise self.m.step.StepFailure(msg)
elif artifacts.failure_summary:
raise self.m.step.StepFailure(
"`fint set` emitted a failure summary but had a retcode of zero"
)
return self.gn_results(
build_dir,
artifacts,
fint_path=fint_path,
fint_params_path=checkout.root_dir.join(fint_params_path),
fint_context=context,
)
def _build(
self,
checkout,
gn_results,
presentation,
allow_dirty=False,
gcs_bucket=None,
stats_gcs_bucket=None,
upload_namespace=None,
timeout_secs=90 * 60,
):
"""Runs `fint build`.
        `fint build` consumes the GN build API modules from the build directory,
        along with the fint params, to determine which targets to build.
Args:
checkout (CheckoutResult): The Fuchsia checkout result.
gn_results (_GNResults): GN gen results.
presentation (StepPresentation): Presentation to attach important
logs to.
            allow_dirty (bool): Whether to skip the Ninja no-op check.
gcs_bucket (str or None): A GCS bucket to upload crash reports
to.
stats_gcs_bucket (str): GCS bucket name to upload build stats to.
            upload_namespace (str): The namespace within the build stats GCS
                bucket to upload to.
            timeout_secs (int): The timeout for running `fint build`, in
                seconds.
Returns:
A FuchsiaBuildResults, representing the build.
"""
if gn_results.fint_set_artifacts.use_goma:
self.m.goma.set_path(self.m.path.dirname(gn_results.tool("goma_ctl")))
goma_context = self.m.goma.build_with_goma()
else:
goma_context = contextlib.nullcontext()
if gn_results.fint_set_artifacts.enable_rbe:
self.m.rbe.set_path(self.m.path.dirname(gn_results.tool("rewrapper")))
self.m.rbe.set_config_path(gn_results.rbe_config_path)
rbe_context = self.m.rbe()
else:
rbe_context = contextlib.nullcontext()
context = copy.deepcopy(gn_results._fint_context)
context.skip_ninja_noop_check = allow_dirty
with goma_context, rbe_context:
fint_build_step = self._run_fint(
step_name="ninja",
command="build",
fint_path=gn_results._fint_path,
static_path=gn_results._fint_params_path,
context=context,
timeout=timeout_secs,
)
# Artifacts should be produced even if some `fint build` steps failed,
# as the artifacts may contain useful logs that will help understand
# the cause of failure.
fint_build_artifacts = self.m.file.read_proto(
"read fint build artifacts",
self.m.path.join(context.artifact_dir, FINT_BUILD_ARTIFACTS),
fint_build_artifacts_pb2.BuildArtifacts,
"JSONPB",
test_proto=self.test_api.fint_build_artifacts_proto(
# This isn't super realistic because tests sometimes aren't
# included in the build graph, in which case there would never
# be any affected tests even in CQ with changed files. But it's
# simplest if we just configure the mock affected tests here.
affected_tests=["test1", "test2"] if checkout.changed_files() else [],
built_targets=["target1", "target2"],
built_images=[{"name": "foo", "type": "blk", "path": "foo.img"}],
),
)
# Log the full failure summary so the entire text is visible even if
# it's truncated in the summary markdown.
if fint_build_artifacts.failure_summary:
presentation.logs[
FAILURE_SUMMARY_LOG
] = fint_build_artifacts.failure_summary.splitlines()
if fint_build_artifacts.log_files:
with self.m.step.nest("read fint log files"):
for name, path in fint_build_artifacts.log_files.items():
presentation.logs[name] = self.m.file.read_text(
"read %s" % self.m.path.basename(path), path, include_log=False
).splitlines()
# Only emit the ninja duration once even if we build multiple times.
# The builders for which we care about tracking the Ninja duration all
# only build once. For builders like perfcompare that build twice,
# the second build is incremental, so it is very fast and its duration
# is less meaningful than the original build's duration.
if not self._emitted_ninja_duration:
presentation.properties[
"ninja_duration_seconds"
] = fint_build_artifacts.ninja_duration_seconds
self._emitted_ninja_duration = True
if stats_gcs_bucket and upload_namespace:
try:
self._upload_buildstats_output(
gn_results, stats_gcs_bucket, upload_namespace, fint_build_artifacts
)
except Exception as e:
self.m.step.empty("upload buildstats failure").presentation.logs[
"exception"
] = str(e).splitlines()
if fint_build_step.retcode:
if fint_build_artifacts.failure_summary:
msg = self.m.buildbucket_util.summary_message(
fint_build_artifacts.failure_summary,
"(failure summary truncated, see the '%s' log for "
"full failure details)" % FAILURE_SUMMARY_LOG,
)
else:
msg = "Unrecognized fint build failure, see stdout for details"
with self.m.step.nest("clang-crashreports"), self.m.context(
infra_steps=True
):
self._upload_crash_report(
gn_results.build_dir, gcs_bucket, self.m.buildbucket.build.id
)
raise self.m.step.StepFailure(msg)
elif fint_build_artifacts.failure_summary:
raise self.m.step.StepFailure(
"`fint build` emitted a failure summary but had a retcode of zero"
)
# Recursively convert to a dict so that we don't have to deal with
# proto Struct objects, which have some annoying properties like
# not showing the missing key value when raising a KeyError.
artifacts_dict = jsonpb.MessageToDict(
fint_build_artifacts,
including_default_value_fields=True,
preserving_proto_field_name=True,
)
return self.build_results(
build_dir=gn_results.build_dir,
checkout=checkout,
gn_results=gn_results,
images=artifacts_dict["built_images"],
archives=artifacts_dict["built_archives"],
ninja_targets=artifacts_dict["built_targets"],
zedboot_images=artifacts_dict["built_zedboot_images"],
zbi_test_qemu_kernel_images=artifacts_dict["zbi_test_qemu_kernel_images"],
fint_build_artifacts=fint_build_artifacts,
)
def can_exit_early_if_unaffected(self, checkout):
"""Returns whether or not we can safely skip building (and testing)."""
return (
# No changed_files -> CI, which we always want to test. fint should
# also take this into account and never indicate that testing can be
# skipped if there are no changed files, but we add an extra check
# here to be safe.
checkout.changed_files()
# Changes to integration, in particular for both infra configs and
# jiri manifests, can affect any stage of the build but generally
# won't cause changes to the build graph.
and not checkout.contains_integration_patch
# Recipe changes can also impact all stages of a build, but recipes
# aren't included in the build graph.
and not self.m.recipe_testing.enabled
)
def gn_results(self, *args, **kwargs):
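        """Constructs a _GNResults object bound to this module's API."""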
return _GNResults(self.m, *args, **kwargs)
def build_results(self, *args, **kwargs):
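        """Constructs a _FuchsiaBuildResults object bound to this module's API."""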
return _FuchsiaBuildResults(self.m, *args, **kwargs)
def download_test_orchestration_inputs(self, digest):
return _TestOrchestrationInputs.download(self.m, digest)
def test_orchestration_inputs_from_build_results(self, *args, **kwargs):
return _TestOrchestrationInputs.from_build_results(self.m, *args, **kwargs)
def test_orchestration_inputs_property_name(self, without_cl):
if without_cl:
return _TestOrchestrationInputs.DIGEST_PROPERTY_WITHOUT_CL
else:
return _TestOrchestrationInputs.DIGEST_PROPERTY
@memoize
def _fint_context(self, checkout, build_dir, sdk_id, collect_coverage):
"""Assembles a fint Context spec."""
return context_pb2.Context(
checkout_dir=str(checkout.root_dir),
build_dir=str(build_dir),
artifact_dir=str(self.m.path.mkdtemp("fint_artifacts")),
sdk_id=sdk_id or "",
changed_files=[
context_pb2.Context.ChangedFile(
path=self.m.path.relpath(path, checkout.root_dir)
)
for path in checkout.changed_files()
],
cache_dir=str(self.m.path["cache"]),
release_version=checkout.release_version,
clang_toolchain_dir=str(self.clang_toolchain_dir),
gcc_toolchain_dir=str(self.gcc_toolchain_dir),
rust_toolchain_dir=str(self.rust_toolchain_dir),
collect_coverage=collect_coverage,
goma_job_count=self.m.goma.jobs,
)
@memoize
def _fint_path(self, checkout):
"""Builds and returns the path to a fint executable."""
# TODO(olivernewman): Rebuild fint if the checkout hash changes (e.g.
# for perfcompare).
fint_path = self.m.path.mkdtemp("fint").join("fint")
bootstrap_path = checkout.root_dir.join("tools", "integration", "bootstrap.sh")
self.m.step("bootstrap fint", [bootstrap_path, "-o", fint_path])
return fint_path
def _run_fint(
self,
step_name,
command,
fint_path,
static_path,
context,
timeout=None,
**kwargs
):
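        """Runs an `fint` subcommand with the given static params and context.

        Returns the step result without raising on a non-zero retcode so that
        callers can read fint's output artifacts before deciding how to fail;
        raises StepFailure only if the step timed out.
        """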
context_textproto = self.m.proto.encode(context, "TEXTPB")
step = self.m.step(
step_name,
[
fint_path,
command,
"-static",
static_path,
"-context",
self.m.raw_io.input(context_textproto),
],
ok_ret="any",
timeout=timeout,
**kwargs
)
if step.retcode or step.exc_result.had_timeout:
step.presentation.status = self.m.step.FAILURE
step.presentation.properties[BUILD_FAILED_PROPERTY] = True
step.presentation.step_text = "run by `fint %s`" % command
step.presentation.logs["context.textproto"] = context_textproto
if step.exc_result.had_timeout:
timeout_string = "%d seconds" % timeout
if timeout > 60:
timeout_string = "%d minutes" % (timeout / 60)
raise self.m.step.StepFailure(
"`%s` timed out after %s." % (step_name, timeout_string)
)
return step
def _upload_crash_report(self, build_dir, gcs_bucket, build_id):
crashreports_dir = build_dir.join("clang-crashreports")
self.m.path.mock_add_paths(crashreports_dir)
if not gcs_bucket or not self.m.path.exists(crashreports_dir):
return # pragma: no cover
temp = self.m.path.mkdtemp("reproducers")
reproducers = self.m.file.glob_paths(
"find reproducers",
crashreports_dir,
"*.sh",
test_data=[crashreports_dir.join("foo.sh")],
)
for reproducer in reproducers:
base = self.m.path.splitext(self.m.path.basename(reproducer))[0]
files = self.m.file.glob_paths(
"find %s files" % base,
crashreports_dir,
base + ".*",
test_data=[
crashreports_dir.join("foo.sh"),
crashreports_dir.join("foo.cpp"),
],
)
tgz_basename = "%s.tar.gz" % base
tgz_path = temp.join(tgz_basename)
archive = self.m.tar.create(tgz_path, compression="gzip")
for f in files:
archive.add(f, crashreports_dir)
archive.tar("create %s" % tgz_basename)
self.m.gsutil.upload_namespaced_file(
source=tgz_path,
bucket=gcs_bucket,
subpath=tgz_basename,
namespace=build_id,
)
def _upload_buildstats_output(
self, gn_results, gcs_bucket, build_id, fint_build_artifacts
):
"""Runs the buildstats command for Fuchsia and uploads the output files to GCS."""
buildstats_binary_path = gn_results.tool("buildstats")
self.m.path.mock_add_paths(buildstats_binary_path)
if not self.m.path.exists(buildstats_binary_path): # pragma: no cover
# We might be trying to run buildstats after catching a build
# failure, in which case ninja may not even have gotten as far as
# building the buildstats tool.
raise Exception("The build did not produce the buildstats tool")
output_name = "fuchsia-buildstats.json"
output_path = self.m.path.mkdtemp("buildstats").join(output_name)
command = [
buildstats_binary_path,
"--ninjalog",
fint_build_artifacts.ninja_log_path,
"--compdb",
fint_build_artifacts.ninja_compdb_path,
"--graph",
fint_build_artifacts.ninja_graph_path,
"--output",
output_path,
]
with self.m.context(cwd=gn_results.build_dir):
self.m.step("fuchsia buildstats", command)
self.m.gsutil.upload_namespaced_file(
source=output_path,
bucket=gcs_bucket,
subpath=output_name,
namespace=build_id,
)
def _download_toolchain(self, name, toolchain, cipd_package):
"""Downloads a prebuilt toolchain from CAS or CIPD.
Args:
name (str): Name of the toolchain (e.g. "clang").
toolchain (CustomToolchain): Information about where to
download the toolchain from.
            cipd_package (str): The name of the CIPD package to download if
                toolchain.source is "cipd".
Returns: A Path to the root of a temporary directory where the
toolchain was downloaded.
"""
with self.m.step.nest("%s_toolchain" % name), self.m.context(infra_steps=True):
root_dir = self.m.path.mkdtemp(name)
if toolchain.source == "cipd":
pkgs = self.m.cipd.EnsureFile()
pkgs.add_package(
"fuchsia/%s/${platform}" % cipd_package, toolchain.version
)
self.m.cipd.ensure(root_dir, pkgs)
elif toolchain.source == "isolated":
self.m.cas_util.download(
step_name="download",
digest=toolchain.version,
output_dir=root_dir,
)
else: # pragma: no cover
raise KeyError(
'%s_toolchain source "%s" not recognized' % (name, toolchain.source)
)
return root_dir
def _upload_build_results(
self, build_results, gcs_bucket, is_release_version, namespace
):
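        """Uploads build archives (signed on release builds when possible),
        binary sizes, and blobstats output for this build."""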
assert gcs_bucket
# Upload archives.
for archive in build_results.archives:
metadata = None
path = build_results.build_dir.join(archive["path"])
            # Try to sign the build archive. If this is a release build and the
            # signing script returns a signature, add it to the metadata and
            # upload the public key for verification.
# TODO(fxb/51162): Remove once this is no longer used.
if is_release_version and archive["name"] == "archive":
signature = self._try_sign_archive(path)
if signature:
# Add the signature to the metadata.
metadata = {
"x-goog-meta-signature": signature,
}
# Upload the public key to GCS.
# Note that RELEASE_PUBKEY_PATH should always exist because a
# signature should only be generated if RELEASE_PUBKEY_PATH exists.
self.m.gsutil.upload_namespaced_file(
source=RELEASE_PUBKEY_PATH,
bucket=gcs_bucket,
subpath=RELEASE_PUBKEY_FILENAME,
namespace=namespace,
)
# Upload the archive
self.m.gsutil.upload_namespaced_file(
source=path,
bucket=gcs_bucket,
subpath=self.m.path.basename(path),
namespace=namespace,
metadata=metadata,
)
self._upload_binary_sizes(build_results)
self._upload_blobstats_output(build_results, gcs_bucket, namespace)
def _try_sign_archive(self, archive_path):
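        """Runs the signing script over the archive and returns its stdout,
        which is expected to contain the signature, if one was produced."""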
args = [
"--archive-file",
archive_path,
]
return self.m.step(
"run signing script",
cmd=[
"vpython",
"-vpython-spec",
self.resource("sign.py.vpython"),
"-u",
self.resource("sign.py"),
]
+ args,
stdout=self.m.raw_io.output_text(),
).stdout
def _upload_package_snapshot(self, build_results, gcs_bucket, build_id):
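        """Uploads the system package snapshot to GCS, if the build produced
        one, and records build metadata in BigQuery."""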
assert gcs_bucket
snapshot_path = build_results.build_dir.join(
"obj", "build", "images", "system.snapshot"
)
if not self.m.path.exists(snapshot_path):
return
        # Upload the system snapshot data to GCS.
snapshot = self.m.file.read_text("read package snapshot file", snapshot_path)
self.m.gsutil.upload_namespaced_file(
source=self.m.json.input(
{
"build_id": self.m.buildbucket.build.id,
"snapshot": snapshot,
}
),
bucket=gcs_bucket,
subpath="system.snapshot.json",
namespace=build_id,
)
# Upload a new table row describing this particular build. Other tables' rows
# are linked into this table using the build id as a foreign key.
builds_entry = {
"bucket": self.m.buildbucket.bucket_v1,
"builder": self.m.buildbucket.builder_name,
"build_id": self.m.buildbucket.build.id,
"gitiles_commit": [self.m.buildbucket.gitiles_commit.id],
"datetime": str(self.m.buildbucket.build.create_time.ToDatetime()),
"start_time": str(self.m.buildbucket.build.start_time.ToDatetime()),
"repo": self.m.buildbucket.build_input.gitiles_commit.project,
"arch": build_results.set_metadata.target_arch,
"product": build_results.set_metadata.product,
"board": build_results.set_metadata.board,
"channel": [""],
}
self.m.bqupload.insert(
step_name="add table row: %s/%s/builds_beta"
% (BIGQUERY_PROJECT, BIGQUERY_ARTIFACTS_DATASET),
project=BIGQUERY_PROJECT,
dataset=BIGQUERY_ARTIFACTS_DATASET,
table="builds_beta",
rows=[builds_entry],
)
def _upload_binary_sizes(self, build_results):
"""Uploads size checks to BigQuery.
The upload also includes metadata about this build so that the
data can be used to create a self-contained BigQuery table.
"""
if not build_results._binary_sizes[0]: # pragma: no cover
return
metadata = {
"builder_name": self.m.buildbucket.builder_name,
# This field is set to a string in the BQ table schema because it's just
# an opaque ID. The conversion from int to string that happens on the
# BigQuery side is not what we want, so convert here.
"build_id": str(self.m.buildbucket.build.id),
"build_create_time_seconds": self.m.buildbucket.build.create_time.seconds,
"gitiles_commit_host": self.m.buildbucket.gitiles_commit.host,
"gitiles_commit_id": self.m.buildbucket.gitiles_commit.id,
"gitiles_commit_project": self.m.buildbucket.gitiles_commit.project,
}
rows = []
for component, item in build_results._binary_sizes[0].items():
            # The size_checker output includes additional metadata for binary
            # sizes beyond just the size/budget values themselves. Some of this
            # (e.g. the component owner URL) is encoded as strings, which do
            # not fit the BQ table schema for binary_sizes (and which we don't
            # really want to persist anyway). Since we are bound to the existing
            # JSON structure (for compatibility with the binary-sizes Gerrit
            # plugin), filter the non-conforming items out.
if isinstance(item, (int, float)):
row = metadata.copy()
row["component"] = component
row["size"] = item
rows.append(row)
self.m.bqupload.insert(
step_name="upload size_checker output",
project=BIGQUERY_PROJECT,
dataset=BIGQUERY_ARTIFACTS_DATASET,
table="binary_sizes",
rows=rows,
)
def _upload_blobstats_output(self, build_results, gcs_bucket, build_id):
"""Runs the blobstats command and uploads the output files to GCS."""
dir_name = "blobstats"
blobstats_output_dir = self.m.path["cleanup"].join(dir_name)
with self.m.context(cwd=build_results.build_dir):
result = self.m.step(
"blobstats",
[build_results.tool("blobstats"), "--output=%s" % blobstats_output_dir],
ok_ret="any",
)
# If blobstats failed, it's probably because the build intentionally
# didn't produce the input files that blobstats requires. Blobstats is
# generally just a nice-to-have anyway, so either way it's probably okay
# to silently continue without uploading results if blobstats fails.
if result.retcode != 0:
return
self.m.gsutil.upload_namespaced_directory(
source=blobstats_output_dir,
bucket=gcs_bucket,
subpath=dir_name,
namespace=build_id,
rsync=False,
)
def _upload_tracing_data(self, build_results, gcs_bucket, build_id):
"""Uploads GN and ninja tracing results for this build to GCS."""
paths_to_upload = []
if build_results.gn_results.gn_trace_path:
paths_to_upload += [
("fuchsia_gn_trace.json", build_results.gn_results.gn_trace_path),
]
try:
ninja_trace = self._extract_ninja_tracing_data(build_results)
except self.m.step.StepFailure:
pass
else:
paths_to_upload += [("fuchsia_ninja_trace.json", ninja_trace)]
with self.m.step.nest("upload traces") as presentation:
for filename, path in paths_to_upload:
step = self.m.gsutil.upload_namespaced_file(
source=path,
bucket=gcs_bucket,
subpath=filename,
namespace=build_id,
)
# Perfetto needs an unauthenticated URL.
# TODO(fxbug.dev/66249): Perfetto cannot load non-public traces.
# Consider hiding this link in such cases.
step.presentation.links[
"perfetto_ui"
] = "https://ui.perfetto.dev/#!?url=%s" % (
self.m.gsutil.unauthenticated_url(step.presentation.links[filename])
)
            # This link is shown as a workaround to hint to users how to load
            # non-public traces.
presentation.links[
"fuchsia.dev guide"
] = "https://fuchsia.dev/fuchsia-src/development/tracing/tutorial/converting-visualizing-a-trace#html-trace"
def _extract_ninja_tracing_data(self, build_results):
"""Extracts the tracing data by combining .ninjalog, compdb and graph.
Args:
build_results (_FuchsiaBuildResults): The results of the build.
"""
trace = self.m.path.mkdtemp("ninja-trace").join("fuchsia_ninja_trace.json")
self.m.step(
"ninjatrace",
[
build_results.tool("ninjatrace"),
"-ninjalog",
build_results.fint_build_artifacts.ninja_log_path,
"-compdb",
build_results.fint_build_artifacts.ninja_compdb_path,
"-graph",
build_results.fint_build_artifacts.ninja_graph_path,
"-critical-path",
"-trace-json",
trace,
],
stdout=self.m.raw_io.output_text(leak_to=trace),
)
return trace