blob: c82ced4c056642b015a33fd8d9bfc21d5106a309 [file] [log] [blame]
# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Clang toolchain."""
import contextlib
from recipe_engine.recipe_api import Property
from recipe_engine.config import Enum
from PB.go.chromium.org.luci.common.proto.srcman.manifest import Manifest
import re
# Pinned CIPD version of the "git for windows" package; installed on Windows
# bots so the POSIX tools it bundles can be put on PATH (see ensure_packages).
GIT_VERSION = "version:2@2.35.1.chromium.8"

# Recipe modules this recipe depends on, resolved by the recipe engine.
DEPS = [
    "fuchsia/buildbucket_util",
    "fuchsia/cas_util",
    "fuchsia/cipd_util",
    "fuchsia/git",
    "fuchsia/git_checkout",
    "fuchsia/go",
    "fuchsia/goma",
    "fuchsia/gsutil",
    "fuchsia/macos_sdk",
    "fuchsia/ninja",
    "fuchsia/python3",
    "fuchsia/rbe",
    "fuchsia/status_check",
    "fuchsia/tar",
    "fuchsia/toolchain",
    "fuchsia/windows_sdk",
    "recipe_engine/buildbucket",
    "recipe_engine/cipd",
    "recipe_engine/context",
    "recipe_engine/file",
    "recipe_engine/path",
    "recipe_engine/platform",
    "recipe_engine/properties",
    "recipe_engine/raw_io",
    "recipe_engine/resultdb",
    "recipe_engine/step",
]

# Fuchsia target CPU name -> LLVM architecture name.
TARGET_TO_ARCH = {
    "x64": "x86_64",
    "arm64": "aarch64",
}
TARGETS = TARGET_TO_ARCH.keys()

# Git mirrors of the third-party dependencies built alongside LLVM.
LIBXML2_GIT = "https://fuchsia.googlesource.com/third_party/libxml2"
ZLIB_GIT = "https://fuchsia.googlesource.com/third_party/zlib"
ZSTD_GIT = "https://fuchsia.googlesource.com/third_party/zstd"
BORINGSSL_GIT = "https://boringssl.googlesource.com/boringssl"
CURL_GIT = "https://fuchsia.googlesource.com/third_party/curl"
YASM_GIT = "https://fuchsia.googlesource.com/third_party/yasm"

CIPD_SERVER_HOST = "chrome-infra-packages.appspot.com"

# TODO(fxbug.dev/91157): Restore the file name once
# path length issue is properly fixed.
RESULTDB_JSON = "r.j"
# Name of the log file ninja writes into each build directory.
NINJA_LOG = ".ninja_log"

# TODO: migrate properties to .proto
PROPERTIES = {
    "repository": Property(
        kind=str,
        help="Git repository URL",
        default="https://llvm.googlesource.com/llvm-project",
    ),
    "revision": Property(kind=str, help="Git revision", default="refs/heads/main"),
    "platform": Property(kind=str, help="CIPD platform for the target", default=None),
    "do_2stage": Property(kind=bool, help="Do a 2-stage build", default=None),
    "enable_lto": Property(kind=bool, help="Enable LTO", default=None),
    "lto_mode": Property(kind=Enum("full", "thin"), help="LTO mode", default="full"),
    "enable_lld": Property(kind=bool, help="Enable LLD linker", default=None),
    "use_rbe": Property(
        kind=bool,
        help="Use reclient for remote C++ builds (takes precedence over Goma)",
        default=False,
    ),
    "enable_assertions": Property(kind=bool, help="Enable assertions", default=False),
    "enable_backtraces": Property(kind=bool, help="Enable backtraces", default=False),
    "use_inliner_model": Property(kind=bool, help="Use inliner model", default=True),
    "upload_package": Property(kind=bool, help="Upload package to CIPD", default=None),
    "artifact_gcs_bucket": Property(
        kind=str,
        help="GCS Bucket for uploading build log and traces",
        default="fuchsia-artifacts",
    ),
    "builders": Property(
        kind=dict,
        help=(
            "Mapping from platform name to list of builders to trigger "
            "(Format: '<project>/<trigger name>')"
        ),
        default=None,
    ),
    "tot_builders": Property(
        kind=dict,
        help="Mapping from platform name to list of builders to trigger that build tip of tree Fuchsia",
        default=None,
    ),
}
def slashes(api, path):
    """Return `path` with backslashes converted to forward slashes on Windows.

    On non-Windows platforms the path is returned unchanged.
    """
    if not api.platform.is_win:
        return path
    return path.replace("\\", "/")
def checkout_source(api, git, ref, src_dir, manifest):
    """Check out `git` at `ref` into `src_dir` and record it in `manifest`.

    The resolved revision (not the symbolic ref) is what gets recorded in
    the source manifest's git_checkout entry for `src_dir`.
    """
    _, resolved = api.git_checkout(git, path=src_dir, revision=ref)
    entry = manifest.directories[str(src_dir)].git_checkout
    entry.repo_url = git
    entry.revision = resolved
def build_zlib(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
    """Build zlib v1.2.11 and install it under `destdir` (via DESTDIR)."""
    src_dir = api.path["start_dir"].join("zlib", "src")
    checkout_source(api, ZLIB_GIT, "refs/tags/v1.2.11", src_dir, manifest)

    build_dir = api.path["start_dir"].join("zlib", "build")
    api.file.ensure_directory("make build dir", build_dir)

    # Caller-supplied options are format-expanded and slash-normalized.
    configure_cmd = [cipd_dir.join("bin", "cmake"), "-DCMAKE_BUILD_TYPE=Release"]
    configure_cmd += [slashes(api, opt.format(**arguments)) for opt in options]
    configure_cmd.append(src_dir)

    with api.context(cwd=build_dir):
        api.step("configure", configure_cmd)
        api.ninja("build", ["-j%d" % ninja_jobs])
        # Install is redirected into destdir via the DESTDIR convention.
        with api.context(env={"DESTDIR": destdir}):
            api.ninja("install", ["install"])
def build_zstd(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
    """Build zstd v1.5.2 and install it with its prefix forced to `destdir`.

    Unlike the other third-party builds, zstd installs through
    CMAKE_INSTALL_PREFIX rather than DESTDIR, so any caller-supplied
    CMAKE_INSTALL_PREFIX option is dropped in favor of `destdir`.
    """
    src_dir = api.path["start_dir"].join("zstd", "src")
    checkout_source(api, ZSTD_GIT, "refs/tags/v1.5.2", src_dir, manifest)

    build_dir = api.path["start_dir"].join("zstd", "build")
    api.file.ensure_directory("make build dir", build_dir)

    prefix_flag = "-DCMAKE_INSTALL_PREFIX="
    passthrough = []
    for opt in options:
        formatted = slashes(api, opt.format(**arguments))
        if not formatted.startswith(prefix_flag):
            passthrough.append(formatted)

    configure_cmd = (
        [cipd_dir.join("bin", "cmake")]
        + [
            "-DCMAKE_BUILD_TYPE=Release",
            "-DCMAKE_INSTALL_PREFIX=%s" % destdir,
            "-DZSTD_BUILD_SHARED=OFF",
        ]
        + passthrough
        # zstd's CMake build lives in a subdirectory of the source tree.
        + [src_dir.join("build", "cmake")]
    )

    with api.context(cwd=build_dir):
        api.step("configure", configure_cmd)
        api.ninja("build", ["-j%d" % ninja_jobs])
        api.ninja("install", ["install"])
@contextlib.contextmanager
def resultdb_context(api, build_dir):
    """Context manager that uploads ResultDB result files on exit.

    The upload runs in a `finally` block so results collected under
    `build_dir` are reported even when the wrapped steps fail.
    """
    try:
        yield
    finally:
        upload_to_resultdb(api, build_dir)
def upload_to_resultdb(api, build_dir):
    """Find lit result files under `build_dir` and report them to ResultDB.

    Collects every RESULTDB_JSON file (produced by LLVM lit via the
    `--resultdb-output` flag passed in LLVM_LIT_ARGS elsewhere in this
    recipe) and feeds them to the resultdb.py resource script wrapped in
    a ResultDB stream context.
    """
    if not api.resultdb.enabled:
        return  # pragma: no cover
    # Step test stub: pretend two result files were found.
    test_data = "%s\n%s\n" % (
        build_dir.join(RESULTDB_JSON),
        build_dir.join("test", RESULTDB_JSON),
    )
    # api.path.glob does not handle long path on
    # windows, use a python3 script to mitigate
    # this issue. fxbug.dev/98099
    results = api.python3(
        "collect results.json",
        [
            api.resource("find_files.py"),
            "--dir",
            build_dir,
            "--pattern",
            "**/%s" % RESULTDB_JSON,
        ],
        step_test_data=lambda: api.raw_io.test_api.stream_output_text(test_data),
        stdout=api.raw_io.output_text(add_output_log=True),
    )
    # find_files.py prints one matching path per line.
    results_paths = results.stdout.splitlines()
    if not results_paths:
        return  # pragma: no cover
    # Tag every uploaded result with the bucket/builder it came from.
    resultdb_base_variant = {
        "bucket": api.buildbucket.build.builder.bucket,
        "builder": api.buildbucket.build.builder.builder,
    }
    cmd = ["vpython", api.resource("resultdb.py")]
    cmd.extend("--json=%s" % p for p in results_paths)
    api.step(
        "resultdb",
        api.resultdb.wrap(cmd, base_variant=resultdb_base_variant.copy(), include=True),
    )
def build_libxml2(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
    """Build a static, feature-trimmed libxml2 and return its CMake dir.

    Returns the directory containing the libxml2 CMake package config,
    suitable for LibXml2_ROOT.
    """
    # libxml2 requires CMAKE_INSTALL_PREFIX to be set to a valid path so
    # replace an empty prefix with the destination directory.
    prefix_flag = "-DCMAKE_INSTALL_PREFIX="
    options = [
        prefix_flag + str(destdir) if opt == prefix_flag else opt for opt in options
    ]

    src_dir = api.path["start_dir"].join("libxml2", "src")
    checkout_source(api, LIBXML2_GIT, "refs/tags/v2.9.12", src_dir, manifest)

    build_dir = api.path["start_dir"].join("libxml2", "build")
    api.file.ensure_directory("make build dir", build_dir)

    feature_flags = [
        "-DBUILD_SHARED_LIBS=OFF",
        "-DLIBXML2_WITH_ICONV=OFF",
        "-DLIBXML2_WITH_ICU=OFF",
        "-DLIBXML2_WITH_LZMA=OFF",
        "-DLIBXML2_WITH_PYTHON=OFF",
        "-DLIBXML2_WITH_TESTS=OFF",
        # TODO(phosek): lld only links libxml2 and not zlib, so when
        # zlib support in libxml2 is enabled, this fails in the case of
        # static linking.
        "-DLIBXML2_WITH_ZLIB=OFF",
    ]

    with api.context(cwd=build_dir):
        api.step(
            "configure",
            [cipd_dir.join("bin", "cmake"), "-DCMAKE_BUILD_TYPE=Release"]
            + [slashes(api, opt.format(**arguments)) for opt in options]
            + feature_flags
            + [src_dir],
        )
        api.ninja("build", ["-j%d" % ninja_jobs])
        api.ninja("install", ["install"])

    # NOTE(review): this versioned dir says 2.9.10 while the checkout above is
    # v2.9.12 — presumably it matches the version libxml2's own build stamps
    # into its cmake install path; confirm against the actual install output.
    return destdir.join("lib", "cmake", "libxml2-2.9.10")
def build_boringssl(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
    """Build boringssl at a pinned commit; return its OpenSSL CMake dir."""
    src_dir = api.path["start_dir"].join("boringssl", "src")
    checkout_source(
        api,
        BORINGSSL_GIT,
        "48f794765b0df3310649e6a6c6f71c5cd845f445",
        src_dir,
        manifest,
    )

    build_dir = api.path["start_dir"].join("boringssl", "build")
    api.file.ensure_directory("make build dir", build_dir)

    configure_cmd = [cipd_dir.join("bin", "cmake"), "-DCMAKE_BUILD_TYPE=Release"]
    configure_cmd += [slashes(api, opt.format(**arguments)) for opt in options]
    configure_cmd.append(src_dir)

    with api.context(cwd=build_dir):
        api.step("configure", configure_cmd)
        api.ninja("build", ["-j%d" % ninja_jobs])
        # Install is redirected into destdir via the DESTDIR convention.
        with api.context(env={"DESTDIR": destdir}):
            api.ninja("install", ["install"])

    return destdir.join("lib", "cmake", "OpenSSL")
def build_curl(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
    """Build a static curl 7.82.0; return its CMake package directory."""
    src_dir = api.path["start_dir"].join("curl", "src")
    checkout_source(api, CURL_GIT, "refs/tags/curl-7_82_0", src_dir, manifest)

    build_dir = api.path["start_dir"].join("curl", "build")
    api.file.ensure_directory("make build dir", build_dir)

    configure_cmd = [cipd_dir.join("bin", "cmake"), "-DCMAKE_BUILD_TYPE=Release"]
    configure_cmd += [slashes(api, opt.format(**arguments)) for opt in options]
    configure_cmd += [
        "-DBUILD_SHARED_LIBS=OFF",
        # TLS backend; the OpenSSL package location is supplied by the caller
        # through `options`.
        "-DCURL_USE_OPENSSL=ON",
    ]
    configure_cmd.append(src_dir)

    with api.context(cwd=build_dir):
        api.step("configure", configure_cmd)
        api.ninja("build", ["-j%d" % ninja_jobs])
        # Install is redirected into destdir via the DESTDIR convention.
        with api.context(env={"DESTDIR": destdir}):
            api.ninja("install", ["install"])

    return destdir.join("lib", "cmake", "CURL")
def build_yasm(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
    """Build yasm v1.3.0 and install it under `destdir` (via DESTDIR).

    Only built on Windows, where it serves as the NASM-compatible
    assembler for the boringssl build.
    """
    src_dir = api.path["start_dir"].join("yasm", "src")
    checkout_source(api, YASM_GIT, "refs/tags/v1.3.0", src_dir, manifest)

    build_dir = api.path["start_dir"].join("yasm", "build")
    api.file.ensure_directory("make build dir", build_dir)

    configure_cmd = [cipd_dir.join("bin", "cmake"), "-DCMAKE_BUILD_TYPE=Release"]
    configure_cmd += [slashes(api, opt.format(**arguments)) for opt in options]
    configure_cmd.append(src_dir)

    with api.context(cwd=build_dir):
        api.step("configure", configure_cmd)
        api.ninja("build", ["-j%d" % ninja_jobs])
        with api.context(env={"DESTDIR": destdir}):
            api.ninja("install", ["install"])
# //zircon/public/gn/toolchain/clang.gni:clang_runtime sets the JSON schema.
#
# This function makes the prototype spec; debug and breakpad info is added by
# runtimes.py.
def make_runtimes_spec(clang_version):
    """Build the prototype runtimes spec (debug/breakpad info is added later
    by runtimes.py).

    Every (arch, sanitizer) flavor is emitted twice: once with default
    ldflags — which also distributes the C++ runtime libraries — and once
    with -static-libstdc++, which ships only the sanitizer runtimes.
    """
    # TODO(fxbug.dev/27110): Ideally this would be done by the cmake build itself.
    # (arch, sanitizer cflags, sanitizer multilib dirs, sanitizer runtime libs)
    sanitizer_flavors = [
        ("x86_64", [], [], []),
        ("x86_64", ["-fsanitize=address"], ["asan"], ["libclang_rt.asan.so"]),
        (
            "x86_64",
            ["-fsanitize=undefined"],
            [],
            ["libclang_rt.ubsan_standalone.so"],
        ),
        ("aarch64", [], [], []),
        ("aarch64", ["-fsanitize=address"], ["asan"], ["libclang_rt.asan.so"]),
        (
            "aarch64",
            ["-fsanitize=undefined"],
            [],
            ["libclang_rt.ubsan_standalone.so"],
        ),
        (
            "aarch64",
            ["-fsanitize=hwaddress"],
            ["hwasan"],
            ["libclang_rt.hwasan.so"],
        ),
    ]
    cxx_sonames = ["libc++.so.2", "libc++abi.so.1", "libunwind.so.1"]

    specs = []
    for ldflags in ([], ["-static-libstdc++"]):
        for arch, cflags, multilibs, sanitizer_runtimes in sanitizer_flavors:
            triple = "%s-unknown-fuchsia" % arch
            runtime_dir = "clang/%s/lib/%s" % (clang_version, triple)
            multilib_prefix = "+".join(multilibs)
            if multilib_prefix:
                multilib_prefix += "/"
            dist_entries = [
                {"dist": "%s/%s" % (runtime_dir, soname)}
                for soname in sanitizer_runtimes
            ]
            if not ldflags:
                # Default-ldflags builds also distribute the shared C++
                # runtime, named after the library's first dotted component.
                dist_entries += [
                    {
                        "dist": triple + "/" + multilib_prefix + soname,
                        "name": soname.split(".")[0],
                    }
                    for soname in cxx_sonames
                ]
            specs.append(
                {
                    "target": [triple, "%s-fuchsia" % arch],
                    "cflags": cflags,
                    "ldflags": ldflags,
                    "runtime": dist_entries,
                }
            )
    return specs
def extract_trace_filename(api, build_dir, trace):
    """Derive a flat upload name for `trace` relative to `build_dir`.

    Path separators in the relative path are replaced by underscores so
    the result can be used as a single GCS object name component.
    """
    if str(build_dir) not in trace:
        # trace file should be always in build_dir.
        # But if it happens, just upload it anyway
        # instead of throw an exception.
        return api.path.basename(trace)  # pragma: no cover
    relative = str(api.path.relpath(trace, build_dir))
    return relative.replace(api.path.sep, "_")
def upload_crash_reproducer(api, crashreports_dir, gcs_bucket, build_id):
    """Archive clang crash reproducers and upload them to GCS.

    For each reproducer script (*.sh) in `crashreports_dir`, bundles the
    script together with its sibling files sharing the same basename
    (e.g. foo.sh + foo.cpp) into a .tar.gz and uploads each archive under
    the build's namespace in `gcs_bucket`.
    """
    with api.step.nest("upload crash reproducer"), api.context(infra_steps=True):
        temp = api.path.mkdtemp("reproducers")
        reproducers = api.file.glob_paths(
            "find reproducers",
            crashreports_dir,
            "*.sh",
            test_data=(crashreports_dir.join("foo.sh"),),
        )
        for reproducer in reproducers:
            # Basename without extension, e.g. "foo" for "foo.sh".
            base = api.path.splitext(api.path.basename(reproducer))[0]
            # All files belonging to this reproducer, any extension.
            files = api.file.glob_paths(
                "find %s files" % base,
                crashreports_dir,
                base + ".*",
                test_data=(
                    crashreports_dir.join("foo.sh"),
                    crashreports_dir.join("foo.cpp"),
                ),
            )
            tgz_basename = "%s.tar.gz" % base
            tgz_path = temp.join(tgz_basename)
            archive = api.tar.create(tgz_path, compression="gzip")
            for f in files:
                # Store paths relative to crashreports_dir inside the archive.
                archive.add(f, crashreports_dir)
            archive.tar("create %s" % tgz_basename)
            api.gsutil.upload_namespaced_file(
                source=tgz_path,
                bucket=gcs_bucket,
                subpath=tgz_basename,
                namespace=build_id,
            )
def upload_build_traces(
    api, ninjatrace_available, cipd_dir, build_dir, artifact_gcs_bucket, build_id
):
    """Upload ninja logs — converted to perfetto traces when possible — to GCS.

    When `ninjatrace_available` is False, the raw .ninja_log files found
    under `build_dir` are uploaded for offline analysis. Otherwise each
    log is first converted to trace JSON with the ninjatrace tool and the
    uploaded trace gets a perfetto UI link attached to its step.
    """
    with api.step.nest("ninja log and traces"):
        # Step test stub: pretend two ninja logs were found.
        test_data = "%s\n%s\n" % (
            build_dir.join(NINJA_LOG),
            build_dir.join("test", NINJA_LOG),
        )
        # api.path.glob does not handle long path on
        # windows, use a python3 script to mitigate
        # this issue. fxbug.dev/98099
        logs = api.python3(
            "collect .ninja.log",
            [
                api.resource("find_files.py"),
                "--dir",
                build_dir,
                "--pattern",
                "**/%s" % NINJA_LOG,
            ],
            step_test_data=lambda: api.raw_io.test_api.stream_output_text(test_data),
            stdout=api.raw_io.output_text(add_output_log=True),
        )
        # find_files.py prints one matching path per line.
        log_paths = logs.stdout.splitlines()
        if not ninjatrace_available:
            # ninjatrace is not yet available on Linux-arm64 and Windows
            # platforms. Upload logs for offline analysis.
            with api.step.nest("upload ninja log"):
                for log_file in log_paths:
                    filename = extract_trace_filename(api, build_dir, log_file)
                    api.gsutil.upload_namespaced_file(
                        source=log_file,
                        bucket=artifact_gcs_bucket,
                        subpath=filename,
                        namespace=build_id,
                    )
            return
        trace_paths = []
        with api.step.nest("generate ninja traces"):
            for log_file in log_paths:
                trace = log_file + ".json"
                try:
                    api.step(
                        "ninjatrace",
                        [
                            cipd_dir.join("ninjatrace", "ninjatrace"),
                            "-ninjalog",
                            log_file,
                            # TODO(haowei): add compdb
                            "-trace-json",
                            trace,
                        ],
                    )
                    trace_paths.append(trace)
                except api.step.StepFailure:  # pragma: no cover
                    # Conversion is best-effort; logs that fail are skipped.
                    pass
        with api.step.nest("upload traces"):
            for trace in trace_paths:
                filename = extract_trace_filename(api, build_dir, trace)
                # Upload may fail if service account don't have access to bucket.
                # Ignore the error for now since upload failures do not impact the
                # toolchain build process.
                try:
                    step = api.gsutil.upload_namespaced_file(
                        source=trace,
                        bucket=artifact_gcs_bucket,
                        subpath=filename,
                        namespace=build_id,
                    )
                    # Link the uploaded trace straight into the perfetto UI.
                    step.presentation.links[
                        "perfetto_ui"
                    ] = "https://ui.perfetto.dev/#!?url=%s" % (
                        api.gsutil.unauthenticated_url(
                            step.presentation.links[filename]
                        )
                    )
                except api.step.StepFailure:  # pragma: no cover
                    pass
def read_clang_version_from_build(api, build_dir, do_2stage):
    """Read the Clang major version out of the generated Version.inc.

    In a 2-stage build the generated headers live under the
    tools/clang/stage2-bins subtree of `build_dir`.
    Raises AssertionError when the version macro cannot be found.
    """
    parts = ["tools", "clang", "include", "clang", "Basic", "Version.inc"]
    if do_2stage:
        parts = ["tools", "clang", "stage2-bins"] + parts
    contents = api.file.read_text(
        "Version.inc",
        build_dir.join(*parts),
        test_data='#define CLANG_VERSION_MAJOR_STRING "8"',
    )
    match = re.search(r'CLANG_VERSION_MAJOR_STRING "([a-zA-Z0-9]+)"', contents)
    assert match, "Cannot determine Clang version"
    return match.group(1)
def generate_clang_runtime_json(
    api,
    pkg_dir,
    cipd_dir,
    use_breakpad,
    clang_version,
):
    """Generate runtime.json for the runtime libraries under pkg_dir/lib.

    Passes the prototype runtimes spec to the toolchain module's
    strip_runtimes helper; breakpad's dump_syms is only supplied when
    `use_breakpad` is set.
    """
    dump_syms = None
    if use_breakpad:
        dump_syms = cipd_dir.join("breakpad", "dump_syms", "dump_syms")
    api.toolchain.strip_runtimes(
        "generate runtime.json",
        spec=make_runtimes_spec(clang_version),
        path=pkg_dir.join("lib"),
        build_id_subpath="debug/.build-id",
        readelf=cipd_dir.join("bin", "llvm-readelf"),
        objcopy=cipd_dir.join("bin", "llvm-objcopy"),
        dump_syms=dump_syms,
    )
def generate_clang_license(api, llvm_dir, pkg_dir, llvm_projects, llvm_runtimes):
    """Assemble the combined toolchain license into pkg_dir/LICENSE.

    Runs the generate_license.py resource, including each LLVM project's
    LICENSE.TXT wholesale and extracting embedded license text (by line
    range, or "-" for the whole file) from the third-party sources.
    """
    start_dir = api.path["start_dir"]
    cmd = ["python", api.resource("generate_license.py"), "--include"]
    cmd += [
        llvm_dir.join(project, "LICENSE.TXT")
        for project in llvm_projects + llvm_runtimes
    ]
    # (file, line range) pairs whose license text is embedded in the file.
    extractions = [
        (llvm_dir.join("polly", "lib", "External", "isl", "LICENSE"), "-"),
        (
            llvm_dir.join(
                "llvm", "lib", "Support", "UnicodeNameToCodepointGenerated.cpp"
            ),
            "17-63",
        ),
        (start_dir.join("libxml2", "src", "Copyright"), "-"),
        (start_dir.join("zlib", "src", "zlib.h"), "4-22"),
        (start_dir.join("boringssl", "src", "LICENSE"), "-"),
        (start_dir.join("curl", "src", "COPYING"), "-"),
    ]
    for path, line_range in extractions:
        cmd += ["--extract", path, line_range]
    api.step(
        "generate license",
        cmd=cmd,
        stdout=api.raw_io.output_text(leak_to=pkg_dir.join("LICENSE")),
    )
def RunSteps(
api,
repository,
revision,
platform,
do_2stage,
enable_lto,
lto_mode,
enable_lld,
enable_assertions,
enable_backtraces,
use_inliner_model,
upload_package,
artifact_gcs_bucket,
builders,
tot_builders,
use_rbe,
):
use_goma = (
not api.platform.arch == "arm" and api.platform.bits == 64
) and not api.platform.is_win
if use_rbe:
api.rbe.ensure()
ninja_jobs = api.goma.jobs # doesn't really use Goma
remote_build_context = api.rbe()
elif use_goma:
api.goma.ensure()
ninja_jobs = api.goma.jobs
remote_build_context = api.goma.build_with_goma()
else:
ninja_jobs = api.platform.cpu_count
remote_build_context = contextlib.nullcontext()
# TODO: builders would ideally set this explicitly
if do_2stage is None:
do_2stage = api.buildbucket.builder_id.bucket == "prod"
if enable_lto is None:
if not do_2stage:
enable_lto = False
if upload_package is None:
upload_package = api.buildbucket.builder_id.bucket == "prod"
host_platform = api.cipd_util.platform_name
target_platform = platform or host_platform
use_breakpad = host_platform == "linux-amd64"
manifest = Manifest()
use_ninjatrace = False
with api.step.nest("ensure_packages"):
with api.context(infra_steps=True):
cipd_dir = api.path["start_dir"].join("cipd")
pkgs = api.cipd.EnsureFile()
# We don't have SDK for linux-arm64 or win, but we only need sysroot.
if (
api.platform.arch == "arm" and api.platform.bits == 64
) or api.platform.is_win:
pkgs.add_package("fuchsia/sdk/core/linux-amd64", "latest", "sdk")
else:
pkgs.add_package("fuchsia/sdk/core/${platform}", "latest", "sdk")
pkgs.add_package("fuchsia/third_party/clang/${platform}", "integration")
# TODO(fxbug.dev/87518) Unify cmake and ninja revisions after test failures are fixed.
if api.platform.is_win:
# boringssl and clang rely on POSIX tools provided in the
# "git for windows" package, but they are not in PATH.
# Bootstrap git again so these tools can be added to PATH.
pkgs.add_package("infra/3pp/tools/git/${platform}", GIT_VERSION)
pkgs.add_package(
"fuchsia/third_party/cmake/${platform}",
"integration",
)
api.ninja.add_package_path(pkgs)
pkgs.add_package(
"fuchsia/third_party/sysroot/linux",
"tp7-Zyo4pv2SVEoK_eaU6yuKmyxJWcR54vtJKTWpTIYC",
"linux",
)
if use_inliner_model and not target_platform in [
"linux-arm64",
"mac-arm64",
]:
pkgs.add_package(
"fuchsia/model/inlining/%s" % target_platform,
"git_revision:0ee1ce61a844b8fd285ff1a2c47c4b8690fd2b7d",
"model",
)
if use_breakpad:
pkgs.add_package(
"fuchsia/tools/breakpad/${platform}", "latest", "breakpad"
)
if api.platform.arch != "arm" and not api.platform.is_win:
use_ninjatrace = True
pkgs.add_package(
"fuchsia/tools/ninjatrace/${platform}", "latest", "ninjatrace"
)
ensured = api.cipd.ensure(cipd_dir, pkgs)
for subdir, pins in ensured.items():
directory = manifest.directories[str(cipd_dir.join(subdir))]
directory.cipd_server_host = CIPD_SERVER_HOST
for pin in pins:
directory.cipd_package[pin.package].instance_id = pin.instance_id
staging_dir = api.path["start_dir"].join("staging")
pkg_dir = staging_dir.join("llvm_install")
api.file.ensure_directory("create pkg dir", pkg_dir)
llvm_dir, revision = api.git_checkout(repository, fallback_ref=revision)
git_checkout = manifest.directories[str(llvm_dir)].git_checkout
git_checkout.repo_url = repository
git_checkout.revision = revision
target_triple = api.toolchain.PLATFORM_TO_TRIPLE[target_platform]
host_triple = api.toolchain.PLATFORM_TO_TRIPLE[host_platform]
cas_digest = ""
clang_version = ""
if api.platform.is_win:
# Add git's tools to PATH.
env_prefixes = {"PATH": [cipd_dir.join("usr", "bin"), cipd_dir.join("bin")]}
else:
env_prefixes = {"PATH": [cipd_dir.join("bin")]}
with api.macos_sdk(
kind="ios"
), api.windows_sdk(), remote_build_context, api.context(env_prefixes=env_prefixes):
if api.platform.name == "linux":
target_sysroot = host_sysroot = cipd_dir.join("linux")
elif api.platform.name == "mac":
# TODO(fxbug.dev/3043): Eventually use our own hermetic sysroot as for Linux.
step_result = api.step(
"xcrun",
["xcrun", "--sdk", "macosx", "--show-sdk-path"],
stdout=api.raw_io.output_text(name="sdk-path", add_output_log=True),
step_test_data=lambda: api.raw_io.test_api.stream_output_text(
"/some/xcode/path"
),
)
target_sysroot = host_sysroot = step_result.stdout.strip()
elif api.platform.name == "win":
target_sysroot = host_sysroot = api.windows_sdk.sdk_dir
else: # pragma: no cover
assert False, "unsupported platform"
arguments = {
"target_triple": target_triple,
"host_triple": host_triple,
"target_sysroot": target_sysroot,
"host_sysroot": host_sysroot,
"linux_sysroot": cipd_dir.join("linux"),
"fuchsia_sdk": cipd_dir.join("sdk"),
}
arguments.update(
{
"win": {
"cc": cipd_dir.join("bin", "clang-cl.exe"),
"cxx": cipd_dir.join("bin", "clang-cl.exe"),
"ar": cipd_dir.join("bin", "llvm-lib.exe"),
"ld": cipd_dir.join("bin", "lld-link.exe"),
"mt": cipd_dir.join("bin", "llvm-mt.exe"),
"nm": cipd_dir.join("bin", "llvm-nm.exe"),
"objcopy": cipd_dir.join("bin", "llvm-objcopy.exe"),
"objdump": cipd_dir.join("bin", "llvm-objdump.exe"),
"ranlib": cipd_dir.join("bin", "llvm-ranlib.exe"),
"rc": cipd_dir.join("bin", "llvm-rc.exe"),
"readelf": cipd_dir.join("bin", "llvm-readelf.exe"),
"strip": cipd_dir.join("bin", "llvm-strip.exe"),
"ninja": cipd_dir.join("ninja.exe"),
},
"mac": {
"cc": cipd_dir.join("bin", "clang"),
"cxx": cipd_dir.join("bin", "clang++"),
"ar": cipd_dir.join("bin", "llvm-ar"),
"ld": "/usr/bin/ld",
"libtool": cipd_dir.join("bin", "llvm-libtool-darwin"),
"lipo": cipd_dir.join("bin", "llvm-lipo"),
"nm": cipd_dir.join("bin", "llvm-nm"),
"objcopy": cipd_dir.join("bin", "llvm-objcopy"),
"objdump": cipd_dir.join("bin", "llvm-objdump"),
"ranlib": cipd_dir.join("bin", "llvm-ranlib"),
"readelf": cipd_dir.join("bin", "llvm-readelf"),
"strip": cipd_dir.join("bin", "llvm-strip"),
"ninja": cipd_dir.join("ninja"),
},
"linux": {
"cc": cipd_dir.join("bin", "clang"),
"cxx": cipd_dir.join("bin", "clang++"),
"ar": cipd_dir.join("bin", "llvm-ar"),
"ld": cipd_dir.join("bin", "ld.lld"),
"nm": cipd_dir.join("bin", "llvm-nm"),
"objcopy": cipd_dir.join("bin", "llvm-objcopy"),
"objdump": cipd_dir.join("bin", "llvm-objdump"),
"ranlib": cipd_dir.join("bin", "llvm-ranlib"),
"readelf": cipd_dir.join("bin", "llvm-readelf"),
"strip": cipd_dir.join("bin", "llvm-strip"),
"ninja": cipd_dir.join("ninja"),
},
}[api.platform.name]
)
if use_rbe:
arguments.update(
{"compiler_wrapper": str(api.rbe.cxx_compiler_wrapper)}
# TODO(https://fxbug.dev/107610): prefer pass a multi-token
# wrapper directly, joining with ';'.
# {"compiler_wrapper": ";".join(api.rbe.cxx_compiler_wrapper_command)}
)
elif use_goma:
arguments.update({"compiler_wrapper": api.goma.goma_dir.join("gomacc")})
options = [
"-GNinja",
"-DCMAKE_MAKE_PROGRAM={ninja}",
"-DCMAKE_INSTALL_PREFIX=",
"-DCMAKE_C_COMPILER={cc}",
"-DCMAKE_CXX_COMPILER={cxx}",
"-DCMAKE_ASM_COMPILER={cc}",
"-DCLANG_REPOSITORY_STRING=%s" % repository,
]
if use_goma or use_rbe:
options.extend(
[
"-DCMAKE_C_COMPILER_LAUNCHER={compiler_wrapper}",
"-DCMAKE_CXX_COMPILER_LAUNCHER={compiler_wrapper}",
"-DCMAKE_ASM_COMPILER_LAUNCHER={compiler_wrapper}",
]
)
options.extend(
{
"linux": [
"-DCMAKE_AR={ar}",
"-DCMAKE_LINKER={ld}",
"-DCMAKE_NM={nm}",
"-DCMAKE_OBJCOPY={objcopy}",
"-DCMAKE_OBJDUMP={objdump}",
"-DCMAKE_RANLIB={ranlib}",
"-DCMAKE_READELF={readelf}",
"-DCMAKE_STRIP={strip}",
],
"mac": [
"-DCMAKE_LIBTOOL={libtool}",
"-DCMAKE_LIPO={lipo}",
],
"win": [
"-DCMAKE_AR={ar}",
"-DCMAKE_LINKER={ld}",
"-DCMAKE_NM={nm}",
"-DCMAKE_OBJCOPY={objcopy}",
"-DCMAKE_OBJDUMP={objdump}",
"-DCMAKE_RANLIB={ranlib}",
"-DCMAKE_READELF={readelf}",
"-DCMAKE_STRIP={strip}",
# TODO(phosek): reenable once we update the host toolchain
# "-DCMAKE_RC_COMPILER={rc}",
# "-DCMAKE_MT={mt}",
],
}[api.platform.name]
)
if api.platform.is_mac and not enable_lld:
options.extend(["-DCMAKE_LINKER={ld}"])
options.extend(["-DCMAKE_SYSROOT={target_sysroot}"])
# TODO(phosek): consider moving these to a cache file
platform_options = []
if api.platform.is_mac:
platform_options.extend(["-DCMAKE_OSX_DEPLOYMENT_TARGET=10.13"])
platform_options.extend(
[
"-DCMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
% (mode, cipd_dir.join("lib", "libc++.a"))
for mode in ["SHARED", "MODULE", "EXE"]
]
)
if api.platform.is_win:
platform_options.extend(
[
"-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
# The above flag was added in CMake 3.15, but some
# dependencies have earlier minimum versions. This policy
# switch applies the above flag to these as well.
"-DCMAKE_POLICY_DEFAULT_CMP0091=NEW",
]
)
if host_triple != target_triple:
# TODO(phosek): Switch to api.platform_util and avoid string split.
(os, arch) = target_platform.split("-")
system_name = os.replace("mac", "darwin").title()
system_processor = arch.replace("amd64", "x86_64").replace(
"arm64", "aarch64"
)
toolchain_file = api.path["start_dir"].join("ToolChain.cmake")
api.file.write_text(
"write CMake toolchain file",
toolchain_file,
"\n".join(
[
"set(CMAKE_SYSTEM_NAME %s)" % system_name,
"set(CMAKE_SYSTEM_PROCESSOR %s)" % system_processor,
"set(CMAKE_C_COMPILER_TARGET %s)" % target_triple,
"set(CMAKE_CXX_COMPILER_TARGET %s)" % target_triple,
"set(CMAKE_ASM_COMPILER_TARGET %s)" % target_triple,
"",
]
),
)
platform_options.append("-DCMAKE_TOOLCHAIN_FILE=%s" % toolchain_file)
with api.step.nest("zlib"):
zlib_install_dir = staging_dir.join("zlib_install")
api.file.ensure_directory("create zlib_install_dir", zlib_install_dir)
build_zlib(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX", "ASM"]
]
+ [
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
],
arguments,
zlib_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
use_zlib_options = [
"-DZLIB_INCLUDE_DIR=%s" % zlib_install_dir.join("include"),
"-DZLIB_LIBRARY=%s"
% zlib_install_dir.join(
"lib", "zlibstatic.lib" if api.platform.is_win else "libz.a"
),
]
with api.step.nest("zstd"):
zstd_install_dir = staging_dir.join("zstd_install_prefix")
api.file.ensure_directory("create zstd_install_dir", zstd_install_dir)
build_zstd(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX", "ASM"]
]
+ [
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
],
arguments,
zstd_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
with api.step.nest("libxml2"):
libxml2_install_dir = staging_dir.join("libxml2_install")
api.file.ensure_directory("create libxml2_install_dir", libxml2_install_dir)
libxml2_cmake_dir = build_libxml2(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX", "ASM"]
]
+ use_zlib_options
+ [
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
],
arguments,
libxml2_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
if api.platform.is_win:
with api.step.nest("yasm"):
yasm_install_dir = staging_dir.join("yasm_install")
api.file.ensure_directory("create yasm install dir", yasm_install_dir)
build_yasm(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX", "ASM"]
],
arguments,
yasm_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
with api.step.nest("boringssl"):
boringssl_install_dir = staging_dir.join("boringssl_install")
api.file.ensure_directory(
"create boringssl_install_dir", boringssl_install_dir
)
nasm_option = []
if api.platform.is_win:
nasm_option = [
"-DCMAKE_ASM_NASM_COMPILER=%s"
% yasm_install_dir.join("bin", "yasm.exe")
]
boringssl_cmake_dir = build_boringssl(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX", "ASM"]
]
+ ["-DGO_EXECUTABLE=%s" % api.go.go_root.join("bin", "go")]
+ nasm_option,
arguments,
boringssl_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
with api.step.nest("curl"):
curl_install_dir = staging_dir.join("curl_install")
api.file.ensure_directory("create curl install dir", curl_install_dir)
curl_cmake_dir = build_curl(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX", "ASM"]
]
+ [
"-DCMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
"-DOpenSSL_ROOT=%s" % boringssl_cmake_dir,
]
+ use_zlib_options,
arguments,
curl_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
api.file.write_proto(
"source manifest", pkg_dir.join("source_manifest.json"), manifest, "JSONPB"
)
# build clang+llvm
build_dir = staging_dir.join("llvm_build")
api.file.ensure_directory("create llvm build dir", build_dir)
arguments.update(
{
"BOOTSTRAP_": "BOOTSTRAP_",
"STAGE2_": "STAGE2_",
"stage2_": "stage2-",
"_stage2": "",
}
if do_2stage
else {"BOOTSTRAP_": "", "STAGE2_": "", "stage2_": "", "_stage2": "-stage2"}
)
llvm_projects = ["clang", "clang-tools-extra", "lld", "llvm", "polly"]
llvm_runtimes = ["compiler-rt", "libcxx", "libcxxabi", "libunwind"]
options.extend(
[
"-D{BOOTSTRAP_}LLVM_ENABLE_ZLIB=FORCE_ON",
"-D{BOOTSTRAP_}ZLIB_INCLUDE_DIR=%s" % zlib_install_dir.join("include"),
"-D{BOOTSTRAP_}ZLIB_LIBRARY=%s"
% zlib_install_dir.join(
"lib", "zlibstatic.lib" if api.platform.is_win else "libz.a"
),
"-D{BOOTSTRAP_}LLVM_ENABLE_ZSTD=FORCE_ON",
"-D{BOOTSTRAP_}zstd_DIR=%s"
% zstd_install_dir.join("lib", "cmake", "zstd"),
"-D{BOOTSTRAP_}LLVM_ENABLE_LIBXML2=FORCE_ON",
# Note that the LibXml2 spelling has to match the spelling used in
# https://github.com/llvm/llvm-project/blob/cf54424a/llvm/cmake/config-ix.cmake#L144
"-D{BOOTSTRAP_}LibXml2_ROOT=%s" % libxml2_cmake_dir,
"-D{BOOTSTRAP_}LLVM_ENABLE_CURL=FORCE_ON",
"-D{BOOTSTRAP_}CURL_ROOT=%s" % curl_cmake_dir,
"-D{BOOTSTRAP_}OpenSSL_ROOT=%s" % boringssl_cmake_dir,
"-D{BOOTSTRAP_}CMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
]
)
# Host-platform-specific linker setup for the just-built toolchain:
# on Linux link libstdc++ statically so the tools do not depend on the
# host's C++ runtime; on macOS link an explicit libc++.a instead.
if api.platform.name == "linux":
    if do_2stage:
        options.extend(
            [
                "-D{BOOTSTRAP_}CMAKE_SYSROOT={target_sysroot}",
            ]
            + [
                # BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
                "-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-static-libstdc++" % mode
                for mode in ["SHARED", "MODULE", "EXE"]
            ]
            + [
                # Unprefixed flags are used by the first stage compiler.
                "-DCMAKE_%s_LINKER_FLAGS=-static-libstdc++" % mode
                for mode in ["SHARED", "MODULE", "EXE"]
            ]
        )
        # TODO(fxbug.dev/81937)
        # This is a temporary work around to resolve the out of memory issue that
        # linux-arm64 builders running into, and should be removed when we implement
        # support for fat LTO in Clang.
        if api.platform.arch == "arm":
            options.extend(
                [
                    "-D{STAGE2_}LLVM_PARALLEL_LINK_JOBS=8",
                ]
            )
    else:
        options.extend(
            [
                # BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
                "-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-static-libstdc++" % mode
                for mode in ["SHARED", "MODULE", "EXE"]
            ]
        )
elif api.platform.name == "mac":
    if do_2stage:
        options.extend(
            ["-D{BOOTSTRAP_}CMAKE_SYSROOT={target_sysroot}"]
            + [
                # BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
                # Stage 2 links the libc++.a produced in build_dir by stage 1.
                "-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
                % (mode, build_dir.join("lib", "libc++.a"))
                for mode in ["SHARED", "MODULE", "EXE"]
            ]
            + [
                # Unprefixed flags are used by the first stage compiler.
                # Stage 1 links the prebuilt libc++.a from the CIPD packages.
                "-DCMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
                % (mode, cipd_dir.join("lib", "libc++.a"))
                for mode in ["SHARED", "MODULE", "EXE"]
            ]
            # Without LLD, point the stage-2 build at an explicit linker.
            + (["-D{STAGE2_}CMAKE_LINKER={ld}"] if not enable_lld else [])
        )
    else:
        options.extend(
            [
                # BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
                "-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
                % (mode, cipd_dir.join("lib", "libc++.a"))
                for mode in ["SHARED", "MODULE", "EXE"]
            ]
        )
# Cross compilation: describe the target system to CMake and make the
# compilers (and LLVM's default) aim at the target triple.
if host_triple != target_triple:
    options.extend(
        [
            "-D{BOOTSTRAP_}CMAKE_SYSTEM_NAME=%s" % system_name,
            "-D{BOOTSTRAP_}CMAKE_SYSTEM_PROCESSOR=%s" % system_processor,
            "-D{BOOTSTRAP_}CMAKE_C_COMPILER_TARGET={target_triple}",
            "-D{BOOTSTRAP_}CMAKE_CXX_COMPILER_TARGET={target_triple}",
            "-D{BOOTSTRAP_}CMAKE_ASM_COMPILER_TARGET={target_triple}",
            "-D{BOOTSTRAP_}LLVM_DEFAULT_TARGET_TRIPLE={target_triple}",
        ]
    )
# STAGE2_ prefixed flags are passed to the second stage by the first stage build.
options.extend(
    [
        "-D{STAGE2_}LINUX_aarch64-unknown-linux-gnu_SYSROOT={linux_sysroot}",
        "-D{STAGE2_}LINUX_armv7-unknown-linux-gnueabihf_SYSROOT={linux_sysroot}",
        "-D{STAGE2_}LINUX_i386-unknown-linux-gnu_SYSROOT={linux_sysroot}",
        "-D{STAGE2_}LINUX_x86_64-unknown-linux-gnu_SYSROOT={linux_sysroot}",
        "-D{STAGE2_}FUCHSIA_SDK={fuchsia_sdk}",
        # Make lit write machine-readable results (picked up later by the
        # resultdb-wrapped "test" step). RESULTDB_JSON is kept short to
        # dodge path-length limits (see fxbug.dev/91157 in the header).
        "-D{STAGE2_}LLVM_LIT_ARGS=--resultdb-output=%s -v" % RESULTDB_JSON,
    ]
)
if enable_lto is not None:
    # When LTO is on, pass the mode string (e.g. "thin"); when off, pass
    # "False". .title() yields the capitalization CMake expects.
    lto_value = lto_mode if enable_lto else str(enable_lto)
    options.extend(["-D{STAGE2_}LLVM_ENABLE_LTO=%s" % lto_value.title()])
if enable_lld is not None:
    options.extend(["-D{STAGE2_}LLVM_ENABLE_LLD=%s" % str(enable_lld).title()])
if enable_assertions:
    options.extend(
        [
            "-D{STAGE2_}LLVM_ENABLE_ASSERTIONS=%s"
            % str(enable_assertions).title()
        ]
    )
if enable_backtraces:
    options.extend(
        [
            "-D{STAGE2_}LLVM_ENABLE_BACKTRACES=%s"
            % str(enable_backtraces).title()
        ]
    )
# The ML inliner model is skipped on arm64 targets.
if use_inliner_model and not target_platform in ["linux-arm64", "mac-arm64"]:
    venv_dir = api.path["start_dir"].join("tensorflow-venv")
    # Provision tensorflow into a venv and capture its site-packages path;
    # LLVM's CMake needs it to AOT-compile the inliner size model.
    tensorflow_path = api.step(
        "get tensorflow",
        cmd=[
            "python",
            api.resource("get_tensorflow.py"),
            "-vpython-root",
            venv_dir,
        ],
        stdout=api.raw_io.output_text(
            name="tensorflow-path", add_output_log=True
        ),
        step_test_data=lambda: api.raw_io.test_api.stream_output_text(
            "%s"
            % venv_dir.join("lib", "python3.8", "site-packages", "tensorflow")
        ),
    ).stdout.strip()
    options.extend(
        [
            "-D{STAGE2_}TENSORFLOW_AOT_PATH=%s" % tensorflow_path,
            "-D{STAGE2_}LLVM_OVERRIDE_MODEL_HEADER_INLINERSIZEMODEL=%s"
            % cipd_dir.join("model", "InlinerSizeModel.h"),
            "-D{STAGE2_}LLVM_OVERRIDE_MODEL_OBJECT_INLINERSIZEMODEL=%s"
            % cipd_dir.join("model", "InlinerSizeModel.o"),
            "-D{STAGE2_}LLVM_RAEVICT_MODEL_PATH=none",
        ]
    )
# Directory name for storing clang reproducers during clang build
crash_report_dir = build_dir.join("clang-crashreports")
# Disable clang's module cache and redirect crash diagnostics so that any
# compiler crash during the build leaves an uploadable reproducer.
env = {
    "CLANG_MODULE_CACHE_PATH": "",
    "CLANG_CRASH_DIAGNOSTICS_DIR": crash_report_dir,
}
with api.step.nest("clang"), api.context(
    cwd=build_dir,
    env=env,
):
    api.step(
        "configure",
        [cipd_dir.join("bin", "cmake")]
        # Expand the {BOOTSTRAP_}/{STAGE2_}/... placeholders accumulated in
        # `options` and normalize path separators for the host platform.
        + [slashes(api, option.format(**arguments)) for option in options]
        + [
            "-C",
            llvm_dir.join(
                "clang",
                "cmake",
                "caches",
                "Fuchsia{_stage2}.cmake".format(**arguments),
            ),
            llvm_dir.join("llvm"),
        ],
    )
    # Surface CMake's error log in the step output to ease debugging of
    # configure failures (and of silently-degraded feature detection).
    api.file.read_text(
        "read CMakeError.log", build_dir.join("CMakeFiles", "CMakeError.log")
    )
    try:
        # Build the full (two-stage) distribution.
        api.ninja(
            "build",
            [
                # This only applies to the first stage, second stage is invoked by
                # CMake as a subprocess and will use Ninja's default.
                "-j%d" % ninja_jobs,
                "{stage2_}distribution".format(**arguments),
            ],
        )
    except api.step.StepFailure:
        # Preserve clang crash reproducers for offline analysis.
        # NOTE(review): assumes upload_crash_reproducer re-raises (or itself
        # fails the build) — confirm, otherwise a failed build would fall
        # through to the install/upload steps below.
        upload_crash_reproducer(
            api,
            crash_report_dir,
            artifact_gcs_bucket,
            api.buildbucket_util.id,
        )
    # Extract build traces.
    upload_build_traces(
        api,
        use_ninjatrace,
        cipd_dir,
        build_dir,
        artifact_gcs_bucket,
        api.buildbucket_util.id,
    )
    # Stage the distribution into pkg_dir via DESTDIR-style installation.
    with api.context(env={"DESTDIR": pkg_dir}):
        api.ninja(
            "install",
            [
                "{stage2_}install-distribution".format(**arguments),
            ],
        )
    # Upload toolchain to CAS before running clang tests.
    with api.context(cwd=api.path["start_dir"]):
        clang_version = read_clang_version_from_build(api, build_dir, do_2stage)
        generate_clang_runtime_json(
            api,
            pkg_dir,
            cipd_dir,
            use_breakpad,
            clang_version,
        )
        generate_clang_license(
            api, llvm_dir, pkg_dir, llvm_projects, llvm_runtimes
        )
        cas_digest = api.cas_util.upload(pkg_dir, output_property="isolated")
    # Run the tests.
    projects = ["clang", "lld", "llvm", "polly"]
    # TODO(leonardchan): run host runtime tests for mac.
    if (
        host_triple in ["x86_64-unknown-linux-gnu", "aarch64-unknown-linux-gnu"]
        and not do_2stage
    ):
        projects += ["runtimes-" + host_triple]
    # Tests can only run when the just-built tools execute on this host.
    if host_platform == target_platform:
        with resultdb_context(api, build_dir):
            api.ninja(
                "test",
                ["-j%d" % api.goma.jobs, "-k0"]
                + [
                    ("{stage2_}check-" + project).format(**arguments)
                    for project in projects
                ],
            )
if upload_package and clang_version:
    # The published package has the same name for every platform.
    api.cipd_util.upload_package(
        "fuchsia/third_party/clang/%s" % target_platform,
        pkg_dir,
        search_tag={"git_revision": revision},
        repository=repository,
        metadata=[("version", clang_version)],
    )
# TODO(phosek): move this logic to clang_trigger.py recipe.
# Suppress triggering from the RBE-built tools for now.
if (
    cas_digest
    and not use_rbe
    and not target_platform
    in [
        "linux-arm64",
        "mac-arm64",
        "windows-amd64",
    ]
):
    # Do a full integration build. This will use the just-built toolchain
    # to build all of Fuchsia to check whether there are any regressions.
    api.toolchain.trigger_build(
        "clang_toolchain",
        repository,
        revision,
        cas_digest,
        builders=builders[target_platform],
        fuchsia_ref="refs/heads/releases/canary",
    )
    # Additionally exercise tip-of-tree builders where configured.
    if tot_builders and target_platform in tot_builders:
        api.toolchain.trigger_build(
            "clang_toolchain",
            repository,
            revision,
            cas_digest,
            builders=tot_builders[target_platform],
            fuchsia_ref="refs/heads/main",
        )
def GenTests(api):
    """Yield simulation test cases for each supported host/target combo.

    Covers the ci/prod x64 builds on Linux and Mac, cross builds for the
    arm64 platforms, Windows, the LTO/ThinLTO/LLD/RBE variants, and a
    failing build that exercises the crash-reproducer upload path.
    """
    builders = {
        "linux-amd64": ["fuchsia/linux-x64-builder"],
        "mac-amd64": ["fuchsia/mac-x64-builder"],
    }
    tot_builders = {
        "linux-amd64": ["fuchsia/linux-x64-builder-tot"],
    }

    def ci_build(bucket):
        # Buildbucket input shared by every case; only the bucket differs.
        return api.buildbucket.ci_build(
            project="fuchsia",
            bucket=bucket,
            git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
            revision="a" * 40,
        )

    def props(**kwargs):
        # Every case carries the same builder maps.
        return api.properties(
            builders=builders, tot_builders=tot_builders, **kwargs
        )

    for host in ("linux", "mac"):
        yield (
            api.status_check.test("ci_%s_x64" % host)
            + ci_build("ci")
            + api.platform.name(host)
            + props(
                platform=host + "-amd64",
                enable_assertions=True,
                enable_backtraces=True,
            )
        )
        yield (
            api.status_check.test("prod_%s_x64" % host)
            + ci_build("prod")
            + api.platform.name(host)
            + props(platform=host + "-amd64")
            + api.git.get_remote_branch_head("git ls-remote", "b" * 40)
        )

    yield (
        api.status_check.test("windows_amd64")
        + ci_build("ci")
        + api.platform.name("win")
        + props(platform="windows-amd64", use_inliner_model=True)
    )

    yield (
        api.status_check.test("linux_arm64")
        + ci_build("prod")
        + api.platform("linux", arch="arm", bits=64)
        + props(platform="linux-arm64", do_2stage=True, enable_lto=False)
    )

    yield (
        api.status_check.test("mac_arm64")
        + ci_build("prod")
        + api.platform.name("mac")
        + api.platform.bits(64)
        + props(
            platform="mac-arm64",
            do_2stage=True,
            enable_lto=False,
            upload_package=False,
        )
    )

    yield (
        api.status_check.test("lld")
        + ci_build("ci")
        + api.platform.name("mac")
        + props(platform="mac-amd64", enable_lld=True)
    )

    yield (
        api.status_check.test("lto")
        + ci_build("ci")
        + api.platform.name("linux")
        + props(platform="linux-amd64", enable_lto=True)
    )

    yield (
        api.status_check.test("thinlto")
        + ci_build("ci")
        + api.platform.name("linux")
        + props(platform="linux-amd64", enable_lto=True, lto_mode="thin")
    )

    yield (
        api.status_check.test("rbe")
        + ci_build("ci")
        + api.platform.name("linux")
        + props(platform="linux-amd64", use_rbe=True)
    )

    # Force the "clang.build" ninja step to fail so the simulation walks
    # the crash-reproducer upload path.
    yield (
        api.status_check.test("clang_reproducer")
        + ci_build("ci")
        + api.platform.name("linux")
        + props(platform="linux-amd64", enable_lto=True, lto_mode="thin")
        + api.step_data("clang.build", retcode=1)
    )