# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Clang toolchain."""
import contextlib
import re
from PB.go.chromium.org.luci.common.proto.srcman.manifest import Manifest
from PB.recipes.fuchsia.contrib.clang_toolchain import InputProperties, LTOMode
GIT_VERSION = "version:2@2.35.1.chromium.8"
DEPS = [
"fuchsia/buildbucket_util",
"fuchsia/cas_util",
"fuchsia/cipd_util",
"fuchsia/cmake",
"fuchsia/git",
"fuchsia/git_checkout",
"fuchsia/go",
"fuchsia/goma",
"fuchsia/gsutil",
"fuchsia/macos_sdk",
"fuchsia/ninja",
"fuchsia/platform_util",
"fuchsia/python3",
"fuchsia/rbe",
"fuchsia/tensorflow",
"fuchsia/toolchain",
"fuchsia/windows_sdk",
"fuchsia/zlib",
"fuchsia/zstd",
"recipe_engine/archive",
"recipe_engine/buildbucket",
"recipe_engine/cipd",
"recipe_engine/context",
"recipe_engine/file",
"recipe_engine/path",
"recipe_engine/platform",
"recipe_engine/properties",
"recipe_engine/raw_io",
"recipe_engine/resultdb",
"recipe_engine/step",
"recipe_engine/url",
]
TARGET_TO_ARCH = {
"x64": "x86_64",
"arm64": "aarch64",
}
TARGETS = TARGET_TO_ARCH.keys()
LIBXML2_GIT = "https://fuchsia.googlesource.com/third_party/libxml2"
BORINGSSL_GIT = "https://boringssl.googlesource.com/boringssl"
CURL_GIT = "https://fuchsia.googlesource.com/third_party/curl"
YASM_GIT = "https://fuchsia.googlesource.com/third_party/yasm"
CPP_HTTPLIB_GIT = "https://llvm.googlesource.com/third_party/cpp-httplib"
NCURSES_GIT = "https://llvm.googlesource.com/third_party/ncurses"
LIBEDIT_GIT = "https://llvm.googlesource.com/third_party/libedit"
LIBFFI_GIT = "https://llvm.googlesource.com/third_party/libffi"
CPYTHON_GIT = "https://llvm.googlesource.com/third_party/cpython"
CPYTHON_VERSION = "3.11.4"
# TODO(https://fxbug.dev/327507421): Remove this temporary SDK version pin when
# we have a concrete solution.
FUCHSIA_SDK_VERSION = "f18"
CIPD_SERVER_HOST = "chrome-infra-packages.appspot.com"
# TODO(fxbug.dev/91157): Restore the file name once
# path length issue is properly fixed.
RESULTDB_JSON = "r.j"
NINJA_LOG = ".ninja_log"
PROPERTIES = InputProperties
def RunSteps(
api,
props,
):
# default values
if not props.repository:
props.repository = "https://llvm.googlesource.com/llvm-project"
if not props.revision:
props.revision = "refs/heads/main"
if not props.artifact_gcs_bucket:
props.artifact_gcs_bucket = "fuchsia-artifacts"
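    # Goma is only available on 64-bit, non-ARM, non-Windows hosts. RBE takes
    # precedence below when enabled; otherwise fall back to a local build
    # sized to the host's CPU count.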
    use_goma = (
        api.platform.arch != "arm" and api.platform.bits == 64
    ) and not api.platform.is_win
if props.use_rbe:
        ninja_jobs = api.goma.jobs  # Reuses Goma's job-count heuristic; Goma itself is not used.
# Valid choices for AbsolutePathPolicy for cmake-based builds are:
# RELATIVIZE: rewrite commands using relative paths for remote
# execution.
# ALLOW: Force the remote environment to look like local paths.
remote_build_context = api.rbe(
absolute_path_policy=api.rbe.AbsolutePathPolicy.ALLOW
)
elif use_goma:
ninja_jobs = api.goma.jobs
remote_build_context = api.goma()
else:
ninja_jobs = api.platform.cpu_count
remote_build_context = contextlib.nullcontext()
host_platform = api.cipd_util.platform_name
target_platform = props.platform or host_platform
use_breakpad = host_platform == "linux-amd64"
# TODO: Make build_lldb an infra configurable option
build_lldb = True
should_build_ncurses = (
build_lldb
and target_platform != "mac-arm64"
and target_platform != "windows-amd64"
)
should_build_cpython = (
build_lldb
and host_platform == target_platform
and target_platform != "windows-amd64"
)
should_fetch_cpython = (
build_lldb
and host_platform == target_platform
and target_platform == "windows-amd64"
)
should_run_lldb_test = should_build_cpython
manifest = Manifest()
use_ninjatrace = False
with api.step.nest("ensure_packages"):
with api.context(infra_steps=True):
cipd_dir = api.path.start_dir / "cipd"
pkgs = api.cipd.EnsureFile()
            # Use the Linux x64 SDK on all platforms. We only need the
            # sysroot; we don't use any tools, so the host architecture is
            # irrelevant.
pkgs.add_package("fuchsia/sdk/core/linux-amd64", FUCHSIA_SDK_VERSION, "sdk")
pkgs.add_package("fuchsia/third_party/clang/${platform}", "integration")
            # TODO(fxbug.dev/87518): Unify cmake and ninja revisions after test failures are fixed.
if api.platform.is_win:
# boringssl and clang rely on POSIX tools provided in the
# "git for windows" package, but they are not in PATH.
# Bootstrap git again so these tools can be added to PATH.
pkgs.add_package("infra/3pp/tools/git/${platform}", GIT_VERSION)
if not api.platform.is_win:
# Experimental windows runtime cross compilation under Linux.
win_sdk_dir = api.windows_sdk.ensure_sdk()
pkgs.add_package(
"fuchsia/third_party/sysroot/linux",
"integration",
"linux",
)
pkgs.add_package(
"fuchsia/third_party/sysroot/focal",
"git_revision:fa7a5a9710540f30ff98ae48b62f2cdf72ed2acd",
"focal",
)
            if target_platform not in [
"linux-arm64",
"mac-arm64",
]:
pkgs.add_package(
f"fuchsia/model/inlining/{target_platform}",
"git_revision:0ee1ce61a844b8fd285ff1a2c47c4b8690fd2b7d",
"model",
)
if use_breakpad:
pkgs.add_package(
"fuchsia/tools/breakpad/${platform}", "integration", "breakpad"
)
if api.platform.arch != "arm" and not api.platform.is_win:
use_ninjatrace = True
pkgs.add_package(
"fuchsia/tools/ninjatrace/${platform}", "latest", "ninjatrace"
)
if build_lldb:
if should_build_cpython or should_build_ncurses:
# There is no make for Windows in CIPD.
# Current Windows LLDB setup also doesn't need GNU make.
pkgs.add_package(
"fuchsia/third_party/make/${platform}", "version:4.3"
)
pkgs.add_package("infra/3pp/tools/swig/${platform}", "latest")
if api.platform.is_linux:
# TODO: Remove this once we update compiler-rt to no longer assume binutils.
pkgs.add_package(
"fuchsia/third_party/binutils-gdb/${platform}",
"git_revision:9030a82d6f700e03ab143f0d002e9f21ae2fd52f",
"binutils-gdb",
)
ensured = api.cipd.ensure(cipd_dir, pkgs)
for subdir, pins in ensured.items():
directory = manifest.directories[str(cipd_dir / subdir)]
directory.cipd_server_host = CIPD_SERVER_HOST
for pin in pins:
directory.cipd_package[pin.package].instance_id = pin.instance_id
with api.step.nest("update sdk"):
sdk_dir = api.path.start_dir / "sdk"
api.file.copytree("copy sdk", cipd_dir / "sdk", sdk_dir)
api.file.chmod(
"chmod pkg/sysroot/meta.json",
sdk_dir.joinpath("pkg", "sysroot", "meta.json"),
"0644",
)
staging_dir = api.path.start_dir
pkg_dir = staging_dir / "llvm_install"
api.file.ensure_directory("create pkg dir", pkg_dir)
if build_lldb:
lldb_pkg_dir = staging_dir / "lldb_install"
api.file.ensure_directory("create lldb pkg dir", lldb_pkg_dir)
llvm_dir, revision = api.git_checkout(props.repository, fallback_ref=props.revision)
git_checkout = manifest.directories[str(llvm_dir)].git_checkout
git_checkout.repo_url = props.repository
git_checkout.revision = revision
with api.context(cwd=llvm_dir):
for revert in props.reverts:
api.git.revert(revert)
target_triple = api.toolchain.PLATFORM_TO_TRIPLE[target_platform]
host_triple = api.toolchain.PLATFORM_TO_TRIPLE[host_platform]
cas_digest = ""
clang_version = ""
if api.platform.is_win:
# Add git's tools to PATH.
env_prefixes = {
"PATH": [cipd_dir.joinpath("usr", "bin"), cipd_dir.joinpath("bin")]
}
else:
env_prefixes = {"PATH": [cipd_dir / "bin"]}
with api.macos_sdk(
kind="ios"
), api.windows_sdk(), remote_build_context, api.context(env_prefixes=env_prefixes):
if api.platform.name == "linux":
target_sysroot = host_sysroot = cipd_dir / "linux"
elif api.platform.name == "mac":
# TODO(fxbug.dev/3043): Eventually use our own hermetic sysroot as for Linux.
target_sysroot = host_sysroot = api.macos_sdk.sysroot
elif api.platform.name == "win":
target_sysroot = host_sysroot = api.windows_sdk.sdk_dir
else: # pragma: no cover
assert False, "unsupported platform"
arguments = {
"target_triple": target_triple,
"host_triple": host_triple,
"target_sysroot": target_sysroot,
"host_sysroot": host_sysroot,
"linux_sysroot": cipd_dir / "linux",
"fuchsia_sdk": sdk_dir,
"ninja": api.ninja.path,
}
arguments.update(
{
"win": {
"cc": cipd_dir.joinpath("bin", "clang-cl.exe"),
"cxx": cipd_dir.joinpath("bin", "clang-cl.exe"),
"ar": cipd_dir.joinpath("bin", "llvm-lib.exe"),
"ld": cipd_dir.joinpath("bin", "lld-link.exe"),
"mt": cipd_dir.joinpath("bin", "llvm-mt.exe"),
"nm": cipd_dir.joinpath("bin", "llvm-nm.exe"),
"objcopy": cipd_dir.joinpath("bin", "llvm-objcopy.exe"),
"objdump": cipd_dir.joinpath("bin", "llvm-objdump.exe"),
"ranlib": cipd_dir.joinpath("bin", "llvm-ranlib.exe"),
"rc": cipd_dir.joinpath("bin", "llvm-rc.exe"),
"readelf": cipd_dir.joinpath("bin", "llvm-readelf.exe"),
"strip": cipd_dir.joinpath("bin", "llvm-strip.exe"),
},
"mac": {
"cc": cipd_dir.joinpath("bin", "clang"),
"cxx": cipd_dir.joinpath("bin", "clang++"),
"ar": cipd_dir.joinpath("bin", "llvm-ar"),
"ld": "/usr/bin/ld",
"libtool": cipd_dir.joinpath("bin", "llvm-libtool-darwin"),
"lipo": cipd_dir.joinpath("bin", "llvm-lipo"),
"nm": cipd_dir.joinpath("bin", "llvm-nm"),
"objcopy": cipd_dir.joinpath("bin", "llvm-objcopy"),
"objdump": cipd_dir.joinpath("bin", "llvm-objdump"),
"ranlib": cipd_dir.joinpath("bin", "llvm-ranlib"),
"readelf": cipd_dir.joinpath("bin", "llvm-readelf"),
"strip": cipd_dir.joinpath("bin", "llvm-strip"),
},
"linux": {
"cc": cipd_dir.joinpath("bin", "clang"),
"cxx": cipd_dir.joinpath("bin", "clang++"),
"ar": cipd_dir.joinpath("bin", "llvm-ar"),
"ld": cipd_dir.joinpath("bin", "ld.lld"),
"nm": cipd_dir.joinpath("bin", "llvm-nm"),
"objcopy": cipd_dir.joinpath("bin", "llvm-objcopy"),
"objdump": cipd_dir.joinpath("bin", "llvm-objdump"),
"ranlib": cipd_dir.joinpath("bin", "llvm-ranlib"),
"readelf": cipd_dir.joinpath("bin", "llvm-readelf"),
"strip": cipd_dir.joinpath("bin", "llvm-strip"),
},
}[api.platform.name]
)
if props.use_rbe:
arguments.update(
{"compiler_wrapper": api.rbe.cxx_compiler_wrapper()}
                # TODO(https://fxbug.dev/107610): prefer passing a multi-token
                # wrapper directly, joined with ';':
                # {"compiler_wrapper": ";".join(api.rbe.cxx_compiler_wrapper_command())}
)
elif use_goma:
arguments.update({"compiler_wrapper": api.goma.goma_dir / "gomacc"})
options = [
"-GNinja",
"-DCMAKE_MAKE_PROGRAM={ninja}",
"-DCMAKE_INSTALL_PREFIX=",
"-DCMAKE_C_COMPILER={cc}",
"-DCMAKE_CXX_COMPILER={cxx}",
"-DCMAKE_ASM_COMPILER={cc}",
f"-DCLANG_REPOSITORY_STRING={props.repository}",
]
if use_goma or props.use_rbe:
options.extend(
[
"-DCMAKE_C_COMPILER_LAUNCHER={compiler_wrapper}",
"-DCMAKE_CXX_COMPILER_LAUNCHER={compiler_wrapper}",
"-DCMAKE_ASM_COMPILER_LAUNCHER={compiler_wrapper}",
]
)
options.extend(
{
"linux": [
"-DCMAKE_AR={ar}",
"-DCMAKE_LINKER={ld}",
"-DCMAKE_NM={nm}",
"-DCMAKE_OBJCOPY={objcopy}",
"-DCMAKE_OBJDUMP={objdump}",
"-DCMAKE_RANLIB={ranlib}",
"-DCMAKE_READELF={readelf}",
"-DCMAKE_STRIP={strip}",
],
"mac": [
"-DCMAKE_LIBTOOL={libtool}",
"-DCMAKE_LIPO={lipo}",
],
"win": [
"-DCMAKE_AR={ar}",
"-DCMAKE_LINKER={ld}",
"-DCMAKE_NM={nm}",
"-DCMAKE_OBJCOPY={objcopy}",
"-DCMAKE_OBJDUMP={objdump}",
"-DCMAKE_RANLIB={ranlib}",
"-DCMAKE_READELF={readelf}",
"-DCMAKE_STRIP={strip}",
# TODO(phosek): reenable once we update the host toolchain
# "-DCMAKE_RC_COMPILER={rc}",
# "-DCMAKE_MT={mt}",
],
}[api.platform.name]
)
if api.platform.is_mac and not props.enable_lld:
options.extend(["-DCMAKE_LINKER={ld}"])
options.extend(["-DCMAKE_SYSROOT={target_sysroot}"])
# TODO(phosek): consider moving these to a cache file
platform_options = []
if api.platform.is_mac:
platform_options.extend(["-DCMAKE_OSX_DEPLOYMENT_TARGET=10.13"])
platform_options.extend(
[
f"-DCMAKE_{mode}_LINKER_FLAGS=-nostdlib++ {cipd_dir.joinpath('lib', 'libc++.a')}"
for mode in ["SHARED", "MODULE", "EXE"]
]
)
arguments.update(
{
"cflags": "-mmacosx-version-min=10.13",
"cxxflags": "-mmacosx-version-min=10.13",
"ldflags": "-nostdlib++ "
+ str(api.path.start_dir.joinpath("cipd", "lib", "libc++.a")),
}
)
else:
arguments.update({"cflags": "", "cxxflags": "", "ldflags": ""})
if api.platform.is_win:
platform_options.extend(
[
"-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
# The above flag was added in CMake 3.15, but some
# dependencies have earlier minimum versions. This policy
# switch applies the above flag to these as well.
"-DCMAKE_POLICY_DEFAULT_CMP0091=NEW",
]
)
platform_options_target = platform_options.copy()
if host_triple != target_triple:
# TODO(phosek): Switch to api.platform_util and avoid string split.
(os, arch) = target_platform.split("-")
system_name = os.replace("mac", "darwin").title()
system_processor = arch.replace("amd64", "x86_64").replace(
"arm64", "aarch64"
)
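            # Write a minimal CMake toolchain file: setting CMAKE_SYSTEM_NAME
            # and CMAKE_SYSTEM_PROCESSOR puts CMake into cross-compilation
            # mode, and the *_COMPILER_TARGET variables point clang at the
            # non-native triple.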
toolchain_file = api.path.start_dir / "ToolChain.cmake"
api.file.write_text(
"write CMake toolchain file",
toolchain_file,
"\n".join(
[
f"set(CMAKE_SYSTEM_NAME {system_name})",
f"set(CMAKE_SYSTEM_PROCESSOR {system_processor})",
f"set(CMAKE_C_COMPILER_TARGET {target_triple})",
f"set(CMAKE_CXX_COMPILER_TARGET {target_triple})",
f"set(CMAKE_ASM_COMPILER_TARGET {target_triple})",
"",
]
),
)
platform_options_target.append(f"-DCMAKE_TOOLCHAIN_FILE={toolchain_file}")
with api.step.nest("zlib"):
zlib_build_dir_target = staging_dir / "zlib_build_target"
api.file.ensure_directory("create zlib_build_dir", zlib_build_dir_target)
zlib_install_dir_target = staging_dir / "zlib_install_target"
api.file.ensure_directory(
"create zlib_install_dir", zlib_install_dir_target
)
api.zlib.build(
cmake_extra_args=[
option.format(**arguments)
for option in options
+ platform_options_target
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET.
f"-DCMAKE_{lang}_FLAGS=--target={target_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ [
# TODO: once we no longer build libLTO, we can drop
# this.
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
]
],
platform=api.platform_util.platform(target_platform),
ninja_jobs=ninja_jobs,
build_dir=zlib_build_dir_target,
install_dir=zlib_install_dir_target,
)
use_zlib_options_target = [
f"-DZLIB_INCLUDE_DIR={zlib_install_dir_target / 'include'}",
f"-DZLIB_LIBRARY={zlib_install_dir_target / 'lib' / ('zlibstatic.lib' if api.platform.is_win else 'libz.a')}",
]
if host_triple != target_triple:
zlib_build_dir_host = staging_dir / "zlib_build_host"
api.file.ensure_directory("create zlib_build_dir", zlib_build_dir_host)
zlib_install_dir_host = staging_dir / "zlib_install_host"
api.file.ensure_directory(
"create zlib_install_dir", zlib_install_dir_host
)
api.zlib.build(
cmake_extra_args=[
option.format(**arguments)
for option in options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET.
f"-DCMAKE_{lang}_FLAGS=--target={host_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ [
# TODO: once we no longer build libLTO, we can drop
# this.
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
]
],
platform=api.platform_util.platform(host_platform),
ninja_jobs=ninja_jobs,
build_dir=zlib_build_dir_host,
install_dir=zlib_install_dir_host,
)
use_zlib_options_host = [
f"-DZLIB_INCLUDE_DIR={zlib_install_dir_host.joinpath('include')}",
f"-DZLIB_LIBRARY={zlib_install_dir_host.joinpath('lib', 'zlibstatic.lib' if api.platform.is_win else 'libz.a')}",
]
with api.step.nest("zstd"):
zstd_build_dir = staging_dir / "zstd_build"
api.file.ensure_directory("create zstd", zstd_build_dir)
zstd_install_dir = staging_dir / "zstd_install"
api.file.ensure_directory("create zstd_install_dir", zstd_install_dir)
api.zstd.build(
cmake_extra_args=[
option.format(**arguments)
for option in options
+ platform_options_target
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
f"-DCMAKE_{lang}_FLAGS=--target={target_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ [
"-DZSTD_BUILD_SHARED=OFF",
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
]
],
ninja_jobs=ninja_jobs,
build_dir=zstd_build_dir,
install_dir=zstd_install_dir,
)
with api.step.nest("libxml2"):
libxml2_install_dir_target = staging_dir / "libxml2_install_target"
api.file.ensure_directory(
"create libxml2_install_dir", libxml2_install_dir_target
)
libxml2_cmake_dir_target, libxml2_dir_target = build_libxml2(
api,
options
+ platform_options_target
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
f"-DCMAKE_{lang}_FLAGS=--target={target_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ use_zlib_options_target
+ [
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
],
arguments,
libxml2_install_dir_target,
ninja_jobs,
cipd_dir,
manifest,
dir_suffix="_target",
)
if host_triple != target_triple:
libxml2_install_dir_host = staging_dir / "libxml2_install_host"
api.file.ensure_directory(
"create libxml2_install_dir for host", libxml2_install_dir_host
)
libxml2_cmake_dir_host, _ = build_libxml2(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
f"-DCMAKE_{lang}_FLAGS=--target={host_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ use_zlib_options_host
+ [
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
],
arguments,
libxml2_install_dir_host,
ninja_jobs,
cipd_dir,
manifest,
dir_suffix="_host",
)
if api.platform.is_win:
with api.step.nest("yasm"):
yasm_install_dir = staging_dir / "yasm_install"
api.file.ensure_directory("create yasm install dir", yasm_install_dir)
build_yasm(
api,
options
+ platform_options_target
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
f"-DCMAKE_{lang}_FLAGS=--target={target_triple}"
for lang in ["C", "CXX", "ASM"]
],
arguments,
yasm_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
with api.step.nest("boringssl"):
boringssl_install_dir = staging_dir / "boringssl_install"
api.file.ensure_directory(
"create boringssl_install_dir", boringssl_install_dir
)
nasm_option = []
if api.platform.is_win:
nasm_option = [
f"-DCMAKE_ASM_NASM_COMPILER={yasm_install_dir.joinpath('bin', 'yasm.exe')}"
]
boringssl_pic_option = []
if build_lldb:
boringssl_pic_option = ["-DCMAKE_POSITION_INDEPENDENT_CODE=ON"]
boringssl_cmake_dir = build_boringssl(
api,
options
+ platform_options_target
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
f"-DCMAKE_{lang}_FLAGS=--target={target_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ [f"-DGO_EXECUTABLE={api.go.go_root.joinpath('bin', 'go')}"]
+ nasm_option
+ boringssl_pic_option,
arguments,
boringssl_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
with api.step.nest("curl"):
curl_install_dir = staging_dir / "curl_install"
api.file.ensure_directory("create curl install dir", curl_install_dir)
curl_cmake_dir = build_curl(
api,
options
+ platform_options_target
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
f"-DCMAKE_{lang}_FLAGS=--target={target_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ [
"-DCMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
f"-DOpenSSL_ROOT={boringssl_cmake_dir}",
]
+ use_zlib_options_target,
arguments,
curl_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
with api.step.nest("cpp-httplib"):
cpp_httplib_install_dir = staging_dir / "cpp_httplib_install"
api.file.ensure_directory(
"create cpp-httplib install dir", cpp_httplib_install_dir
)
cpp_httplib_cmake_dir = build_cpp_httplib(
api,
options
+ platform_options_target
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX,ASM}_COMPILER_TARGET
f"-DCMAKE_{lang}_FLAGS=--target={target_triple}"
for lang in ["C", "CXX", "ASM"]
]
+ [
"-DCMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
]
+ use_zlib_options_target,
arguments,
cpp_httplib_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
if build_lldb:
if should_build_ncurses:
with api.step.nest("ncurses"):
ncurses_install_dir = staging_dir / "ncurses_install"
api.file.ensure_directory(
"create ncurses install dir", ncurses_install_dir
)
build_ncurses(
api,
arguments,
ncurses_install_dir,
manifest,
)
with api.step.nest("libedit"):
libedit_install_dir = staging_dir / "libedit_install"
api.file.ensure_directory(
"create libedit install dir", libedit_install_dir
)
build_libedit(
api,
arguments,
ncurses_install_dir,
libedit_install_dir,
manifest,
)
if should_fetch_cpython:
cpython_install_dir = lldb_pkg_dir / "python3"
api.file.ensure_directory(
"create cpython install dir", cpython_install_dir
)
fetch_and_extract_windows_python(
api, CPYTHON_VERSION, cpython_install_dir
)
if should_build_cpython:
with api.step.nest("libffi"):
libffi_install_dir = staging_dir / "libffi_install"
api.file.ensure_directory(
"create libffi install dir", libffi_install_dir
)
build_libffi(
api,
arguments,
libffi_install_dir,
manifest,
)
with api.step.nest("cpython"):
zlib_install_dir = (
zlib_install_dir_host
if host_triple != target_triple
else zlib_install_dir_target
)
cpython_install_dir = lldb_pkg_dir / "python3"
api.file.ensure_directory(
"create cpython install dir", cpython_install_dir
)
build_cpython(
api,
arguments,
libedit_install_dir,
libffi_install_dir,
ncurses_install_dir,
zlib_install_dir,
cpython_install_dir,
manifest,
)
if api.platform.is_mac:
with api.step.nest("lldb keychain"):
make_lldb_keychain(api)
api.file.write_proto(
"source manifest", pkg_dir / "source_manifest.json", manifest, "JSONPB"
)
# build clang+llvm
build_dir = staging_dir / "llvm_build"
api.file.ensure_directory("create llvm build dir", build_dir)
        # The LLDB Python host looks for libraries relative to the executable,
        # so copy the libraries from the install directory into the
        # corresponding paths relative to the binaries used during testing.
if build_lldb:
lldb_build_dir = build_dir
if props.do_2stage:
lldb_build_dir = lldb_build_dir.joinpath(
"tools", "clang", "stage2-bins"
)
if should_build_cpython or should_fetch_cpython:
api.file.copytree(
"copy python libraries to build dir",
cpython_install_dir,
lldb_build_dir / "python3",
)
api.file.copytree(
"copy python libraries to unit test dir",
cpython_install_dir,
lldb_build_dir.joinpath(
"tools", "lldb", "unittests", "ScriptInterpreter", "python3"
),
)
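        # The {BOOTSTRAP_}/{STAGE2_}/{stage2_}/{_stage2} placeholders below
        # control how per-stage CMake options and target names are spelled.
        # For example, "-D{STAGE2_}LLVM_ENABLE_LTO=..." expands to
        # "-DSTAGE2_LLVM_ENABLE_LTO=..." in a two-stage build (the first
        # stage forwards STAGE2_-prefixed flags to the second stage) and to
        # "-DLLVM_ENABLE_LTO=..." in a single-stage build; likewise
        # "{stage2_}check-clang" becomes "stage2-check-clang" or
        # "check-clang" respectively.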
arguments.update(
{
"BOOTSTRAP_": "",
"STAGE2_": "STAGE2_",
"stage2_": "stage2-",
"_stage2": "",
}
if props.do_2stage
else {"BOOTSTRAP_": "", "STAGE2_": "", "stage2_": "", "_stage2": "-stage2"}
)
llvm_projects = ["clang", "clang-tools-extra", "lld", "llvm", "polly"]
llvm_runtimes = ["compiler-rt", "libcxx", "libcxxabi", "libunwind"]
options.extend(
[
"-D{BOOTSTRAP_}LLVM_ENABLE_ZLIB=FORCE_ON",
"-D{BOOTSTRAP_}ZLIB_INCLUDE_DIR=%s"
                % zlib_install_dir_target.joinpath("include"),
"-D{BOOTSTRAP_}ZLIB_LIBRARY=%s"
% zlib_install_dir_target.joinpath(
"lib", "zlibstatic.lib" if api.platform.is_win else "libz.a"
),
"-D{BOOTSTRAP_}LLVM_ENABLE_ZSTD=FORCE_ON",
"-D{BOOTSTRAP_}zstd_DIR=%s"
% zstd_install_dir.joinpath("lib", "cmake", "zstd"),
"-D{BOOTSTRAP_}LLVM_ENABLE_LIBXML2=FORCE_ON",
# Note that the LibXml2 spelling has to match the spelling used in
# https://github.com/llvm/llvm-project/blob/cf54424a/llvm/cmake/config-ix.cmake#L144
"-D{BOOTSTRAP_}LibXml2_ROOT=%s" % libxml2_cmake_dir_target,
"-D{BOOTSTRAP_}LLVM_ENABLE_CURL=FORCE_ON",
"-D{BOOTSTRAP_}CURL_ROOT=%s" % curl_cmake_dir,
"-D{BOOTSTRAP_}OpenSSL_ROOT=%s" % boringssl_cmake_dir,
"-D{BOOTSTRAP_}LLVM_ENABLE_HTTPLIB=FORCE_ON",
"-D{BOOTSTRAP_}httplib_ROOT=%s" % cpp_httplib_cmake_dir,
"-D{BOOTSTRAP_}CMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
]
)
if not api.platform.is_win and host_triple == target_triple:
options.extend(
[
"-DLLVM_WINSYSROOT=%s" % win_sdk_dir,
"-DLLVM_VFSOVERLAY=%s"
% win_sdk_dir.joinpath("llvm-vfsoverlay.yaml"),
]
)
if api.platform.name == "linux":
options.extend(
[
# Unprefixed flags are used by the first stage compiler.
f"-DCMAKE_{mode}_LINKER_FLAGS=-static-libstdc++"
for mode in ["SHARED", "MODULE", "EXE"]
]
)
if props.do_2stage:
# TODO(fxbug.dev/81937)
                # This is a temporary workaround for the out-of-memory issue
                # that linux-arm64 builders are running into, and should be
                # removed when we implement support for fat LTO in Clang.
if api.platform.arch == "arm":
options.extend(
[
"-D{STAGE2_}LLVM_PARALLEL_LINK_JOBS=8",
]
)
elif api.platform.name == "mac":
if props.do_2stage:
options.extend(
[
# BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
"-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
% (mode, build_dir.joinpath("lib", "libc++.a"))
for mode in ["SHARED", "MODULE", "EXE"]
]
+ [
# Unprefixed flags are used by the first stage compiler.
f"-DCMAKE_{mode}_LINKER_FLAGS=-nostdlib++ {cipd_dir.joinpath('lib', 'libc++.a')}"
for mode in ["SHARED", "MODULE", "EXE"]
]
+ (["-D{STAGE2_}CMAKE_LINKER={ld}"] if not props.enable_lld else [])
)
else:
options.extend(
[
# BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
"-DCMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
% (mode, cipd_dir.joinpath("lib", "libc++.a"))
for mode in ["SHARED", "MODULE", "EXE"]
]
)
if host_triple != target_triple:
arguments.update(
{
"BOOTSTRAP_": "BOOTSTRAP_",
}
)
options.extend(
[
"-D{BOOTSTRAP_}CMAKE_SYSTEM_NAME=%s" % system_name,
"-D{BOOTSTRAP_}CMAKE_SYSTEM_PROCESSOR=%s" % system_processor,
"-D{BOOTSTRAP_}CMAKE_C_COMPILER_TARGET={target_triple}",
"-D{BOOTSTRAP_}CMAKE_CXX_COMPILER_TARGET={target_triple}",
"-D{BOOTSTRAP_}CMAKE_ASM_COMPILER_TARGET={target_triple}",
"-D{BOOTSTRAP_}LLVM_DEFAULT_TARGET_TRIPLE={target_triple}",
"-DLLVM_ENABLE_ZLIB=FORCE_ON",
"-DZLIB_INCLUDE_DIR=%s" % zlib_install_dir_host.join("include"),
"-DZLIB_LIBRARY=%s"
% zlib_install_dir_host.joinpath(
"lib", "zlibstatic.lib" if api.platform.is_win else "libz.a"
),
"-DLLVM_ENABLE_LIBXML2=FORCE_ON",
# Note that the LibXml2 spelling has to match the spelling used in
# https://github.com/llvm/llvm-project/blob/cf54424a/llvm/cmake/config-ix.cmake#L144
"-DLibXml2_ROOT=%s" % libxml2_cmake_dir_host,
"-DCMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
]
)
# STAGE2_ prefixed flags are passed to the second stage by the first stage build.
options.extend(
[
"-D{STAGE2_}LINUX_aarch64-unknown-linux-gnu_SYSROOT={linux_sysroot}",
"-D{STAGE2_}LINUX_armv7-unknown-linux-gnueabihf_SYSROOT={linux_sysroot}",
"-D{STAGE2_}LINUX_i386-unknown-linux-gnu_SYSROOT={linux_sysroot}",
"-D{STAGE2_}LINUX_riscv64-unknown-linux-gnu_SYSROOT=%s"
% cipd_dir.joinpath("focal"),
"-D{STAGE2_}LINUX_x86_64-unknown-linux-gnu_SYSROOT={linux_sysroot}",
"-D{STAGE2_}FUCHSIA_SDK={fuchsia_sdk}",
"-D{STAGE2_}LLVM_LIT_ARGS=--resultdb-output=%s -v" % RESULTDB_JSON,
]
)
cross_native_flags = [
"-DCMAKE_C_COMPILER={cc}",
"-DCMAKE_CXX_COMPILER={cxx}",
"-DCMAKE_%sSYSROOT={host_sysroot}"
% ("OSX_" if api.platform.is_mac else ""),
]
if api.platform.is_mac:
cross_native_flags.extend(
[
f"-DCMAKE_{mode}_LINKER_FLAGS=-nostdlib++ {cipd_dir.joinpath('lib', 'libc++.a')}"
for mode in ["SHARED", "MODULE", "EXE"]
]
)
# TODO(fxbug.dev/127486): The stage2 build of the host tools in a cross compile doesn't
# correctly use CMAKE_C{,XX}_COMPILER. This solution isn't ideal because it uses the
# compiler from cipd and not the one we just built in stage1, but this is better than it
# using a system compiler that no longer exists. See https://reviews.llvm.org/D126313#3543242.
options.append(
"-D{STAGE2_}CROSS_TOOLCHAIN_FLAGS_NATIVE=" + ";".join(cross_native_flags)
)
options.append(
"-D{STAGE2_}LLVM_ENABLE_LTO=%s" % LTOMode.Name(props.lto_mode).title()
)
if props.enable_lld:
options.append("-D{STAGE2_}LLVM_ENABLE_LLD=%s" % props.enable_lld)
if props.enable_assertions:
options.append(
"-D{STAGE2_}LLVM_ENABLE_ASSERTIONS=%s" % props.enable_assertions
)
if props.enable_backtraces:
options.append(
"-D{STAGE2_}LLVM_ENABLE_BACKTRACES=%s" % props.enable_backtraces
)
        if target_platform not in [
"linux-arm64",
"mac-arm64",
]:
options.extend(
[
"-D{STAGE2_}TENSORFLOW_AOT_PATH=%s" % api.tensorflow.path,
"-D{STAGE2_}LLVM_OVERRIDE_MODEL_HEADER_INLINERSIZEMODEL=%s"
% cipd_dir.joinpath("model", "InlinerSizeModel.h"),
"-D{STAGE2_}LLVM_OVERRIDE_MODEL_OBJECT_INLINERSIZEMODEL=%s"
% cipd_dir.joinpath("model", "InlinerSizeModel.o"),
"-D{STAGE2_}LLVM_RAEVICT_MODEL_PATH=none",
]
)
if build_lldb:
if api.platform.is_mac:
if should_build_cpython:
python3_rpath = "@loader_path/../python3"
python3_libraries = (
f"{cpython_install_dir}/lib/lib{PYTHON_MAJOR_MINOR}.dylib"
)
options.append(
                    # This is the default identity, but LLDB's CMake looks for
                    # it in the system keychain. We can't add a certificate to
                    # the system keychain without GUI access, but the keychain
                    # created earlier is on the search path, so setting this
                    # explicitly works despite the warning.
"-D{BOOTSTRAP_}LLDB_CODESIGN_IDENTITY=lldb_codesign",
)
elif should_build_cpython or should_fetch_cpython:
python3_rpath = "$ORIGIN/../python3/lib"
if api.platform.is_win:
python3_libraries = f"{cpython_install_dir}/libs/{PYTHON_MAJOR_MINOR.replace('.','')}.lib"
else:
python3_libraries = (
f"{cpython_install_dir}/lib/lib{PYTHON_MAJOR_MINOR}.so.1.0"
)
options.extend(
[
"-DFUCHSIA_ENABLE_LLDB=ON",
f"-DLLDB_ENABLE_PYTHON={'ON' if (should_build_cpython or should_fetch_cpython) else 'OFF'}",
]
)
if should_build_ncurses:
options.extend(
[
"-D{BOOTSTRAP_}LLDB_ENABLE_CURSES=ON",
# Libedit requires ncurses for terminfo, but the wide
# character header check in LLDB does not include any
# library dependencies.
"-D{BOOTSTRAP_}LLDB_EDITLINE_USE_WCHAR=ON",
"-D{BOOTSTRAP_}CURSES_INCLUDE_DIRS="
+ f"{ncurses_install_dir}/include;{ncurses_install_dir}/include/ncurses",
"-D{BOOTSTRAP_}CURSES_LIBRARIES="
+ f"{ncurses_install_dir}/lib/libncurses.a",
"-D{BOOTSTRAP_}PANEL_LIBRARIES="
+ f"{ncurses_install_dir}/lib/libpanel.a",
"-D{BOOTSTRAP_}LLVM_ENABLE_TERMINFO=FORCE_ON",
"-D{BOOTSTRAP_}Terminfo_LIBRARIES="
+ f"{ncurses_install_dir}/lib/libncurses.a",
"-D{BOOTSTRAP_}LLVM_ENABLE_LIBEDIT=FORCE_ON",
"-D{BOOTSTRAP_}LLDB_ENABLE_LIBEDIT=ON",
"-D{BOOTSTRAP_}LibEdit_INCLUDE_DIRS="
+ f"{libedit_install_dir}/include",
"-D{BOOTSTRAP_}LibEdit_LIBRARIES="
+ f"{libedit_install_dir}/lib/libedit.a",
]
)
if should_build_cpython or should_fetch_cpython:
options.extend(
[
"-DLLDB_EMBED_PYTHON_HOME=ON",
# Python directories are searched for in
# $PYTHONHOME/lib/python<MAJOR>.<MINOR>, relative to
# liblldb
"-DLLDB_PYTHON_HOME=../python3",
f"-DPython3_LIBRARIES={python3_libraries}",
f"-DPython3_RPATH={python3_rpath}",
"-D{BOOTSTRAP_}LLDB_TEST_USER_ARGS=--skip-category=pexpect",
]
)
if should_build_cpython:
options.extend(
[
                        # This is the path, relative to the install prefix, where
                        # the LLDB python packages are installed. The autodetect
                        # script doesn't like the fake prefix and defaults to
                        # lib/python<x.xx>.
f"-DLLDB_PYTHON_RELATIVE_PATH=python3/lib/{PYTHON_MAJOR_MINOR}/site-packages",
f"-DPython3_EXECUTABLE={cpython_install_dir}/bin/python3",
f"-DPython3_INCLUDE_DIRS={cpython_install_dir}/include/{PYTHON_MAJOR_MINOR}",
]
)
if should_fetch_cpython:
options.extend(
[
                        # This is the path, relative to the install prefix, where
                        # the LLDB python packages are installed. The autodetect
                        # script doesn't like the fake prefix and defaults to
                        # lib/python<x.xx>.
"-DLLDB_PYTHON_RELATIVE_PATH=python3/lib/site-packages",
f"-DPython3_EXECUTABLE={cpython_install_dir}/python.exe",
f"-DPython3_INCLUDE_DIRS={cpython_install_dir}/include",
]
)
if api.platform.is_win:
            # On Windows, cmake cannot find swig automatically even though it
            # is placed in PATH. Use SWIG_DIR and SWIG_EXECUTABLE to configure
            # it explicitly.
options.extend(
[
"-DSWIG_DIR=" + f"{cipd_dir / 'Lib'}",
"-DSWIG_EXECUTABLE=" + f"{cipd_dir / 'swig.exe'}",
]
)
        # Directory for storing clang crash reproducers during the clang build.
crash_report_dir = build_dir / "clang-crashreports"
env = {
"CLANG_MODULE_CACHE_PATH": "",
"CLANG_CRASH_DIAGNOSTICS_DIR": crash_report_dir,
}
if api.platform.is_linux:
env_prefixes = {"PATH": [cipd_dir.joinpath("binutils-gdb", "bin")]}
else:
env_prefixes = {}
def maybe_run_tests(step_name, projects):
if host_platform == target_platform:
with api.context(env=env, env_prefixes=env_prefixes), resultdb_context(
api, build_dir
):
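                    # -k0 tells Ninja to keep going past failures so one
                    # failing suite doesn't hide results from the others.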
api.ninja(
step_name,
[f"-j{int(api.goma.jobs)}", "-k0"]
+ [
("{stage2_}check-" + project).format(**arguments)
for project in projects
],
build_dir=build_dir,
)
        # Upload all clang dependencies to CAS.
install_dirs = api.file.glob_paths(
"find dependency install dirs", staging_dir, "*install*"
)
if build_lldb:
# This directory contains CPython, since it's installed with LLDB.
install_dirs.append(lldb_pkg_dir)
api.cas_util.upload(
staging_dir,
install_dirs,
step_name="upload dependencies",
output_property="deps_isolated",
)
with api.step.nest("clang"), api.context(env=env):
api.cmake(
step_name="configure",
src_dir=llvm_dir / "llvm",
build_dir=build_dir,
extra_args=[option.format(**arguments) for option in options]
+ [
"-C",
llvm_dir.joinpath(
"clang",
"cmake",
"caches",
"Fuchsia{_stage2}.cmake".format(**arguments),
),
],
)
api.file.read_text(
"read CMakeConfigureLog.yaml",
build_dir.joinpath("CMakeFiles", "CMakeConfigureLog.yaml"),
)
targets = ["{stage2_}toolchain-distribution".format(**arguments)]
if build_lldb:
targets.append("{stage2_}debugger-distribution".format(**arguments))
try:
# Build the full (two-stage) distribution.
api.ninja(
"build",
targets,
# This only applies to the first stage, second stage is
# invoked by CMake as a subprocess and will use Ninja's
# default.
ninja_jobs=ninja_jobs,
build_dir=build_dir,
)
except api.step.StepFailure:
upload_crash_reproducer(
api,
crash_report_dir,
props.artifact_gcs_bucket,
api.buildbucket_util.id,
)
# Extract build traces.
upload_build_traces(
api,
use_ninjatrace,
cipd_dir,
build_dir,
props.artifact_gcs_bucket,
api.buildbucket_util.id,
)
with api.context(env={"DESTDIR": pkg_dir}):
api.ninja(
"install",
[
"{stage2_}install-toolchain-distribution".format(**arguments),
],
build_dir=build_dir,
)
if build_lldb:
with api.context(env={"DESTDIR": lldb_pkg_dir}):
api.ninja(
"install",
[
"{stage2_}install-debugger-distribution".format(
**arguments
),
],
build_dir=build_dir,
)
if api.platform.is_win:
# Copy python.dll to lldb.exe directory due to Windows
# DLL search path limitations.
dllname = f"{PYTHON_MAJOR_MINOR.replace('.','')}.dll"
api.file.copy(
"copy python.dll",
cpython_install_dir / dllname,
lldb_pkg_dir.joinpath("bin", dllname),
)
# Upload toolchain to CAS before running clang tests.
with api.context(cwd=api.path.start_dir):
clang_version = read_clang_version_from_build(
api, build_dir, props.do_2stage
)
generate_clang_runtime_json(
api,
pkg_dir,
cipd_dir,
use_breakpad,
clang_version,
)
generate_license(
api,
llvm_dir,
pkg_dir,
llvm_projects + llvm_runtimes,
libxml2_dir_target,
should_build_ncurses,
should_build_cpython,
is_lldb=False,
)
cas_digest = api.cas_util.upload(pkg_dir, output_property="isolated")
if build_lldb:
with api.step.nest("lldb"):
generate_license(
api,
llvm_dir,
lldb_pkg_dir,
["clang", "llvm"],
libxml2_dir_target,
should_build_ncurses,
should_build_cpython,
is_lldb=True,
)
api.cas_util.upload(lldb_pkg_dir, output_property="lldb_isolated")
# Run the tests.
projects = ["clang", "lld", "llvm", "polly"]
# TODO(leonardchan): run host runtime tests for mac.
if (
host_triple
in [
"x86_64-unknown-linux-gnu",
"aarch64-unknown-linux-gnu",
"x86_64-pc-windows-msvc",
]
and not props.do_2stage
):
projects += ["runtimes-" + host_triple]
maybe_run_tests("test", projects)
def maybe_upload(name, pkg_dir):
if props.upload_package and clang_version:
# The published package has the same name for every platform.
api.cipd_util.upload_package(
f"fuchsia/third_party/{name}/{target_platform}",
pkg_dir,
search_tag={"git_revision": revision},
repository=props.repository,
metadata=[("version", clang_version)],
)
        # Upload the clang package and trigger downstream builds if the tests didn't fail.
maybe_upload("clang", pkg_dir)
# TODO(phosek): move this logic to clang_trigger.py recipe.
# Suppress triggering from the RBE-built tools for now.
if (
cas_digest
and not props.use_rbe
        and target_platform
        not in [
"linux-arm64",
"windows-amd64",
]
):
# Do a full integration build. This will use the just-built toolchain
# to build all of Fuchsia to check whether there are any regressions.
if props.builders and target_platform in props.builders:
api.toolchain.trigger_build(
"clang_toolchain",
props.repository,
revision,
cas_digest,
builders=props.builders[target_platform],
fuchsia_ref="refs/heads/releases/canary",
)
if props.tot_builders and target_platform in props.tot_builders:
api.toolchain.trigger_build(
"clang_toolchain",
props.repository,
revision,
cas_digest,
builders=props.tot_builders[target_platform],
fuchsia_ref="refs/heads/main",
)
# TODO(https://fxbug.dev/42074681): Enable LLDB tests on Windows once python binding is working.
if build_lldb:
with api.step.nest("lldb"):
xfail_tests = [
# http://fxbug.dev/132371
"lldb-api :: commands/expression/import-std-module/array/TestArrayFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/deque-basic/TestDequeFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/deque-dbg-info-content/TestDbgInfoContentDequeFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/queue/TestQueueFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/vector-dbg-info-content/TestDbgInfoContentVectorFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/vector-of-vectors/TestVectorOfVectorsFromStdModule.py",
# http://fxbug.dev/132374
"lldb-api :: lang/c/global_variables/TestGlobalVariables.py",
"lldb-api :: lang/cpp/char8_t/TestCxxChar8_t.py",
# http://fxbug.dev/132375
"lldb-api :: functionalities/data-formatter/data-formatter-stl/libcxx/function/TestLibCxxFunction.py",
"lldb-api :: lang/cpp/std-function-step-into-callable/TestStdFunctionStepIntoCallable.py",
# http://fxbug.dev/132379
"lldb-api :: functionalities/target-new-solib-notifications/TestModuleLoadedNotifys.py",
# http://fxbug.dev/132431
"lldb-api :: lang/cpp/stl/TestStdCXXDisassembly.py",
]
filter_regexps = []
filter_out_regexps = [
# Fail to build test application
"data-formatter-stl/(generic|libstdcpp)",
"functionalities/postmortem/FreeBSDKernel/TestFreeBSDKernelVMCore.py",
"iohandler/unicode/TestUnicode.py",
                    # TODO(fxbug.dev/331486430): TestConcurrentVFork may leave
                    # a few processes behind without cleaning up, causing the
                    # test to time out. Disable it for now.
"functionalities/fork/concurrent_vfork/TestConcurrentVFork.py",
# TODO(https://issues.fuchsia.dev/337293279) Once we can
# handle psutil tests, TestDAP_console.py should be removed
# from this list
"tools/lldb-dap/console/TestDAP_console.py",
]
if api.platform.is_linux:
xfail_tests.extend(
[
# http://fxbug.dev/132374
"lldb-api :: commands/target/basic/TestTargetCommand.py",
]
)
elif api.platform.is_mac:
# Just run a smoke test. See http://fxbug.dev/319159788.
filter_regexps.append("commands/expression")
if target_platform == "linux-arm64":
xfail_tests.extend(
[
# http://fxbug.dev/322406146
"lldb-api :: commands/expression/import-std-module/basic/TestImportStdModule.py",
"lldb-api :: commands/expression/import-std-module/conflicts/TestStdModuleWithConflicts.py",
"lldb-api :: commands/expression/import-std-module/forward_list-dbg-info-content/TestDbgInfoContentForwardListFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/forward_list/TestForwardListFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/iterator/TestIteratorFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/list-dbg-info-content/TestDbgInfoContentListFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/list/TestListFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/retry-with-std-module/TestRetryWithStdModule.py",
"lldb-api :: commands/expression/import-std-module/shared_ptr-dbg-info-content/TestSharedPtrDbgInfoContentFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/shared_ptr/TestSharedPtrFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/sysroot/TestStdModuleSysroot.py",
"lldb-api :: commands/expression/import-std-module/weak_ptr-dbg-info-content/TestDbgInfoContentWeakPtrFromStdModule.py",
"lldb-api :: commands/expression/import-std-module/weak_ptr/TestWeakPtrFromStdModule.py",
]
)
else:
xfail_tests.extend(
[
# http://fxbug.dev/132425
"lldb-api :: lang/c/calling-conventions/TestCCallingConventions.py",
]
)
if should_run_lldb_test:
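                    # LIT_XFAIL marks the listed tests as expected failures,
                    # while LIT_FILTER/LIT_FILTER_OUT select and exclude tests
                    # by regular expression; all three are honored by llvm-lit.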
with api.context(
env={
"LIT_XFAIL": ";".join(xfail_tests),
"LIT_FILTER": "|".join(filter_regexps),
"LIT_FILTER_OUT": "|".join(filter_out_regexps),
}
):
# Separately test lldb and upload it if those tests succeed.
# If this step fails, it prevents uploading lldb but not clang.
maybe_run_tests("test", ["lldb"])
maybe_upload("lldb", lldb_pkg_dir)
def checkout_source(api, git, ref, src_dir, manifest):
_, revision = api.git_checkout(git, path=src_dir, revision=ref)
git_checkout = manifest.directories[str(src_dir)].git_checkout
git_checkout.repo_url = git
git_checkout.revision = revision
@contextlib.contextmanager
def resultdb_context(api, build_dir):
try:
yield
finally:
upload_to_resultdb(api, build_dir)
def upload_to_resultdb(api, build_dir):
if not api.resultdb.enabled:
return # pragma: no cover
test_data = f"{build_dir.joinpath(RESULTDB_JSON)}\n{build_dir.joinpath('test', RESULTDB_JSON)}\n"
    # api.path.glob does not handle long paths on Windows; use a python3
    # script to mitigate this issue (fxbug.dev/98099).
results = api.python3(
"collect results.json",
[
api.resource("find_files.py"),
"--dir",
build_dir,
"--pattern",
f"**/{RESULTDB_JSON}",
],
step_test_data=lambda: api.raw_io.test_api.stream_output_text(test_data),
stdout=api.raw_io.output_text(add_output_log=True),
)
results_paths = results.stdout.splitlines()
if not results_paths:
return # pragma: no cover
resultdb_base_variant = {
"bucket": api.buildbucket.build.builder.bucket,
"builder": api.buildbucket.build.builder.builder,
}
cmd = ["vpython3", api.resource("resultdb.py")]
cmd.extend(f"--json={p}" for p in results_paths)
api.step(
"resultdb",
api.resultdb.wrap(cmd, base_variant=resultdb_base_variant.copy(), include=True),
)
def fetch_and_extract_windows_python(api, py_version, install_dir):
list_of_installers = [
f"https://www.python.org/ftp/python/{py_version}/amd64/core.msi",
f"https://www.python.org/ftp/python/{py_version}/amd64/dev.msi",
f"https://www.python.org/ftp/python/{py_version}/amd64/doc.msi",
f"https://www.python.org/ftp/python/{py_version}/amd64/exe.msi",
f"https://www.python.org/ftp/python/{py_version}/amd64/lib.msi",
]
download_dir = api.path.start_dir / "msi"
api.file.ensure_directory("create python download dir", download_dir)
downloaded_installer = []
with api.step.nest("fetch cpython"):
for item in list_of_installers:
filename = item.split("/")[-1].strip()
file_path = download_dir / filename
api.url.get_file(item, file_path, step_name=f"Download {filename}")
downloaded_installer.append(file_path)
with api.step.nest("extract cpython"):
for item in downloaded_installer:
filename = str(item)
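            # "msiexec /a" performs an administrative install, which simply
            # extracts the MSI payload into targetdir without registering the
            # product with the system.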
api.step(
f"install {filename}",
[
"msiexec.exe",
"/a",
item,
f"targetdir={install_dir}",
"/quiet",
],
)
def build_libxml2(
api,
options,
arguments,
destdir,
ninja_jobs,
cipd_dir,
manifest,
dir_suffix,
):
# libxml2 requires CMAKE_INSTALL_PREFIX to be set to a valid path so replace
# an empty prefix with the destination directory.
install_prefix = "-DCMAKE_INSTALL_PREFIX="
options = [
install_prefix + str(destdir) if option == install_prefix else option
for option in options
]
libxml2_dir = api.path.start_dir.joinpath("libxml2%s" % dir_suffix)
src_dir = libxml2_dir / "src"
checkout_source(api, LIBXML2_GIT, "refs/tags/v2.9.12", src_dir, manifest)
build_dir = libxml2_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
api.cmake.build_with_ninja(
src_dir=src_dir,
build_dir=build_dir,
build_type="Release",
extra_args=[option.format(**arguments) for option in options]
+ [
"-DBUILD_SHARED_LIBS=OFF",
"-DLIBXML2_WITH_ICONV=OFF",
"-DLIBXML2_WITH_ICU=OFF",
"-DLIBXML2_WITH_LZMA=OFF",
"-DLIBXML2_WITH_PYTHON=OFF",
"-DLIBXML2_WITH_TESTS=OFF",
# TODO(phosek): lld only links libxml2 and not zlib, so when
# zlib support in libxml2 is enabled, this fails in the case of
# static linking.
"-DLIBXML2_WITH_ZLIB=OFF",
],
ninja_jobs=ninja_jobs,
)
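    # Note: the cmake package directory name below comes from libxml2's own
    # package version string, so it does not necessarily match the checkout
    # tag above.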
return destdir.joinpath("lib", "cmake", "libxml2-2.9.10"), libxml2_dir
def build_boringssl(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
base_dir = api.path.start_dir / "boringssl"
src_dir = base_dir / "src"
checkout_source(
api,
BORINGSSL_GIT,
"01d195bd03bfff54dc99c0df0858197c71d35417",
src_dir,
manifest,
)
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
api.cmake.build_with_ninja(
src_dir=src_dir,
build_dir=build_dir,
install_dir=destdir,
build_type="Release",
extra_args=[option.format(**arguments) for option in options],
ninja_jobs=ninja_jobs,
)
return destdir.joinpath("lib", "cmake", "OpenSSL")
def build_curl(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
base_dir = api.path.start_dir / "curl"
src_dir = base_dir / "src"
checkout_source(api, CURL_GIT, "refs/tags/curl-7_82_0", src_dir, manifest)
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
api.cmake.build_with_ninja(
src_dir=src_dir,
build_dir=build_dir,
install_dir=destdir,
build_type="Release",
extra_args=[option.format(**arguments) for option in options]
+ [
"-DBUILD_SHARED_LIBS=OFF",
"-DCURL_USE_OPENSSL=ON",
],
ninja_jobs=ninja_jobs,
)
return destdir.joinpath("lib", "cmake", "CURL")
def build_yasm(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
base_dir = api.path.start_dir / "yasm"
src_dir = base_dir / "src"
checkout_source(api, YASM_GIT, "refs/tags/v1.3.0", src_dir, manifest)
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
api.cmake.build_with_ninja(
src_dir=src_dir,
build_dir=build_dir,
install_dir=destdir,
build_type="Release",
extra_args=[option.format(**arguments) for option in options],
ninja_jobs=ninja_jobs,
)
def build_cpp_httplib(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
# cpp-httplib requires CMAKE_INSTALL_PREFIX to be set to a valid path so
# replace an empty prefix with the destination directory.
install_prefix = "-DCMAKE_INSTALL_PREFIX="
options = [
install_prefix + str(destdir) if option == install_prefix else option
for option in options
]
base_dir = api.path.start_dir / "cpp-httplib"
src_dir = base_dir / "src"
checkout_source(api, CPP_HTTPLIB_GIT, "refs/tags/v0.12.2", src_dir, manifest)
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
api.cmake(
step_name="configure",
src_dir=src_dir,
build_dir=build_dir,
build_type="Release",
extra_args=[option.format(**arguments) for option in options]
+ [
"-DHTTPLIB_USE_BROTLI_IF_AVAILABLE=OFF",
# TODO(fxbug.dev/125958): Enable boringssl in cpp-httplib.
"-DHTTPLIB_USE_OPENSSL_IF_AVAILABLE=OFF",
"-DHTTPLIB_REQUIRE_ZLIB=ON",
],
)
# This is a header-only library, so there's no build step.
api.ninja(
"install",
["install"],
build_dir=build_dir,
)
return destdir.joinpath("lib", "cmake", "httplib")
COMMON_CONFIGURE_OPTIONS = [
"CC={cc}",
"CXX={cxx}",
"AR={ar}",
"LD={ld}",
"RANLIB={ranlib}",
"NM={nm}",
"STRIP={strip}",
"OBJCOPY={objcopy}",
]
def build_ncurses(api, arguments, destdir, manifest):
base_dir = api.path.start_dir / "ncurses"
src_dir = base_dir / "src"
checkout_source(api, NCURSES_GIT, "refs/tags/v6.4", src_dir, manifest)
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
with api.context(cwd=build_dir):
api.step(
"configure",
[src_dir / "configure"]
+ [
option.format(**arguments)
for option in [
"--prefix=",
"--host={target_triple}",
"--with-terminfo-dirs=/etc/terminfo:/lib/terminfo:/usr/share/terminfo",
"--with-default-terminfo-dir=/usr/share/terminfo",
"--disable-stripping",
*COMMON_CONFIGURE_OPTIONS,
"CPPFLAGS=--sysroot={host_sysroot} --target={target_triple}",
"CFLAGS={cflags} -fPIC -O3",
"CXXFLAGS={cxxflags} -fPIC -O3",
"LDFLAGS=--sysroot={host_sysroot} --target={target_triple} {ldflags}",
]
],
)
api.step("build", ["make", f"-j{api.platform.cpu_count}"])
api.step("install", ["make", "install", f"DESTDIR={destdir}"])
def build_libedit(api, arguments, ncurses_install_dir, destdir, manifest):
base_dir = api.path.start_dir / "libedit"
src_dir = base_dir / "src"
checkout_source(api, LIBEDIT_GIT, "refs/tags/v20221030-3.1", src_dir, manifest)
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
with api.context(cwd=build_dir):
        # Git does not preserve timestamps, so make would consider the
        # generated files out of date and attempt to re-run autotools.
        # See https://www.gnu.org/software/automake/manual/html_node/CVS.html
api.step(
"touch generated files",
[
"touch",
src_dir / "aclocal.m4",
src_dir / "configure",
src_dir / "config.h.in",
*api.file.glob_paths("find makefiles", src_dir, "**/Makefile.in"),
],
)
api.step(
"configure",
[src_dir / "configure"]
+ [
option.format(**arguments)
for option in [
"--prefix=",
"--host={target_triple}",
"--disable-shared",
"--with-pic",
*COMMON_CONFIGURE_OPTIONS,
                    # Note: --with-sysroot does not affect compilation tests,
                    # so using it fails on macOS.
"CPPFLAGS=--sysroot={host_sysroot} --target={target_triple}"
+ f" -I{ncurses_install_dir.joinpath('include')} -I{ncurses_install_dir.joinpath('include', 'ncurses')}",
"CFLAGS=-O3 {cflags}",
"LDFLAGS=--sysroot={host_sysroot} --target={target_triple} {ldflags}"
+ f" -L{ncurses_install_dir / 'lib'}",
]
],
)
api.step("build", ["make", f"-j{api.platform.cpu_count}"])
api.step("install", ["make", "install", f"DESTDIR={destdir}"])
def build_libffi(api, arguments, destdir, manifest):
base_dir = api.path.start_dir / "libffi"
src_dir = base_dir / "src"
checkout_source(api, LIBFFI_GIT, "refs/tags/v3.4.4", src_dir, manifest)
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
with api.context(cwd=build_dir):
api.step(
"configure",
[src_dir / "configure"]
+ [
option.format(**arguments)
for option in [
"--prefix=",
"--host={target_triple}",
"--disable-docs",
"--disable-shared",
*COMMON_CONFIGURE_OPTIONS,
"CPPFLAGS=--sysroot={host_sysroot} --target={target_triple}",
"CFLAGS=-O3 -fPIC -Wno-error=implicit-function-declaration {cflags}",
"LDFLAGS=--sysroot={host_sysroot} --target={target_triple} {ldflags}",
]
],
)
api.step("build", ["make", f"-j{api.platform.cpu_count}"])
api.step("install", ["make", "install", f"DESTDIR={destdir}"])
def build_cpython(
api,
arguments,
libedit_install_dir,
libffi_install_dir,
ncurses_install_dir,
zlib_install_dir,
destdir,
manifest,
):
base_dir = api.path.start_dir / "cpython"
src_dir = base_dir / "src"
checkout_source(
api, CPYTHON_GIT, "refs/tags/v" + CPYTHON_VERSION, src_dir, manifest
)
# Rpath works differently on Darwin, and Python's Makefile already includes
# irrevocable handling of it that precludes a portable installation. Patch
# the Makefile to set up a relative dylib search.
if api.platform.is_mac:
with api.context(cwd=src_dir):
api.git.apply_patchfile(api.resource("cpython-darwin.patch"))
build_dir = base_dir / "build"
api.file.ensure_directory("make build dir", build_dir)
# These directories have their include and lib directories added formulaically.
dep_dirs = [
libedit_install_dir,
libffi_install_dir,
ncurses_install_dir,
zlib_install_dir,
]
include_args = "--sysroot={host_sysroot} " + " ".join(
[f"-I{dep_dir / 'include'}" for dep_dir in dep_dirs]
+ [f"-I{ncurses_install_dir.joinpath('include').joinpath('ncurses')}"]
)
cpython_ldflags = ""
if not api.platform.is_mac:
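        # "$$ORIGIN" is escaped for make; the linker records "$ORIGIN", which
        # the dynamic loader resolves to the directory containing the binary.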
cpython_ldflags = "-Wl,-rpath,'$$ORIGIN'/../lib "
cpython_ldflags += "--sysroot={host_sysroot} " + " ".join(
[f"-L{dep_dir / 'lib'}" for dep_dir in dep_dirs]
)
with api.context(cwd=build_dir):
api.step(
"configure",
[src_dir / "configure"]
+ [
option.format(**arguments)
for option in [
# While this leaks the destination directory into Python,
# Python's build and install process currently requires a
# real prefix to work properly. It's possible to fake the
# prefix with "/.", but this breaks the later installation
# of pip and setuptools.
f"--prefix={destdir}",
"--enable-shared",
"--enable-optimizations",
"--with-readline=editline",
*COMMON_CONFIGURE_OPTIONS,
f"CPPFLAGS={include_args}",
# On Darwin CPPFLAGS isn't forwarded to the module build.
f"CFLAGS=-O3 {include_args}" + " {cflags}",
f"LDFLAGS={cpython_ldflags}" + " {ldflags}",
]
],
)
api.step("build", ["make", f"-j{api.platform.cpu_count}"])
api.step("install", ["make", "install"])
# Derive the string "python<MAJOR>.<MINOR>" from the version.
PYTHON_MAJOR_MINOR = f'python{".".join(CPYTHON_VERSION.split(".")[:2])}'
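# For example, CPYTHON_VERSION = "3.11.4" yields "python3.11".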
# Make a "lldb" keychain containing a self-signing key "lldb_codesign". Both
# LLDB's server and test binaries must be signed to allow attaching a debugger
# in the test suite without GUI interaction. Making a new keychain is somewhat
# circuitous compared to importing the key into the system or user keychain,
# but both of those require GUI interaction, while this procedure can succeed
# headlessly, and it still allows codesign to use the key.
def make_lldb_keychain(api):
# Builders are not hermetic, so delete the keychain if it already exists.
api.step(
"delete keychain",
["security", "delete-keychain", "lldb"],
ok_ret="any",
)
api.step(
"create keychain",
["security", "create-keychain", "-p", "lldb_codesign", "lldb"],
)
api.step(
"add keychain to search list",
["security", "list-keychains", "-s", "lldb"],
)
api.step(
"remove keychain default settings",
["security", "set-keychain-settings", "lldb"],
)
api.step(
"import cert",
[
"security",
"import",
api.resource("lldb_codesign.p12"),
"-P",
"lldb_codesign",
"-k",
"lldb",
"-T",
"/usr/bin/codesign",
],
)
api.step(
"set keychain ACL",
[
"security",
"set-key-partition-list",
"-S",
"apple-tool:,apple:",
"-s",
"-k",
"lldb_codesign",
"lldb",
],
)
# //zircon/public/gn/toolchain/clang.gni:clang_runtime sets the JSON schema.
#
# This function makes the prototype spec; debug and breakpad info is added by
# runtimes.py.
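#
# As an illustration (with a hypothetical clang_version of "17"), the asan
# entry emitted for aarch64 looks like:
#   {
#       "target": ["aarch64-unknown-fuchsia", "aarch64-fuchsia"],
#       "cflags": ["-fsanitize=address"],
#       "ldflags": [],
#       "runtime": [
#           {"dist": "clang/17/lib/aarch64-unknown-fuchsia/libclang_rt.asan.so"},
#           {"dist": "aarch64-unknown-fuchsia/asan/libc++.so.2", "name": "libc++"},
#           ...
#       ],
#   }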
def make_runtimes_spec(clang_version):
# TODO(fxbug.dev/27110): Ideally this would be done by the cmake build itself.
runtimes = []
for arch, has_hwasan in [("aarch64", True), ("riscv64", True), ("x86_64", False)]:
for mode_ldflags in [[], ["-static-libstdc++"]]:
for mode_sanitizer_cflags, mode_sanitizer_multilibs, mode_runtimes in [
([], [], []),
(["-fsanitize=address"], ["asan"], ["libclang_rt.asan.so"]),
(
["-fsanitize=undefined"],
[],
["libclang_rt.ubsan_standalone.so"],
),
] + (
[
(
["-fsanitize=hwaddress"],
["hwasan"],
["libclang_rt.hwasan.so"],
),
]
if has_hwasan
else []
):
target_triple = f"{arch}-unknown-fuchsia"
target = [target_triple, f"{arch}-fuchsia"]
runtime_dir = f"clang/{clang_version}/lib/{target_triple}"
stdlib_dir = f"{target_triple}"
mode_cflags = mode_sanitizer_cflags
mode_multilib = "+".join(mode_sanitizer_multilibs)
if mode_multilib:
mode_multilib += "/"
runtime = [
{"dist": runtime_dir + "/" + soname} for soname in mode_runtimes
]
if not mode_ldflags:
stdlib = [
{
"dist": stdlib_dir + "/" + mode_multilib + soname,
"name": soname.split(".")[0],
}
for soname in [
"libc++.so.2",
"libc++abi.so.1",
"libunwind.so.1",
]
]
runtime.extend(stdlib)
runtimes.append(
{
"target": target,
"cflags": mode_cflags,
"ldflags": mode_ldflags,
"runtime": runtime,
}
)
return runtimes
def extract_trace_filename(api, build_dir, trace):
if str(build_dir) not in trace:
        # The trace file should always be in build_dir, but if it isn't,
        # just upload it anyway instead of throwing an exception.
return api.path.basename(trace) # pragma: no cover
relative_path = str(api.path.relpath(trace, build_dir))
relative_path = relative_path.replace(api.path.sep, "_")
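    # e.g. "<build_dir>/tools/clang/.ninja_log.json" becomes
    # "tools_clang_.ninja_log.json".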
return relative_path
def upload_crash_reproducer(api, crashreports_dir, gcs_bucket, build_id):
with api.step.nest("upload crash reproducer"), api.context(infra_steps=True):
temp = api.path.mkdtemp("reproducers")
reproducers = api.file.glob_paths(
"find reproducers",
crashreports_dir,
"*.sh",
test_data=(crashreports_dir / "foo.sh",),
)
for reproducer in reproducers:
base = api.path.splitext(api.path.basename(reproducer))[0]
files = api.file.glob_paths(
f"find {base} files",
crashreports_dir,
base + ".*",
test_data=(
crashreports_dir / "foo.sh",
crashreports_dir / "foo.cpp",
),
)
tgz_basename = f"{base}.tar.gz"
tgz_path = temp / tgz_basename
package = api.archive.package(crashreports_dir)
for f in files:
package.with_file(f)
package.archive(f"create {tgz_basename}", tgz_path)
api.gsutil.upload_namespaced_file(
source=tgz_path,
bucket=gcs_bucket,
subpath=tgz_basename,
namespace=build_id,
)
def upload_build_traces(
api, ninjatrace_available, cipd_dir, build_dir, artifact_gcs_bucket, build_id
):
with api.step.nest("ninja log and traces"):
test_data = f"{build_dir.joinpath(NINJA_LOG)}\n{build_dir.joinpath('test', NINJA_LOG)}\n"
        # api.path.glob does not handle long paths on Windows; use a python3
        # script to mitigate this issue (fxbug.dev/98099).
logs = api.python3(
"collect .ninja.log",
[
api.resource("find_files.py"),
"--dir",
build_dir,
"--pattern",
f"**/{NINJA_LOG}",
],
step_test_data=lambda: api.raw_io.test_api.stream_output_text(test_data),
stdout=api.raw_io.output_text(add_output_log=True),
)
log_paths = logs.stdout.splitlines()
if not ninjatrace_available:
# ninjatrace is not yet available on Linux-arm64 and Windows
# platforms. Upload logs for offline analysis.
with api.step.nest("upload ninja log"):
for log_file in log_paths:
filename = extract_trace_filename(api, build_dir, log_file)
api.gsutil.upload_namespaced_file(
source=log_file,
bucket=artifact_gcs_bucket,
subpath=filename,
namespace=build_id,
)
return
trace_paths = []
with api.step.nest("generate ninja traces"):
for log_file in log_paths:
trace = log_file + ".json"
try:
api.step(
"ninjatrace",
[
cipd_dir.joinpath("ninjatrace", "ninjatrace"),
"-ninjalog",
log_file,
# TODO(haowei): add compdb
"-trace-json",
trace,
],
)
trace_paths.append(trace)
except api.step.StepFailure: # pragma: no cover
pass
with api.step.nest("upload traces"):
for trace in trace_paths:
filename = extract_trace_filename(api, build_dir, trace)
# Upload may fail if the service account doesn't have access to the
# bucket. Ignore the error for now since upload failures do not
# impact the toolchain build process.
try:
step = api.gsutil.upload_namespaced_file(
source=trace,
bucket=artifact_gcs_bucket,
subpath=filename,
namespace=build_id,
)
step.presentation.links["perfetto_ui"] = (
f"https://ui.perfetto.dev/#!?url={api.gsutil.unauthenticated_url(step.presentation.links[filename])}"
)
except api.step.StepFailure: # pragma: no cover
pass
def read_clang_version_from_build(api, build_dir, do_2stage):
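"""Reads the Clang major version string from the generated Version.inc.
Illustrative example: a Version.inc line
#define CLANG_VERSION_MAJOR_STRING "17"
yields "17". With do_2stage the file lives under the stage2-bins
subdirectory of the build tree.
"""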
step_result = api.file.read_text(
"Version.inc",
build_dir.joinpath(
*(
(["tools", "clang", "stage2-bins"] if do_2stage else [])
+ ["tools", "clang", "include", "clang", "Basic", "Version.inc"]
)
),
test_data='#define CLANG_VERSION_MAJOR_STRING "8"',
)
m = re.search(r'CLANG_VERSION_MAJOR_STRING "([a-zA-Z0-9]+)"', step_result)
assert m, "Cannot determine Clang version"
return m.group(1)
def generate_clang_runtime_json(
api,
pkg_dir,
cipd_dir,
use_breakpad,
clang_version,
):
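"""Generates runtime.json for the packaged runtime libraries.
Delegates to api.toolchain.strip_runtimes with the spec from
make_runtimes_spec(): runtimes under pkg_dir/lib are stripped, debug
info is kept under the debug/.build-id subpath, and, where breakpad is
supported, dump_syms produces breakpad symbols.
"""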
api.toolchain.strip_runtimes(
"generate runtime.json",
spec=make_runtimes_spec(clang_version),
path=pkg_dir / "lib",
build_id_subpath="debug/.build-id",
readelf=cipd_dir.joinpath("bin", "llvm-readelf"),
objcopy=cipd_dir.joinpath("bin", "llvm-objcopy"),
dump_syms=(
cipd_dir.joinpath("breakpad", "dump_syms", "dump_syms")
if use_breakpad
else None
),
)
def generate_license(
api,
llvm_dir,
pkg_dir,
projects,
libxml2_dir,
built_ncurses,
built_cpython,
is_lldb,
):
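"""Assembles the combined LICENSE file for the package.
Builds the argument list for the generate_license.py resource:
--include takes whole LICENSE files, while --extract takes a file plus
a line range ("17-63", or "-" for the entire file, as inferred from
the usages below). Output is written to pkg_dir/LICENSE.
"""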
license_args = ["--include"] + [
llvm_dir.joinpath(project, "LICENSE.TXT") for project in projects
]
if not is_lldb:
license_args.extend(
[
"--extract",
llvm_dir.joinpath("polly", "lib", "External", "isl", "LICENSE"),
"-",
]
)
license_args.extend(
[
"--extract",
llvm_dir.joinpath(
"llvm", "lib", "Support", "UnicodeNameToCodepointGenerated.cpp"
),
"17-63",
"--extract",
libxml2_dir.joinpath("src", "Copyright"),
"-",
"--extract",
api.zlib.checkout / "zlib.h",
"4-22",
"--extract",
api.path.start_dir.joinpath("boringssl", "src", "LICENSE"),
"-",
"--extract",
api.path.start_dir.joinpath("curl", "src", "COPYING"),
"-",
]
)
if is_lldb:
if built_ncurses:
license_args.extend(
[
"--extract",
api.path.start_dir.joinpath("ncurses", "src", "COPYING"),
"-",
"--extract",
api.path.start_dir.joinpath("libedit", "src", "COPYING"),
"-",
]
)
if built_cpython:
license_args.extend(
[
"--extract",
api.path.start_dir.joinpath("libffi", "src", "LICENSE"),
"-",
"--extract",
api.path.start_dir.joinpath("cpython", "src", "LICENSE"),
"-",
]
)
api.step(
"generate license",
cmd=["python3", api.resource("generate_license.py")] + license_args,
stdout=api.raw_io.output_text(leak_to=pkg_dir / "LICENSE"),
)
def GenTests(api):
builders = {
"linux-amd64": ["fuchsia/linux-x64-builder"],
"mac-amd64": ["fuchsia/mac-x64-builder"],
}
tot_builders = {
"linux-amd64": ["fuchsia/linux-x64-builder-tot"],
}
for os in ("linux", "mac"):
yield (
api.test(f"ci_{os}_x64")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name(os)
+ api.properties(
platform=os + "-amd64",
enable_assertions=True,
enable_backtraces=True,
builders=builders,
tot_builders=tot_builders,
reverts=["b" * 40],
)
)
yield (
api.test(f"prod_{os}_x64")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="prod",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name(os)
+ api.properties(
platform=os + "-amd64",
builders=builders,
tot_builders=tot_builders,
do_2stage=True,
upload_package=True,
)
+ api.git.get_remote_branch_head("git ls-remote", "b" * 40)
)
yield (
api.test("windows_amd64")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("win")
+ api.properties(
platform="windows-amd64",
do_2stage=True,
builders=builders,
tot_builders=tot_builders,
)
)
yield (
api.test("linux_arm64")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="prod",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform("linux", arch="arm", bits=64)
+ api.properties(
platform="linux-arm64",
do_2stage=True,
builders=builders,
tot_builders=tot_builders,
upload_package=True,
)
)
yield (
api.test("mac_arm64")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="prod",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("mac")
+ api.platform.bits(64)
+ api.properties(
platform="mac-arm64",
do_2stage=True,
upload_package=False,
builders=builders,
tot_builders=tot_builders,
)
)
yield (
api.test("lld")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("mac")
+ api.properties(
platform="mac-amd64",
enable_lld=True,
builders=builders,
tot_builders=tot_builders,
)
)
yield (
api.test("lto")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("linux")
+ api.properties(
platform="linux-amd64",
lto_mode=LTOMode.FULL,
builders=builders,
tot_builders=tot_builders,
)
)
yield (
api.test("thinlto")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("linux")
+ api.properties(
platform="linux-amd64",
lto_mode=LTOMode.THIN,
builders=builders,
tot_builders=tot_builders,
)
)
yield (
api.test("rbe")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("linux")
+ api.properties(
platform="linux-amd64",
builders=builders,
tot_builders=tot_builders,
use_rbe=True,
)
)
yield (
api.test("clang_reproducer")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("linux")
+ api.properties(
platform="linux-amd64",
lto_mode=LTOMode.THIN,
builders=builders,
tot_builders=tot_builders,
)
+ api.step_data("clang.build", retcode=1)
)