| # Copyright 2021 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for building binutils-gdb package.""" |
| |
| import contextlib |
| import re |
| |
# Recipe modules this recipe depends on, resolved by the recipe engine at
# load time and exposed as attributes of the `api` object in RunSteps.
DEPS = [
    "fuchsia/buildbucket_util",
    "fuchsia/cas_util",
    "fuchsia/cipd_util",
    "fuchsia/git",
    "fuchsia/git_checkout",
    "fuchsia/goma",
    "fuchsia/macos_sdk",
    "recipe_engine/buildbucket",
    "recipe_engine/cipd",
    "recipe_engine/context",
    "recipe_engine/file",
    "recipe_engine/path",
    "recipe_engine/platform",
    "recipe_engine/raw_io",
    "recipe_engine/step",
]

# Upstream binutils-gdb mirror; the checkout falls back to BINUTILS_REF
# when the build was not triggered with an explicit revision.
BINUTILS_PROJECT = "binutils-gdb"
BINUTILS_GIT = "https://gnu.googlesource.com/" + BINUTILS_PROJECT
BINUTILS_REF = "HEAD"

# Bump this number whenever changing this recipe in ways that affect the
# package built without also changing any upstream revision pin.
# TODO(crbug.com/947158): Remove this when the recipes repo/rev are available.
RECIPE_SALT = ""
| |
| |
def RunSteps(api):
    """Build binutils-gdb from source and publish it as a relocatable package.

    Checks out binutils-gdb, builds its static host library dependencies
    (gmp, mpfr, expat, ncurses) from CIPD-provided sources, configures and
    builds binutils-gdb against them, uploads the install tree to CAS, and
    on prod builders publishes it to CIPD tagged with the git revision.
    """
    use_goma = api.platform.arch != "arm"
    # TODO(mcgrathr): temporarily disable goma while working on some bugs
    use_goma = False
    if use_goma:  # pragma: no cover
        compile_jobs = api.goma.jobs
        goma_context = api.goma()
    else:
        compile_jobs = api.platform.cpu_count
        goma_context = contextlib.nullcontext()

    run_tests = api.platform.name == "linux"  # Mac doesn't have dejagnu.
    # TODO(mcgrathr): gold and gdb test suites not happy. Maybe care later.
    run_tests = False

    prod = api.buildbucket.build.builder.bucket == "prod"

    binutils_dir, binutils_revision = api.git_checkout(
        BINUTILS_GIT, fallback_ref=BINUTILS_REF
    )
    with api.context(cwd=binutils_dir, infra_steps=True):
        git_url = api.git.get_remote_url(
            "origin",
            step_test_data=lambda: api.raw_io.test_api.stream_output_text(BINUTILS_GIT),
        )
    # Baked into the tools via --with-pkgversion below so `--version` output
    # identifies exactly what was built.
    binutils_pkgversion = f"{git_url} {binutils_revision}"

    with api.step.nest("ensure_packages"), api.context(infra_steps=True):
        pkgs = api.cipd.EnsureFile()
        # Host toolchain and build utilities.
        pkgs.add_package("fuchsia/third_party/clang/${platform}", "integration")
        pkgs.add_package("fuchsia/third_party/make/${platform}", "version:4.3")
        pkgs.add_package("fuchsia/third_party/libtool/${platform}", "version:2.4.6")
        pkgs.add_package("fuchsia/third_party/pkg-config/${platform}", "version:0.29.2")
        pkgs.add_package("fuchsia/third_party/bison/${platform}", "version:3.7")
        pkgs.add_package("fuchsia/third_party/flex/${platform}", "version:2.6.4")
        pkgs.add_package("fuchsia/third_party/m4/${platform}", "version:1.4.18")
        pkgs.add_package("fuchsia/third_party/texinfo/${platform}", "version:6.8.0")
        # Library dependencies delivered as source; built below by build_lib.
        for pkg, version in [
            ("gmp", "6.2.1"),
            ("mpfr", "4.1.0"),
            ("expat", "2.4.1"),
            ("babeltrace2", "2.0.3"),
            ("ncurses", "6.2"),
        ]:
            pkgs.add_package(
                "fuchsia/third_party/source/" + pkg,
                "version:" + version,
                "source/" + pkg,
            )
        if api.platform.name == "linux":
            pkgs.add_package("fuchsia/third_party/sysroot/linux", "latest")
        cipd_dir = api.path.start_dir / "cipd"
        api.cipd.ensure(cipd_dir, pkgs)

    # This is a no-op on non-Mac. Do it just once around all the logic that
    # needs any xcode bits. Doing it multiple times separately seems to
    # produce anomalous results.
    with api.macos_sdk():
        if api.platform.name == "linux":
            host_sysroot = cipd_dir
        elif api.platform.name == "mac":
            host_sysroot = api.macos_sdk.sysroot
            # Log ld versions because it's been a bug issue in the past.
            api.step("/usr/bin/ld -v", ["/usr/bin/ld", "-v"])
        else:  # pragma: no cover
            assert False, "what platform?"

        staging_dir = api.path.start_dir / "staging"
        pkg_name = f"binutils-gdb-{api.platform.name.replace('mac', 'darwin')}"
        pkg_dir = staging_dir / pkg_name
        api.file.ensure_directory("create pkg dir", pkg_dir)

        # Some of the makefile logic splits $CC at its first word and injects
        # a switch there. So make $CC and $CXX be single words by writing
        # little scripts. Autoconf does some checks with CPPFLAGS but not
        # CFLAGS and other checks with CFLAGS but not CPPFLAGS. The sysroot
        # is necessary for all cases, so fold that into the script too so it's
        # impossible to omit it in any $CC or $CXX invocation.
        cc_path = staging_dir / "host-cc"
        cxx_path = staging_dir / "host-cxx"
        gomacc_path = api.goma.goma_dir / "gomacc" if use_goma else ""
        for script, compiler, trailing_switches in [
            (cc_path, "clang", []),
            (cxx_path, "clang++", ["-Wno-enum-constexpr-conversion"]),
        ]:
            compiler_path = cipd_dir.joinpath("bin", compiler)
            if api.platform.name == "mac" and compiler == "clang++":
                # Our host toolchain for Mac provides static libc++ but doesn't
                # know how to link it in by itself. Things in LIBS or LDFLAGS
                # get put onto `ar` command lines for static libraries, which
                # doesn't go well. A link input on non-linking clang++ command
                # lines is useless though harmless, but it generates a lot of
                # warning noise that makes the build logs hard to read and slow
                # to collect. So the wrapper script tries to add it only to
                # linking command lines.
                script_text = """#!/bin/sh
extra=(-nostdlib++ %s)
for arg; do
  case "$arg" in
  -[cE])
    extra=()
    break
    ;;
  esac
done
extra+=(%s)
exec %s %s -no-canonical-prefixes --sysroot=%s "$@" "${extra[@]}"
""" % (
                    cipd_dir.joinpath("lib", "libc++.a"),
                    " ".join(trailing_switches),
                    gomacc_path,
                    compiler_path,
                    host_sysroot,
                )
            else:
                script_text = f"""#!/bin/sh
exec {gomacc_path} {compiler_path} -no-canonical-prefixes --sysroot={host_sysroot} "$@"
"""
            api.file.write_text(
                f"write {api.path.basename(script)} script", script, script_text
            )
            api.step(
                f"make {api.path.basename(script)} executable",
                ["chmod", "+x", script],
            )

        lib_install_dir = api.path.start_dir / "libs"
        host_cflags = "-O3"
        # pylint: disable=condition-evals-to-constant
        if api.platform.name != "mac":  # pragma: no cover
            # LTO works for binutils on Linux but fails on macOS.
            host_cflags += " -flto"
        # Suppress some warnings that are fatal in Clang but harmless enough in
        # the GDB code.
        host_cxxflags = host_cflags + " -Wno-enum-constexpr-conversion -Wno-switch"
        host_compiler_args = {
            "CC": f"{cc_path}",
            "CXX": f"{cxx_path}",
            "CFLAGS": host_cflags,
            "CXXFLAGS": host_cxxflags,
            "CPPFLAGS": f"-I{lib_install_dir.joinpath('include')} -I{lib_install_dir.joinpath('include', 'ncursesw')}",
            "AR": cipd_dir.joinpath("bin", "llvm-ar"),
            "RANLIB": cipd_dir.joinpath("bin", "llvm-ranlib"),
            "LDFLAGS": f"-L{lib_install_dir / 'lib'}",
            "M4": f"{cipd_dir.joinpath('bin', 'm4')}",
        }
        host_compiler_env = {"CPPFLAGS": host_compiler_args["CPPFLAGS"]}

        if api.platform.name.startswith("linux"):
            # AR and RANLIB are already set above for every platform; only
            # the additional binutils-style tools are Linux-specific.
            host_compiler_args.update(
                {
                    "NM": cipd_dir.joinpath("bin", "llvm-nm"),
                    "STRIP": cipd_dir.joinpath("bin", "llvm-strip"),
                    "OBJCOPY": cipd_dir.joinpath("bin", "llvm-objcopy"),
                }
            )

            # The wrapper drops its first argument and inserts --embed before
            # forwarding to the sysroot's python3.8-config.
            python_wrapper_path = cipd_dir.joinpath("bin", "python-config-wrapper")
            api.file.write_text(
                "write python-config-wrapper script",
                python_wrapper_path,
                """#!/bin/sh
shift
exec %s --embed "$@"
"""
                % cipd_dir.joinpath(
                    "usr",
                    "bin",
                    "%s-linux-gnu-python3.8-config"
                    % {"arm": "aarch64", "intel": "x86_64"}[api.platform.arch],
                ),
            )
            api.step(
                "make python-config-wrapper executable",
                ["chmod", "+x", python_wrapper_path],
            )
            with_python = python_wrapper_path
        elif api.platform.name == "mac":
            # TODO(fxbug.dev/99129): Disabled for now. If we can figure out
            # how to make the build work with macOS system Python, we should
            # reenable it.
            with_python = "no"
        else:  # pragma: no cover
            with_python = "yes"

        if api.platform.name != "mac":
            # Always link libc++ statically in case a shared library is available.
            host_compiler_args["CXXFLAGS"] += " -static-libstdc++"

        # Flatten the dict into sorted VAR=VALUE switches for configure lines.
        host_compiler_args = sorted(
            "%s=%s" % item for item in host_compiler_args.items()
        )

        with goma_context, api.context(env_prefixes={"PATH": [cipd_dir / "bin"]}):
            api.file.ensure_directory("create libs dir", lib_install_dir)

            def build_lib(name, configure_args=(), libtool=True):
                """Configure, build, and install one static host library.

                Args:
                  name: source subdirectory under the CIPD `source/` tree;
                    also names the build directory and installed .la file.
                  configure_args: extra ./configure switches for this package.
                  libtool: whether the package uses libtool; controls .la
                    cleanup and which logs are published.
                """
                with api.step.nest(name):
                    src_dir = cipd_dir.joinpath("source", name)
                    build_dir = api.path.start_dir / name
                    api.file.ensure_directory("create build dir", build_dir)
                    try:
                        with api.context(cwd=build_dir):
                            api.step(
                                "configure",
                                [
                                    src_dir / "configure",
                                    "--prefix=",
                                    "--disable-silent-rules",
                                    "--disable-dependency-tracking",
                                    "--enable-static",
                                    "--disable-shared",
                                ]
                                + host_compiler_args
                                + list(configure_args),
                            )
                            api.step("build", ["make", f"-j{int(compile_jobs)}", "V=1"])
                            api.step(
                                "install",
                                ["make", "install", f"DESTDIR={lib_install_dir}"],
                            )
                            if libtool:
                                api.file.remove(
                                    "remove .la file",
                                    lib_install_dir.joinpath("lib", f"lib{name}.la"),
                                )
                    finally:
                        # Whatever happened, publish some configure-generated
                        # files to aid debugging, if they got written at all.
                        log_files = ["config.log"]
                        if libtool:
                            log_files.append("libtool")
                        for log_file in log_files:
                            step_result = api.step.empty(f"{log_file} file")
                            try:
                                step_result.presentation.logs[log_file] = (
                                    api.file.read_text(
                                        log_file, build_dir / log_file
                                    ).splitlines()
                                )
                            except Exception:  # pragma: no cover
                                # Best effort: the file may not exist if
                                # configure failed early.
                                pass

            build_lib("gmp")
            build_lib("mpfr", [f"--with-gmp={lib_install_dir}"])
            build_lib("expat")
            # TODO(mcgrathr): build_lib("babeltrace2")
            build_lib(
                "ncurses",
                [
                    "--enable-pc-files",
                    "--enable-sigwinch",
                    "--enable-widec",
                    "--without-gpm",
                    "--without-progs",
                    "--without-cxx-binding",
                    "--disable-db-install",
                    "--with-terminfo-dirs=/usr/share/terminfo:/usr/lib/terminfo",
                    "--with-default-terminfo-dir=/usr/share/terminfo",
                ],
                libtool=False,
            )

            binutils_build_dir = staging_dir / "build_dir"
            api.file.ensure_directory("create build dir", binutils_build_dir)
            with api.context(cwd=binutils_build_dir):
                try:
                    api.step(
                        "configure",
                        [
                            binutils_dir / "configure",
                            f"--with-pkgversion={binutils_pkgversion}",
                            "--disable-silent-rules",
                            # We're building a relocatable package.
                            "--prefix=",
                            "--enable-static",
                            # Support everything we can.
                            "--enable-targets=all",
                            "--enable-gold",
                            # Within reason.
                            "--disable-sim",
                            # Not ready for prime time.
                            "--disable-gprofng",
                            # Explicitly enable things so the build fails if
                            # autodetection doesn't find needed libraries.
                            "--enable-tui",
                            f"--with-gmp={lib_install_dir}",
                            f"--with-mpfr={lib_install_dir}",
                            "--with-expat",
                            f"--with-libexpat-prefix={lib_install_dir}",
                            "--with-libexpat-type=static",
                            f"--with-python={with_python}",
                            # TODO(mcgrathr): "--with-babeltrace",
                            f"--with-libbabeltrace-prefix={lib_install_dir}",
                            "--with-libbabeltrace-type=static",
                            # Good defaults for the tools.
                            "--enable-deterministic-archives",
                            "--enable-textrel-check=error",
                            # Enable plugins and threading for Gold. This also
                            # happens to make it explicitly link in -lpthread and
                            # -dl, which are required by host_clang's static
                            # libc++.
                            "--enable-plugins",
                            "--enable-threads",
                            # Ignore Clang warnings.
                            "--disable-werror",
                            # No need for localization, so minimize deps.
                            "--disable-nls",
                            "--with-included-gettext",
                            # Some weird issues probably Clang-related.
                            "--disable-unit-tests",
                        ]
                        + host_compiler_args,
                    )
                except api.step.StepFailure as error:
                    # Surface config.log alongside the failure before re-raising.
                    log = api.file.read_text(
                        "config.log", binutils_build_dir / "config.log"
                    ).splitlines()
                    api.step.empty("binutils configure failure").presentation.logs[
                        "config.log"
                    ] = log
                    raise error

                def binutils_make_step(name, jobs, *make_args):
                    """Run `make` in the binutils build directory as a step."""
                    cmd = ["make", f"-j{jobs}", "V=1"] + list(make_args)
                    # As of 2.32, gold/testsuite/Makefile.am unconditionally edits
                    # in a -B.../ switch to make the compiler use the just-built
                    # gold as the linker for test suite binaries. This is wrong
                    # when building a cross-linker. Force it to a no-op on the
                    # make command line to work around the bug. TODO(mcgrathr):
                    # Drop this when we roll to a binutils that has this fixed
                    # upstream.
                    cmd.append("MAKEOVERRIDES=editcc=-eb")
                    return api.step(name, cmd)

                try:
                    with api.context(env=host_compiler_env):
                        binutils_make_step("build", compile_jobs, "all")
                finally:
                    # Pass or fail, publish each subdirectory's config.log.
                    logs = {}
                    for subdir in [
                        "gas",
                        "gnulib",
                        "binutils",
                        "ld",
                        "gold",
                        "gdb",
                    ]:
                        try:
                            log_file = f"{subdir}/config.log"
                            logs[log_file] = api.file.read_text(
                                log_file,
                                binutils_build_dir.joinpath(subdir, "config.log"),
                            ).splitlines()
                        except Exception:  # pragma: no cover
                            pass
                    step_result = api.step.empty("binutils config.log files")
                    for name, text in sorted(logs.items()):
                        step_result.presentation.logs[name] = text

                if run_tests:  # pragma: no cover
                    try:
                        binutils_make_step(
                            "test", api.platform.cpu_count, "-k", "check"
                        )
                    except api.step.StepFailure as error:
                        # Best-effort collection of the dejagnu logs; any of
                        # them may be missing depending on where the run died.
                        logs = {}
                        for log_parts in [
                            ("gas", "testsuite", "gas.log"),
                            ("binutils", "binutils.log"),
                            ("ld", "ld.log"),
                            ("gold", "testsuite", "test-suite.log"),
                            ("gdb", "testsuite", "gdb.log"),
                        ]:
                            try:
                                logs[log_parts[0]] = api.file.read_text(
                                    "/".join(log_parts),
                                    binutils_build_dir.joinpath(*log_parts),
                                ).splitlines()
                            except Exception:
                                pass
                        step_result = api.step.empty("binutils test failure")
                        for name, text in sorted(logs.items()):
                            step_result.presentation.logs[name] = text
                        raise error

                binutils_make_step("install", 1, "install-strip", f"DESTDIR={pkg_dir}")

    # Extract the version number from bfd/version.m4, which looks like
    # `m4_define([BFD_VERSION], [2.27.0])`.
    binutils_version = api.file.read_text(
        "binutils version",
        binutils_dir.joinpath("bfd", "version.m4"),
        test_data="m4_define([BFD_VERSION], [2.27.0])",
    )
    m = re.match(r"m4_define\(\[BFD_VERSION\], \[([^]]+)\]\)", binutils_version)
    assert m and m.group(
        1
    ), f"bfd/version.m4 has unexpected format: {binutils_version!r}"
    binutils_version = m.group(1)

    # Copy the license file to the canonical name at the root of the package.
    api.file.copy(
        "copy license file",
        binutils_dir.joinpath("COPYING3"),
        pkg_dir.joinpath("LICENSE"),
    )

    cipd_git_repository = BINUTILS_GIT
    cipd_git_revision = binutils_revision

    # Add a bogus "salt" repository/revision to represent recipe changes when
    # the upstream revisions haven't changed. Bump this number whenever
    # changing this recipe in ways that affect the package built without also
    # changing any upstream revision pin.
    # TODO(crbug.com/947158): Replace this with the recipes repo/rev when
    # infra makes that available here.
    if RECIPE_SALT:  # pragma: no cover
        cipd_git_repository += ",<salt>"
        cipd_git_revision += f",{RECIPE_SALT}"

    api.cas_util.upload(pkg_dir, output_property="isolated")

    # The prod builders actually publish to CIPD.
    if prod:
        api.cipd_util.upload_package(
            "fuchsia/third_party/binutils-gdb/${platform}",
            pkg_dir,
            search_tag={"git_revision": cipd_git_revision},
            repository=cipd_git_repository,
            metadata=[("version", binutils_version)],
        )
| |
| |
def GenTests(api):
    """Yield simulation test cases covering each platform and failure mode."""
    binutils_revision = "3d861fdb826c2f5cf270dd5f585d0e6057e1bf4f"

    def case(
        name,
        platform="linux",
        arch="intel",
        bucket="ci",
        git_repo=BINUTILS_GIT,
        revision=binutils_revision,
        fail=False,
    ):
        # Build input properties plus the simulated host platform.
        build = api.buildbucket_util.test(
            name,
            bucket=bucket,
            git_repo=git_repo,
            revision=revision,
            status="FAILURE" if fail else "SUCCESS",
        )
        return (
            build
            + api.platform.name(platform)
            + api.platform.arch(arch)
            + api.platform.bits(64)
        )

    def rev_parse_data():
        # Mock output for the checkout's `git rev-parse` step.
        return api.step_data(
            "checkout.git rev-parse",
            api.raw_io.stream_output_text(binutils_revision),
        )

    # One passing build per supported host platform/architecture combination.
    for platform, arch in (("linux", "intel"), ("linux", "arm"), ("mac", "intel")):
        yield case(f"{platform}_{arch}", platform, arch) + rev_parse_data()

    # The prod bucket additionally exercises the CIPD publish path.
    yield case("prod", bucket="prod") + rev_parse_data()

    # Failure modes: the binutils configure step and the build step.
    yield case("configure_fail", fail=True) + api.step_data("configure", retcode=1)
    yield case("build_fail", fail=True) + api.step_data("build", retcode=1)