| # Copyright 2018 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for building GCC toolchain.""" |
| |
| from recipe_engine.recipe_api import StepFailure |
| |
| import pipes |
| import re |
| |
| DEPS = [ |
| "fuchsia/git", |
| "fuchsia/goma", |
| "fuchsia/macos_sdk", |
| "fuchsia/nullcontext", |
| "fuchsia/status_check", |
| "fuchsia/toolchain", |
| "fuchsia/upload", |
| "recipe_engine/buildbucket", |
| "recipe_engine/cipd", |
| "recipe_engine/context", |
| "recipe_engine/file", |
| "recipe_engine/json", |
| "recipe_engine/path", |
| "recipe_engine/platform", |
| "recipe_engine/raw_io", |
| "recipe_engine/step", |
| ] |
| |
| BINUTILS_GIT = "https://gnu.googlesource.com/binutils-gdb" |
| BINUTILS_REF = "refs/heads/master" |
| |
| GCC_GIT = "https://gnu.googlesource.com/gcc" |
| GCC_REF = "refs/heads/releases/gcc-10" |
| |
# Bump this value whenever changing this recipe in ways that affect the
# package built without also changing any upstream revision pin.
# TODO(crbug.com/947158): Remove this when the recipes repo/rev are available.
RECIPE_SALT = ""
| |
| |
def RunSteps(api):
    """Check out binutils-gdb and gcc, build them for each target, and publish.

    On prod builders the resulting package is uploaded to CIPD; on CI Linux
    x64 builders an isolated of the package is used to trigger full Fuchsia
    integration builds that exercise the just-built toolchain.
    """
    use_goma = api.platform.arch != "arm"
    # TODO(mcgrathr): temporarily disable goma while working on some bugs
    use_goma = False
    if use_goma:  # pragma: no cover
        api.goma.ensure()
        compile_jobs = api.goma.jobs
        goma_context = api.goma.build_with_goma
    else:
        compile_jobs = api.platform.cpu_count
        goma_context = api.nullcontext

    prod = api.buildbucket.builder_id.bucket == "prod"

    binutils_dir = api.path["start_dir"].join("binutils-gdb")
    gcc_dir = api.path["start_dir"].join("gcc")

    # Default pins for each project: (git URL, checkout dir, ref).
    projects = {
        "binutils-gdb": (BINUTILS_GIT, binutils_dir, BINUTILS_REF),
        "gcc": (GCC_GIT, gcc_dir, GCC_REF),
    }

    # If this build was triggered by a commit to one of the projects, build
    # that project at the triggering revision instead of its pinned ref.
    gitiles_commit = api.buildbucket.build_input.gitiles_commit
    if gitiles_commit.host and gitiles_commit.project and gitiles_commit.id:
        projects[gitiles_commit.project] = (
            "https://%s/%s" % (gitiles_commit.host, gitiles_commit.project),
            api.path["start_dir"].join(gitiles_commit.project),
            gitiles_commit.id,
        )

    with api.context(infra_steps=True):
        with api.step.nest("binutils-gdb"):
            binutils_revision = api.git.checkout(*projects["binutils-gdb"])
            # --with-pkgversion strings record exactly what was built.
            binutils_pkgversion = "%s %s" % (
                projects["binutils-gdb"][0],
                binutils_revision,
            )
        with api.step.nest("gcc"):
            gcc_revision = api.git.checkout(*projects["gcc"])
            gcc_pkgversion = "%s %s" % (projects["gcc"][0], gcc_revision)

    with api.step.nest("ensure_packages"):
        with api.context(infra_steps=True):
            pkgs = api.cipd.EnsureFile()
            pkgs.add_package("fuchsia/third_party/clang/${platform}", "integration")
            pkgs.add_package("fuchsia/third_party/make/${platform}", "version:4.3")
            pkgs.add_package("fuchsia/third_party/libtool/${platform}", "version:2.4.6")
            pkgs.add_package(
                "fuchsia/third_party/pkg-config/${platform}", "version:0.29.2"
            )
            pkgs.add_package("fuchsia/third_party/bison/${platform}", "version:3.7")
            pkgs.add_package("fuchsia/third_party/flex/${platform}", "version:2.6.4")
            pkgs.add_package("fuchsia/third_party/m4/${platform}", "version:1.4.18")
            if api.platform.name == "linux":
                pkgs.add_package("fuchsia/sysroot/${platform}", "latest")
            cipd_dir = api.path["start_dir"].join("cipd")
            api.cipd.ensure(cipd_dir, pkgs)

    # This is a no-op on non-Mac. Do it just once around all the logic that
    # needs any xcode bits. Doing it multiple times separately seems to
    # produce anomalous results.
    with api.macos_sdk():

        if api.platform.name == "linux":
            host_sysroot = cipd_dir
        elif api.platform.name == "mac":
            step_result = api.step(
                "xcrun",
                ["xcrun", "--sdk", "macosx", "--show-sdk-path"],
                stdout=api.raw_io.output(name="sdk-path", add_output_log=True),
                step_test_data=lambda: api.raw_io.test_api.stream_output(
                    "/some/xcode/path"
                ),
            )
            host_sysroot = step_result.stdout.strip()
            # Log ld versions because it's been a bug issue in the past.
            api.step("/usr/bin/ld -v", ["/usr/bin/ld", "-v"])
        else:  # pragma: no cover
            assert False, "what platform?"

        with api.context(cwd=gcc_dir):
            # download GCC dependencies: GMP, ISL, MPC and MPFR libraries
            api.step(
                "download prerequisites",
                [gcc_dir.join("contrib", "download_prerequisites")],
            )

        staging_dir = api.path["start_dir"].join("staging")
        # CIPD/package naming uses "darwin" where the platform module says "mac".
        pkg_name = "gcc-%s" % api.platform.name.replace("mac", "darwin")
        pkg_dir = staging_dir.join(pkg_name)
        api.file.ensure_directory("create pkg dir", pkg_dir)

        # Some of the makefile logic splits $CC at its first word and injects
        # a switch there. So make $CC and $CXX be single words by writing
        # little scripts. Autoconf does some checks with CPPFLAGS but not
        # CFLAGS and other checks with CFLAGS but not CPPFLAGS. The sysroot
        # is necessary for all cases, so fold that into the script too so it's
        # impossible to omit it in any $CC or $CXX invocation.
        cc_path = staging_dir.join("host-cc")
        cxx_path = staging_dir.join("host-cxx")
        gomacc_path = api.goma.goma_dir.join("gomacc") if use_goma else ""
        for script, compiler in [(cc_path, "clang"), (cxx_path, "clang++")]:
            compiler_path = cipd_dir.join("bin", compiler)
            if api.platform.name == "mac" and compiler == "clang++":
                # Our host toolchain for Mac provides static libc++ but doesn't
                # know how to link it in by itself. Things in LIBS or LDFLAGS
                # get put onto `ar` command lines for static libraries, which
                # doesn't go well. A link input on non-linking clang++ command
                # lines is useless though harmless, but it generates a lot of
                # warning noise that makes the build logs hard to read and slow
                # to collect. So the wrapper script tries to add it only to
                # linking command lines.
                script_text = """#!/bin/sh
extra=(-nostdlib++ %s)
for arg; do
  case "$arg" in
  -[cE])
    extra=()
    break
    ;;
  esac
done
exec %s %s -no-canonical-prefixes --sysroot=%s "$@" "${extra[@]}"
""" % (
                    cipd_dir.join("lib", "libc++.a"),
                    gomacc_path,
                    compiler_path,
                    host_sysroot,
                )
            else:
                script_text = """#!/bin/sh
%s %s -no-canonical-prefixes --sysroot=%s "$@"
""" % (
                    gomacc_path,
                    compiler_path,
                    host_sysroot,
                )
            api.file.write_text(
                "write %s script" % api.path.basename(script), script, script_text
            )
            api.step(
                "make %s executable" % api.path.basename(script),
                ["chmod", "+x", script],
            )

        host_cflags = "-O3"
        if api.platform.name != "mac":
            # LTO works for binutils on Linux but fails on macOS.
            host_cflags += " -flto"
        host_compiler_args = {
            "CC": "%s" % cc_path,
            "CXX": "%s" % cxx_path,
            "CFLAGS": host_cflags,
            "CXXFLAGS": host_cflags,
            "M4": "%s" % cipd_dir.join("bin", "m4"),
        }

        if api.platform.name != "mac":
            # Always link libc++ statically in case a shared library is available.
            host_compiler_args["CXXFLAGS"] += " -static-libstdc++"

        # Render as sorted VAR=value strings for the configure command lines.
        # NOTE: dict.items() (not the Python 2-only iteritems()) so this
        # works when the recipe runs under Python 3.
        host_compiler_args = sorted(
            "%s=%s" % item for item in host_compiler_args.items()
        )

        # We force LIMITS_H_TEST=true to tell the compiler to install a
        # <limits.h> that requires a system <limits.h> it can find via
        # #include_next. But we don't have any system headers for the
        # just-built compiler building target code (i.e. libgcc). So mock up a
        # little include directory that contains just an empty <limits.h> for
        # it to find.
        mock_include_dir = staging_dir.join("mock-include")
        api.file.ensure_directory("create mock include dir", mock_include_dir)
        api.file.write_text(
            "write tmp <limits.h>", mock_include_dir.join("limits.h"), "/* tmp */"
        )

        with goma_context(), api.context(env_prefixes={"PATH": [cipd_dir.join("bin")]}):
            for target, enable_targets in [
                ("aarch64", "arm-eabi"),
                ("x86_64", "x86_64-pep"),
            ]:
                # configure arguments that are the same for binutils and gcc.
                common_args = host_compiler_args + [
                    "--prefix=",  # we're building a relocatable package
                    "--target=%s-elf" % target,
                    # Fuchsia uses .init/.fini arrays
                    "--enable-initfini-array",
                    # Zircon uses gold for userspace build
                    "--enable-gold",
                    # Enable plugins and threading for Gold. This also happens
                    # to make it explicitly link in -lpthread and -dl, which
                    # are required by host_clang's static libc++.
                    "--enable-plugins",
                    "--enable-threads",
                    "--disable-werror",  # ignore warnings reported by Clang
                    "--disable-nls",  # no need for localization
                    "--with-included-gettext",  # use include gettext library
                ]

                # build binutils
                with api.step.nest("%s binutils" % target):
                    binutils_build_dir = staging_dir.join(
                        "binutils_%s_build_dir" % target
                    )
                    api.file.ensure_directory("create build dir", binutils_build_dir)

                    with api.context(cwd=binutils_build_dir):

                        def binutils_make_step(name, prefix, jobs, make_args=None):
                            # As of 2.32, gold/testsuite/Makefile.am
                            # unconditionally edits in a -B.../ switch to make
                            # the compiler use the just-built gold as the
                            # linker for test suite binaries. This is wrong
                            # when building a cross-linker. Force it to a
                            # no-op on the make command line to work around the
                            # bug. TODO(mcgrathr): Drop this when we roll to a
                            # binutils that has this fixed upstream.
                            make_args = make_args or []
                            make_args.append("MAKEOVERRIDES=editcc=-eb")
                            return api.step(
                                name,
                                ["make", "-j%s" % jobs]
                                + make_args
                                + [
                                    "%s-%s" % (prefix, component)
                                    for component in ["binutils", "gas", "ld", "gold"]
                                ],
                            )

                        try:
                            api.step(
                                "configure",
                                [
                                    binutils_dir.join("configure"),
                                    "--with-pkgversion=%s" % binutils_pkgversion,
                                    "--enable-deterministic-archives",  # more deterministic builds
                                    "--enable-targets=%s" % enable_targets,
                                ]
                                + common_args,
                            )
                        except StepFailure as error:
                            # Surface config.log in the step presentation so
                            # failures are diagnosable from the build page.
                            log = api.file.read_text(
                                "config.log", binutils_build_dir.join("config.log")
                            ).splitlines()
                            step_result = api.step(
                                "binutils configure failure", cmd=None
                            )
                            step_result.presentation.logs["config.log"] = log
                            raise error
                        try:
                            binutils_make_step("build", "all", compile_jobs)
                        except StepFailure as error:
                            logs = {
                                l[0]: api.file.read_text(
                                    "/".join(l), binutils_build_dir.join(*l)
                                ).splitlines()
                                for l in [
                                    ("gas", "config.log"),
                                    ("binutils", "config.log"),
                                    ("ld", "config.log"),
                                    ("gold", "config.log"),
                                ]
                            }
                            step_result = api.step("binutils build failure", cmd=None)
                            # items() rather than iteritems() for Python 3.
                            for name, text in logs.items():
                                step_result.presentation.logs[name] = text
                            raise error
                        try:
                            # Run tests on all local cores; -k keeps going so
                            # all failures are collected in one pass.
                            binutils_make_step(
                                "test", "check", api.platform.cpu_count, ["-k"]
                            )
                        except StepFailure as error:
                            logs = {
                                l[0]: api.file.read_text(
                                    "/".join(l), binutils_build_dir.join(*l)
                                ).splitlines()
                                for l in [
                                    ("gas", "testsuite", "gas.log"),
                                    ("binutils", "binutils.log"),
                                    ("ld", "ld.log"),
                                    ("gold", "testsuite", "test-suite.log"),
                                ]
                            }
                            step_result = api.step("binutils test failure", cmd=None)
                            for name, text in logs.items():
                                step_result.presentation.logs[name] = text
                            raise error
                        # Install serially (-j1); parallel install-strip races.
                        binutils_make_step(
                            "install", "install-strip", 1, ["DESTDIR=%s" % pkg_dir]
                        )

                # build gcc
                with api.step.nest("%s gcc" % target):
                    gcc_build_dir = staging_dir.join("gcc_%s_build_dir" % target)
                    api.file.ensure_directory("create build dir", gcc_build_dir)

                    # Put the just-installed binutils on PATH so gcc's build
                    # uses the cross assembler/linker we just built.
                    with api.context(
                        cwd=gcc_build_dir, env_prefixes={"PATH": [pkg_dir.join("bin")]}
                    ):
                        gcc_goals = ["gcc", "target-libgcc"]

                        def gcc_make_step(name, jobs, args, **kwargs):
                            cmd = [
                                "make",
                                "-j%s" % jobs,
                                "MAKEOVERRIDES=USE_GCC_STDINT=provide LIMITS_H_TEST=true",
                                # Force flags for libgcc to get <limits.h> kludge.
                                "CPPFLAGS_FOR_TARGET=-idirafter %s" % mock_include_dir,
                            ]
                            # Recipes won't let us capture stdout and stderr as
                            # a single stream, so use shell redirection to put
                            # stderr onto stdout.
                            # TODO: pipes is deprecated (removed in Python
                            # 3.13); switch to shlex.quote once the recipe no
                            # longer needs to run under Python 2.
                            cmd = [
                                "/bin/sh",
                                "-c",
                                " ".join(
                                    [pipes.quote(s) for s in cmd + args] + ["2>&1"]
                                ),
                            ]
                            return api.step(name, cmd, **kwargs)

                        api.step(
                            "configure",
                            [
                                gcc_dir.join("configure"),
                                "--with-pkgversion=%s" % gcc_pkgversion,
                                "--enable-languages=c,c++",
                                # We don't need these runtime libraries.
                                "--disable-libstdcxx",
                                "--disable-libssp",
                                "--disable-libquadmath",
                            ]
                            + common_args
                            + (
                                # The runtimes must observe the Fuchsia ABI,
                                # which reserves x18.
                                ["CFLAGS_FOR_TARGET=-g -O2 -ffixed-x18"]
                                if target == "aarch64"
                                else []
                            ),
                        )
                        gcc_make_step(
                            "build",
                            compile_jobs,
                            ["all-%s" % goal for goal in gcc_goals],
                            stdout=api.raw_io.output(
                                name="%s gcc build log" % target, add_output_log=True
                            ),
                        )
                        # Skip tests on Mac because they are unreasonably slow.
                        if api.platform.name != "mac":
                            try:
                                gcc_make_step(
                                    "test", api.platform.cpu_count, ["check-gcc"]
                                )
                            finally:
                                # Attach the testsuite logs whether or not the
                                # tests passed.
                                logs = {
                                    l[-1]: api.file.read_text(
                                        "gcc %s %s" % (target, "/".join(l)),
                                        gcc_build_dir.join(*l),
                                    ).splitlines()
                                    for l in [
                                        ("gcc", "testsuite", "gcc", "gcc.log"),
                                        ("gcc", "testsuite", "g++", "g++.log"),
                                    ]
                                }
                                step_result = api.step("test logs", cmd=None)
                                for name, text in logs.items():
                                    step_result.presentation.logs[name] = text
                        gcc_make_step(
                            "install",
                            1,
                            ["DESTDIR=%s" % pkg_dir]
                            + ["install-strip-%s" % goal for goal in gcc_goals],
                        )

    # Extract version numbers for the CIPD "version" tag.
    binutils_version = api.file.read_text(
        "binutils version",
        binutils_dir.join("bfd", "version.m4"),
        test_data="m4_define([BFD_VERSION], [2.27.0])",
    )
    m = re.match(r"m4_define\(\[BFD_VERSION\], \[([^]]+)\]\)", binutils_version)
    assert m and m.group(1), (
        "bfd/version.m4 has unexpected format: %r" % binutils_version
    )
    binutils_version = m.group(1)
    gcc_version = api.file.read_text(
        "gcc version", gcc_dir.join("gcc", "BASE-VER")
    ).rstrip()
    version = ",".join([gcc_version, binutils_version])

    # Copy the license file to the canonical name at the root of the package.
    api.file.copy(
        "copy license file", gcc_dir.join("COPYING3"), pkg_dir.join("LICENSE")
    )

    cipd_git_repository = ",".join([GCC_GIT, BINUTILS_GIT])
    cipd_git_revision = ",".join([gcc_revision, binutils_revision])

    # Add a bogus "salt" repository/revision to represent recipe changes when
    # the upstream revisions haven't changed. Bump this number whenever
    # changing this recipe in ways that affect the package built without also
    # changing any upstream revision pin.
    # TODO(crbug.com/947158): Replace this with the recipes repo/rev when
    # infra makes that available here.
    if RECIPE_SALT:  # pragma: no cover
        cipd_git_repository += ",<salt>"
        cipd_git_revision += ",%s" % RECIPE_SALT

    isolated = api.upload.upload_isolated(pkg_dir)

    # The prod builders actually publish to CIPD. The CI builders may run
    # quicker and they are the ones that trigger the test builds using the
    # toolchain just built.
    if prod:
        api.upload.cipd_package(
            "fuchsia/third_party/gcc/${platform}",
            pkg_dir,
            [api.upload.DirectoryPath(pkg_dir)],
            {"git_revision": cipd_git_revision},
            repository=cipd_git_repository,
            extra_tags={"version": version},
        )
    elif api.platform.name == "linux" and api.platform.arch == "intel":
        # Do a full integration build. This will use the just-built toolchain
        # to build all of Fuchsia to check whether there are any regressions.
        api.toolchain.trigger_build(
            "gcc_toolchain",
            GCC_GIT,
            gcc_revision,
            isolated,
            git_repository_info=cipd_git_repository,
            git_revision_info=cipd_git_revision,
            builders=[
                "gcc_toolchain.%s" % bot
                for bot in [
                    "bringup.arm64-gcc",
                    "bringup.x64-gcc",
                    "zbi_tests-arm64-gcc",
                    "zbi_tests-x64-gcc",
                ]
            ],
        )
| |
| |
def GenTests(api):
    """Generates recipe-engine test cases for RunSteps.

    Covers both buckets (ci/prod) across linux-intel, linux-arm, and
    mac-intel, plus the binutils configure/build/test failure paths and the
    (non-mac) gcc test failure path.
    """
    # Fixed revisions keep step expectations deterministic across runs.
    binutils_revision = "3d861fdb826c2f5cf270dd5f585d0e6057e1bf4f"
    gcc_revision = "4b5e15daff8b54440e3fda451c318ad31e532fab"
    cipd_revision = ",".join([gcc_revision, binutils_revision])
    if RECIPE_SALT:  # pragma: no cover
        # Mirror RunSteps: the salt is appended to the CIPD revision tag.
        cipd_revision += ",%s" % RECIPE_SALT

    for bucket in ("ci", "prod"):
        for platform, arch in (("linux", "intel"), ("linux", "arm"), ("mac", "intel")):

            def test(name, git_repo=GCC_GIT, revision=gcc_revision, fail=False):
                # The point of this helper is to capture the loop variables.
                # pylint: disable=cell-var-from-loop
                result = (
                    api.status_check.test(
                        "%s_%s" % (bucket, name),
                        status="failure" if fail else "success",
                    )
                    + api.buildbucket.ci_build(
                        project="fuchsia",
                        bucket=bucket,
                        git_repo=git_repo,
                        revision=revision,
                    )
                    + api.platform.name(platform)
                    + api.platform.arch(arch)
                    + api.platform.bits(64)
                )
                # pylint: disable=cell-var-from-loop
                # Only the ci/linux/intel success path reaches trigger_build,
                # which needs mocked ls-remote data.
                if (
                    bucket == "ci"
                    and not fail
                    and platform == "linux"
                    and arch == "intel"
                ):
                    result += api.git.get_remote_branch_head("git ls-remote", "b" * 40)
                return result

            # Basic success case for this bucket/platform/arch combination.
            yield (
                test("%s_%s" % (platform, arch))
                + api.step_data(
                    "binutils-gdb.git rev-parse",
                    api.raw_io.stream_output(binutils_revision),
                )
                + api.step_data(
                    "gcc.git rev-parse", api.raw_io.stream_output(gcc_revision)
                )
            )
            # Exercise both the no-salt and salted naming of the test cases.
            for salt in ["", 999] + ([RECIPE_SALT] if RECIPE_SALT else []):
                salt = "%s" % salt
                if bucket == "prod":
                    # Prod path: CIPD search finds no existing package, so the
                    # recipe proceeds to publish; also mocks the gcc version.
                    yield (
                        test("%s_%s_new%s" % (platform, arch, salt))
                        + api.step_data(
                            "binutils-gdb.git rev-parse",
                            api.raw_io.stream_output(binutils_revision),
                        )
                        + api.step_data(
                            "gcc.git rev-parse", api.raw_io.stream_output(gcc_revision)
                        )
                        + api.step_data(
                            "cipd.cipd search fuchsia/third_party/gcc/${platform} git_revision:"
                            + cipd_revision,
                            api.json.output({"result": []}),
                        )
                        + api.step_data("gcc version", api.file.read_text("7.1.2\n"))
                    )
                # Failure at the binutils configure step (x86_64 pass).
                yield (
                    test(
                        "%s_%s_binutils_configure_fail%s" % (platform, arch, salt),
                        git_repo=BINUTILS_GIT,
                        revision=binutils_revision,
                        fail=True,
                    )
                    + api.step_data(
                        "gcc.git rev-parse", api.raw_io.stream_output(gcc_revision)
                    )
                    + api.step_data("x86_64 binutils.configure", retcode=1)
                )
                # Failure at the binutils build step.
                yield (
                    test(
                        "%s_%s_binutils_build_fail%s" % (platform, arch, salt),
                        git_repo=BINUTILS_GIT,
                        revision=binutils_revision,
                        fail=True,
                    )
                    + api.step_data(
                        "gcc.git rev-parse", api.raw_io.stream_output(gcc_revision)
                    )
                    + api.step_data("x86_64 binutils.build", retcode=1)
                )
                # Failure at the binutils test step.
                yield (
                    test(
                        "%s_%s_binutils_test_fail%s" % (platform, arch, salt),
                        git_repo=BINUTILS_GIT,
                        revision=binutils_revision,
                        fail=True,
                    )
                    + api.step_data(
                        "gcc.git rev-parse", api.raw_io.stream_output(gcc_revision)
                    )
                    + api.step_data("x86_64 binutils.test", retcode=1)
                )
                # gcc tests are skipped on mac, so only mock their failure
                # elsewhere (aarch64 pass runs first).
                if platform != "mac":
                    yield (
                        test(
                            "%s_%s_gcc_test_fail%s" % (platform, arch, salt), fail=True
                        )
                        + api.step_data(
                            "binutils-gdb.git rev-parse",
                            api.raw_io.stream_output(binutils_revision),
                        )
                        + api.step_data("aarch64 gcc.test", retcode=1)
                    )