| # Copyright 2018 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for building GCC toolchain.""" |
| |
| from contextlib import contextmanager |
| from recipe_engine.recipe_api import StepFailure |
| |
| import pipes |
| import re |
| |
# Recipe modules this recipe depends on, resolved by the recipe engine.
DEPS = [
    'fuchsia/git',
    'fuchsia/gitiles',
    'fuchsia/goma',
    'fuchsia/hash',
    'fuchsia/jiri',
    'fuchsia/macos_sdk',
    'fuchsia/upload',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/raw_io',
    'recipe_engine/runtime',
    'recipe_engine/scheduler',
    'recipe_engine/step',
    'recipe_engine/url',
]

# Upstream git mirrors and the release branches pinned for this build.
BINUTILS_GIT = 'https://gnu.googlesource.com/binutils-gdb'
BINUTILS_REF = 'refs/heads/binutils-2_33-branch'

GCC_GIT = 'https://gnu.googlesource.com/gcc'
GCC_REF = 'refs/heads/gcc-9-branch'

# Bump this number whenever changing this recipe in ways that affect the
# package built without also changing any upstream revision pin.
# TODO(crbug.com/947158): Remove this when the recipes repo/rev are available.
RECIPE_SALT = ''
| |
| |
def RunSteps(api):
  """Build, test, and publish a GCC cross toolchain for Fuchsia.

  Checks out the pinned binutils-gdb and gcc branches (or the exact
  triggering commit from the buildbucket input), builds cross compilers
  targeting aarch64-elf and x86_64-elf using a host Clang fetched from
  CIPD, runs the upstream test suites, and then either publishes the
  package to CIPD (prod bucket) or triggers downstream Fuchsia builds
  against the isolated package (other buckets on linux-x64).
  """
  # Goma is used on all host architectures except ARM; without goma,
  # compile with local parallelism only.
  use_goma = api.platform.arch != 'arm'
  if use_goma:
    api.goma.ensure()
    compile_jobs = api.goma.jobs
    goma_context = lambda: api.goma.build_with_goma()
  else:
    compile_jobs = api.platform.cpu_count

    # Do-nothing stand-in for api.goma.build_with_goma() so the build
    # section below can use one `with goma_context():` either way.
    @contextmanager
    def null_context():
      yield

    goma_context = null_context

  # Only the prod bucket publishes to CIPD (see the end of this function).
  prod = api.buildbucket.builder_id.bucket == 'prod'

  binutils_dir = api.path['start_dir'].join('binutils-gdb')
  gcc_dir = api.path['start_dir'].join('gcc')

  # project name -> (git remote URL, checkout dir, ref/revision to fetch).
  projects = {
      'binutils-gdb': (BINUTILS_GIT, binutils_dir, BINUTILS_REF),
      'gcc': (GCC_GIT, gcc_dir, GCC_REF),
  }

  # When this build was triggered by a commit to one of the projects,
  # check out that exact revision instead of the pinned branch head.
  gitiles_commit = api.buildbucket.build_input.gitiles_commit
  if gitiles_commit.host and gitiles_commit.project and gitiles_commit.id:
    projects[gitiles_commit.project] = (
        'https://%s/%s' % (gitiles_commit.host, gitiles_commit.project),
        api.path['start_dir'].join(gitiles_commit.project), gitiles_commit.id)

  with api.context(infra_steps=True):
    with api.step.nest('binutils-gdb'):
      binutils_revision = api.git.checkout(*projects['binutils-gdb'])
      # "<remote> <revision>" string embedded into the tools' --version
      # output via configure --with-pkgversion below.
      binutils_pkgversion = '%s %s' % (projects['binutils-gdb'][0],
                                       binutils_revision)
    with api.step.nest('gcc'):
      gcc_revision = api.git.checkout(*projects['gcc'])
      gcc_pkgversion = '%s %s' % (projects['gcc'][0], gcc_revision)

  with api.step.nest('ensure_packages'):
    with api.context(infra_steps=True):
      pkgs = api.cipd.EnsureFile()
      # Host Clang used to compile binutils and gcc themselves.
      pkgs.add_package(
          'fuchsia/third_party/clang/${platform}', 'goma' if use_goma else
          'git_revision:ea93d7d6421612e9ea51b321eaf97fbdd64fe39b')
      if api.platform.name == 'linux':
        pkgs.add_package('fuchsia/sysroot/${platform}', 'latest')
      cipd_dir = api.path['start_dir'].join('cipd')
      api.cipd.ensure(cipd_dir, pkgs)

  # Pick the sysroot the host compiler wrapper scripts (below) will pass
  # via --sysroot on every compilation.
  if api.platform.name == 'linux':
    host_sysroot = cipd_dir
  elif api.platform.name == 'mac':
    # TODO(IN-148): Eventually use our own hermetic sysroot as for Linux.
    with api.macos_sdk():
      step_result = api.step(
          'xcrun', ['xcrun', '--show-sdk-path'],
          stdout=api.raw_io.output(name='sdk-path', add_output_log=True),
          step_test_data=lambda: api.raw_io.test_api.stream_output(
              '/some/xcode/path'))
      host_sysroot = step_result.stdout.strip()
  else:  # pragma: no cover
    assert False, 'what platform?'

  with api.context(cwd=gcc_dir):
    # download GCC dependencies: GMP, ISL, MPC and MPFR libraries
    api.step('download prerequisites',
             [gcc_dir.join('contrib', 'download_prerequisites')])

  staging_dir = api.path.mkdtemp('gcc')
  # Package directory name uses 'darwin' where api.platform says 'mac'.
  pkg_name = 'gcc-%s' % api.platform.name.replace('mac', 'darwin')
  pkg_dir = staging_dir.join(pkg_name)
  api.file.ensure_directory('create pkg dir', pkg_dir)

  # Some of the makefile logic splits $CC at its first word and injects
  # a switch there. So make $CC and $CXX be single words by writing
  # little scripts. Autoconf does some checks with CPPFLAGS but not
  # CFLAGS and other checks with CFLAGS but not CPPFLAGS. The sysroot
  # is necessary for all cases, so fold that into the script too so it's
  # impossible to omit it in any $CC or $CXX invocation.
  cc_path = staging_dir.join('host-cc')
  cxx_path = staging_dir.join('host-cxx')
  # Without goma this is '', which leaves a harmless extra space in the
  # generated wrapper scripts.
  gomacc_path = api.goma.goma_dir.join('gomacc') if use_goma else ''
  for script, compiler in [(cc_path, 'clang'), (cxx_path, 'clang++')]:
    compiler_path = cipd_dir.join('bin', compiler)
    if api.platform.name == 'mac' and compiler == 'clang++':
      # Our host toolchain for Mac provides static libc++ but doesn't know how
      # to link it in by itself. Things in LIBS or LDFLAGS get put onto `ar`
      # command lines for static libraries, which doesn't go well. A link
      # input on non-linking clang++ command lines is useless though harmless,
      # but it generates a lot of warning noise that makes the build logs hard
      # to read and slow to collect. So the wrapper script tries to add it
      # only to linking command lines.
      script_text = """#!/bin/sh
extra=(-nostdlib++ %s)
for arg; do
  case "$arg" in
  -[cE])
    extra=()
    break
    ;;
  esac
done
exec %s %s --sysroot=%s "$@" "${extra[@]}"
""" % (cipd_dir.join('lib',
                     'libc++.a'), gomacc_path, compiler_path, host_sysroot)
    else:
      script_text = """#!/bin/sh
exec %s %s --sysroot=%s "$@"
""" % (gomacc_path, compiler_path, host_sysroot)
    api.file.write_text('write %s script' % api.path.basename(script), script,
                        script_text)
    api.step('make %s executable' % api.path.basename(script),
             ['chmod', '+x', script])

  host_cflags = '-O3'
  if api.platform.name != 'mac':
    # LTO works for binutils on Linux but fails on macOS.
    host_cflags += ' -flto'
  host_compiler_args = {
      'CC': '%s' % cc_path,
      'CXX': '%s' % cxx_path,
      'CFLAGS': host_cflags,
      'CXXFLAGS': host_cflags,
  }

  if api.platform.name != 'mac':
    # Always link libc++ statically in case a shared library is available.
    host_compiler_args['CXXFLAGS'] += ' -static-libstdc++'

  # Flatten into sorted 'VAR=value' configure arguments so the command
  # line (and hence the step expectations) is deterministic.
  host_compiler_args = sorted(
      '%s=%s' % item for item in host_compiler_args.iteritems())

  # We force LIMITS_H_TEST=true to tell the compiler to install a <limits.h>
  # that requires a system <limits.h> it can find via #include_next. But we
  # don't have any system headers for the just-built compiler building target
  # code (i.e. libgcc). So mock up a little include directory that contains
  # just an empty <limits.h> for it to find.
  mock_include_dir = staging_dir.join('mock-include')
  api.file.ensure_directory('create mock include dir', mock_include_dir)
  api.file.write_text('write dummy <limits.h>',
                      mock_include_dir.join('limits.h'), '/* dummy */')

  with api.macos_sdk(), goma_context():
    for target, enable_targets in [('aarch64', 'arm-eabi'),
                                   ('x86_64', 'x86_64-pep')]:
      # configure arguments that are the same for binutils and gcc.
      common_args = host_compiler_args + [
          '--prefix=',  # we're building a relocatable package
          '--target=%s-elf' % target,
          '--enable-initfini-array',  # Fuchsia uses .init/.fini arrays
          '--enable-gold',  # Zircon uses gold for userspace build
          # Enable plugins and threading for Gold.
          # This also happens to make it explicitly link in -lpthread and -dl,
          # which are required by host_clang's static libc++.
          '--enable-plugins',
          '--enable-threads',
          '--disable-werror',  # ignore warnings reported by Clang
          '--disable-nls',  # no need for localization
          '--with-included-gettext',  # use include gettext library
      ]

      # build binutils
      with api.step.nest('%s binutils' % target):
        binutils_build_dir = staging_dir.join('binutils_%s_build_dir' % target)
        api.file.ensure_directory('create build dir', binutils_build_dir)

        with api.context(cwd=binutils_build_dir):

          def binutils_make_step(name, prefix, jobs, make_args=None):
            """Run `make <prefix>-{binutils,gas,ld,gold}` with -j<jobs>."""
            # As of 2.32, gold/testsuite/Makefile.am unconditionally edits in
            # a -B.../ switch to make the compiler use the just-built gold as
            # the linker for test suite binaries. This is wrong when
            # building a cross-linker. Force it to a no-op on the make
            # command line to work around the bug.
            # TODO(mcgrathr): Drop this when we roll to a binutils that has
            # this fixed upstream.
            make_args = make_args or []
            make_args.append('MAKEOVERRIDES=editcc=-eb')
            return api.step(name, ['make', '-j%s' % jobs] + make_args + [
                '%s-%s' % (prefix, component)
                for component in ['binutils', 'gas', 'ld', 'gold']
            ])

          api.step(
              'configure',
              [
                  binutils_dir.join('configure'),
                  '--with-pkgversion=%s' % binutils_pkgversion,
                  '--enable-deterministic-archives',  # more deterministic builds
                  '--enable-targets=%s' % enable_targets,
              ] + common_args)
          binutils_make_step('build', 'all', compile_jobs)
          try:
            # -k keeps the test suite going so all failures are collected.
            binutils_make_step('test', 'check', api.platform.cpu_count, ['-k'])
          except StepFailure as error:
            # Attach each sub-suite's log to a presentation-only step,
            # then re-raise the original failure.
            logs = {
                l[0]:
                api.file.read_text('/'.join(l),
                                   binutils_build_dir.join(*l)).splitlines()
                for l in [
                    ('gas', 'testsuite', 'gas.log'),
                    ('binutils', 'binutils.log'),
                    ('ld', 'ld.log'),
                    ('gold', 'testsuite', 'test-suite.log'),
                ]
            }
            step_result = api.step('binutils test failure', cmd=None)
            for name, text in logs.iteritems():
              step_result.presentation.logs[name] = text
            raise error
          binutils_make_step('install', 'install-strip', 1,
                             ['DESTDIR=%s' % pkg_dir])

      # build gcc
      with api.step.nest('%s gcc' % target):
        gcc_build_dir = staging_dir.join('gcc_%s_build_dir' % target)
        api.file.ensure_directory('create build dir', gcc_build_dir)

        # Prefix PATH with the just-installed binutils (installed into
        # pkg_dir above) so gcc's build uses them.
        with api.context(
            cwd=gcc_build_dir, env_prefixes={'PATH': [pkg_dir.join('bin')]}):
          gcc_goals = ['gcc', 'target-libgcc']

          def gcc_make_step(name, jobs, args, **kwargs):
            """Run make for gcc with the <limits.h> kludge flags applied."""
            cmd = [
                'make',
                '-j%s' % jobs,
                'MAKEOVERRIDES=USE_GCC_STDINT=provide LIMITS_H_TEST=true',
                # Force flags for libgcc to get <limits.h> kludge.
                'CPPFLAGS_FOR_TARGET=-idirafter %s' % mock_include_dir,
            ]
            # Recipes won't let us capture stdout and stderr as a single stream,
            # so use shell redirection to put stderr onto stdout.
            cmd = [
                '/bin/sh', '-c',
                ' '.join([pipes.quote(s) for s in cmd + args] + ['2>&1'])
            ]
            return api.step(name, cmd, **kwargs)

          api.step(
              'configure',
              [
                  gcc_dir.join('configure'),
                  '--with-pkgversion=%s' % gcc_pkgversion,
                  '--enable-languages=c,c++',
                  '--disable-libstdcxx',  # we don't need libstdc++
                  '--disable-libssp',  # we don't need libssp either
                  '--disable-libquadmath',  # and nor do we need libquadmath
              ] + common_args + (
                  # The runtimes must observe the Fuchsia ABI, which reserves x18.
                  ['CFLAGS_FOR_TARGET=-g -O2 -ffixed-x18']
                  if target == 'aarch64' else []))
          gcc_make_step(
              'build',
              compile_jobs, ['all-%s' % goal for goal in gcc_goals],
              stdout=api.raw_io.output(
                  name='%s gcc build log' % target, add_output_log=True))
          try:
            gcc_make_step('test', api.platform.cpu_count, ['check-gcc'])
          finally:
            # Attach the gcc and g++ test-suite logs whether the test
            # step passed or failed.
            logs = {
                l[-1]: api.file.read_text('gcc %s %s' % (target, '/'.join(l)),
                                          gcc_build_dir.join(*l)).splitlines()
                for l in [
                    ('gcc', 'testsuite', 'gcc', 'gcc.log'),
                    ('gcc', 'testsuite', 'g++', 'g++.log'),
                ]
            }
            step_result = api.step('test logs', cmd=None)
            for name, text in logs.iteritems():
              step_result.presentation.logs[name] = text
          gcc_make_step('install', 1, ['DESTDIR=%s' % pkg_dir] +
                        ['install-strip-%s' % goal for goal in gcc_goals])

  # Extract the binutils version number from bfd/version.m4, e.g.
  # "m4_define([BFD_VERSION], [2.33.0])" -> "2.33.0".
  binutils_version = api.file.read_text(
      'binutils version',
      binutils_dir.join('bfd', 'version.m4'),
      test_data='m4_define([BFD_VERSION], [2.27.0])')
  m = re.match(r'm4_define\(\[BFD_VERSION\], \[([^]]+)\]\)', binutils_version)
  assert m and m.group(1), ('bfd/version.m4 has unexpected format: %r' %
                            binutils_version)
  binutils_version = m.group(1)
  gcc_version = api.file.read_text('gcc version',
                                   gcc_dir.join('gcc', 'BASE-VER')).rstrip()
  # Combined "gcc,binutils" version tag used on the CIPD package.
  version = ','.join([gcc_version, binutils_version])

  cipd_git_repository = ','.join([GCC_GIT, BINUTILS_GIT])
  cipd_git_revision = ','.join([gcc_revision, binutils_revision])

  # Add a bogus "salt" repository/revision to represent recipe changes when
  # the upstream revisions haven't changed. Bump this number whenever
  # changing this recipe in ways that affect the package built without also
  # changing any upstream revision pin.
  # TODO(crbug.com/947158): Replace this with the recipes repo/rev when
  # infra makes that available here.
  if RECIPE_SALT:  # pragma: no cover
    cipd_git_repository += ',<salt>'
    cipd_git_revision += ',%s' % RECIPE_SALT

  isolated = api.upload.upload_isolated(pkg_dir)

  # The prod builders actually publish to CIPD. The CI builders may run
  # quicker and they are the ones that trigger the test builds using the
  # toolchain just built.
  if prod:
    api.upload.cipd_package(
        'fuchsia/third_party/gcc/${platform}',
        pkg_dir, [api.upload.DirectoryPath(pkg_dir)],
        {'git_revision': cipd_git_revision},
        repository=cipd_git_repository,
        extra_tags={'version': version})
  elif api.platform.name == 'linux' and api.platform.arch == 'intel':

    host = 'fuchsia.googlesource.com'
    project = 'fuchsia'
    refs = api.gitiles.refs('https://%s/%s' % (host, project))
    ref = 'refs/heads/master'
    # Fall back to 'HEAD' if the master ref is absent from the listing.
    revision = refs.get(ref, 'HEAD')

    # Do a full integration build. This will use the just-built toolchain
    # to build all of Fuchsia to check whether there are any regressions.
    #
    # TODO(43620): This is modelled on the clang_toolchain.py code.
    # Ideally this logic would be shared somewhere in a recipe module,
    # perhaps just api.build. Ideally the list of bots to test for each
    # toolchain would be shared with/generated from fuchsia.star directly.
    api.scheduler.emit_trigger(
        api.scheduler.BuildbucketTrigger(
            properties={
                'gcc_toolchain': {
                    'git_repository': cipd_git_repository,
                    'git_revision': cipd_git_revision,
                },
                'build.gcc_toolchain': {
                    'type': 'isolated',
                    'instance': isolated,
                },
            },
            tags={
                'buildset':
                    'commit/gitiles/%s/%s/+/%s' % (host, project, revision),
                'gitiles_ref':
                    ref,
            }),
        project=project,
        jobs=[
            'gcc.%s' % bot for bot in [
                'bringup.arm64-gcc-qemu_kvm',
                'bringup.x64-gcc-qemu_kvm',
                'zbi_tests-arm64-gcc',
                'zbi_tests-x64-gcc',
            ]
        ])
| |
| |
def GenTests(api):
  """Generate recipe expectation test cases.

  Covers both buckets (ci, prod) on each supported host platform/arch
  combination, plus binutils and gcc test-suite failure paths and, for
  prod, the package-not-yet-published CIPD path.
  """
  binutils_revision = '3d861fdb826c2f5cf270dd5f585d0e6057e1bf4f'
  gcc_revision = '4b5e15daff8b54440e3fda451c318ad31e532fab'
  # Must match the cipd_git_revision string computed in RunSteps.
  cipd_revision = ','.join([gcc_revision, binutils_revision])
  if RECIPE_SALT:  # pragma: no cover
    cipd_revision += ',%s' % RECIPE_SALT

  for bucket in ('ci', 'prod'):
    for platform, arch in (('linux', 'intel'), ('linux', 'arm'), ('mac',
                                                                  'intel')):

      def Test(name, git_repo=GCC_GIT, revision=gcc_revision, fail=False):
        # The point of this helper is to capture the loop variables.
        # pylint: disable=cell-var-from-loop
        result = (
            api.test('%s_%s' % (bucket, name)) + api.buildbucket.ci_build(
                project='fuchsia',
                bucket=bucket,
                git_repo=git_repo,
                revision=revision,
            ) + api.runtime(is_luci=True, is_experimental=False) +
            api.platform.name(platform) + api.platform.arch(arch) +
            api.platform.bits(64))
        # pylint: disable=cell-var-from-loop
        # Only a successful linux-x64 CI build reaches the gitiles.refs
        # call in RunSteps that triggers downstream builds, so only that
        # case needs mocked refs data.
        if bucket == 'ci' and not fail and platform == 'linux' and arch == 'intel':
          result += api.gitiles.refs('refs', ('refs/heads/master', 'b' * 40))
        return result

      # Baseline successful build.
      yield (Test('%s_%s' % (platform, arch)) +
             api.step_data('binutils-gdb.git rev-parse',
                           api.raw_io.stream_output(binutils_revision)) +
             api.step_data('gcc.git rev-parse',
                           api.raw_io.stream_output(gcc_revision)))
      for salt in ['', 999] + ([RECIPE_SALT] if RECIPE_SALT else []):
        salt = '%s' % salt
        if bucket == 'prod':
          # CIPD search finds no existing package for this revision.
          yield (
              Test('%s_%s_new%s' % (platform, arch, salt)) +
              api.step_data('binutils-gdb.git rev-parse',
                            api.raw_io.stream_output(binutils_revision)) +
              api.step_data('gcc.git rev-parse',
                            api.raw_io.stream_output(gcc_revision)) +
              api.step_data(
                  'cipd.cipd search fuchsia/third_party/gcc/${platform} git_revision:'
                  + cipd_revision, api.json.output({'result': []})) +
              api.step_data('gcc version', api.file.read_text('7.1.2\n')))
        # binutils test-suite failure path.
        yield (Test(
            '%s_%s_binutils_test_fail%s' % (platform, arch, salt),
            git_repo=BINUTILS_GIT,
            revision=binutils_revision,
            fail=True) + api.step_data('gcc.git rev-parse',
                                       api.raw_io.stream_output(gcc_revision)) +
               api.step_data('x86_64 binutils.test', retcode=1))
        # gcc test-suite failure path.
        yield (
            Test('%s_%s_gcc_test_fail%s' % (platform, arch, salt), fail=True) +
            api.step_data('binutils-gdb.git rev-parse',
                          api.raw_io.stream_output(binutils_revision)) +
            api.step_data('aarch64 gcc.test', retcode=1))