# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia SDKs."""
from recipe_engine.recipe_api import Property

DEPS = [
    'infra/bazel',
    'infra/fuchsia',
    'infra/go',
    'infra/gsutil',
    'infra/hash',
    'infra/jiri',
    'infra/tar',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/step',
]

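# All SDK builds use the release build type.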
BUILD_TYPE = 'release'

PROPERTIES = {
    'project':
        Property(kind=str, help='Jiri remote manifest project'),
    'manifest':
        Property(kind=str, help='Jiri manifest to use'),
    'remote':
        Property(kind=str, help='Remote manifest repository'),
}

def RunSteps(api, project, manifest, remote):
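  """Checks out and builds Fuchsia for each target, merges the per-target
  SDK archives, generates and tests a Bazel workspace from the merged
  archive, and uploads the results for global integration CI builds."""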
  api.go.ensure_go()
  api.gsutil.ensure_gsutil()

  build = api.buildbucket.build
  api.fuchsia.checkout(
      build=build,
      manifest=manifest,
      remote=remote,
      project=project)
  revision = build.input.gitiles_commit.id
  global_integration = 'global' in build.builder.bucket

  # Build fuchsia for each target.
  builds = {}
  for target in ('arm64', 'x64'):
    with api.step.nest('build ' + target):
      sdk_build_package = 'topaz/packages/sdk/topaz'
      builds[target] = api.fuchsia.build(
          target=target,
          build_type=BUILD_TYPE,
          packages=[sdk_build_package],
          gn_args=['build_sdk_archives=true'])

  # Merge the SDK archives for each target into a single archive.
  # Note that "alpha" and "beta" below have no particular meaning.
  merge_path = api.path['start_dir'].join(
      'scripts', 'sdk', 'merger', 'merge.py')
  full_archive_path = api.path['cleanup'].join('merged_sdk_archive.tar.gz')
  api.python(
      'merge sdk archives',
      merge_path,
      args=[
          '--alpha-archive',
          builds['x64'].fuchsia_build_dir.join(
              'sdk', 'archive', 'topaz.tar.gz'),
          '--beta-archive',
          builds['arm64'].fuchsia_build_dir.join(
              'sdk', 'archive', 'topaz.tar.gz'),
          '--output-archive',
          full_archive_path,
      ])

  # Generate a Bazel workspace along with its tests.
  # These tests are run for every SDK type.
  scripts_path = api.path['start_dir'].join('scripts', 'sdk', 'bazel')
  sdk_dir = api.path['cleanup'].join('sdk-bazel')
  test_workspace_dir = api.path['cleanup'].join('tests')
  api.python(
      'create bazel sdk',
      scripts_path.join('generate.py'),
      args=[
          '--archive',
          full_archive_path,
          '--output',
          sdk_dir,
          '--tests',
          test_workspace_dir,
      ])

  with api.step.nest('test sdk'):
    bazel_path = api.bazel.ensure_bazel()
    api.python(
        'run tests',
        test_workspace_dir.join('run.py'),
        args=[
            '--bazel',
            bazel_path,
        ])

  # TODO(nsylvain): Remove restriction on uploading only for global
  # integration CI once Bazel SDK consumers can differentiate between local
  # and global uploads.
  if revision and global_integration:
    gcs_archive_path = api.path.join(
        'sdk', 'core', '%s-amd64' % api.platform.name)
    cipd_pkg_name = 'fuchsia/sdk/core/${platform}'
    upload_raw_sdk(
        api,
        sdk_name='raw-core-sdk',
        gcs_archive_path=gcs_archive_path,
        cipd_pkg_name=cipd_pkg_name,
        archive_path=full_archive_path,
        remote=remote,
        revision=revision,
        upload_digest=True,
    )
    with api.step.nest('upload bazel sdk'):
      # Upload the SDK to CIPD and GCS.
      UploadPackage(api, 'bazel', sdk_dir, remote, revision)


def upload_raw_sdk(api, sdk_name, gcs_archive_path, cipd_pkg_name,
                   archive_path, remote, revision, upload_digest):
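  """Extracts the raw SDK archive into a staging directory and, when a
  revision is available, uploads it to GCS and CIPD via UploadArchive."""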
  sdk_dir = api.path['cleanup'].join(sdk_name)

  # Extract the archive to a directory for CIPD processing.
  with api.step.nest('extract ' + sdk_name):
    api.file.ensure_directory('create sdk dir', sdk_dir)
    api.tar.ensure_tar()
    api.tar.extract(
        step_name='unpack sdk archive',
        path=archive_path,
        directory=sdk_dir,
    )

  if revision:
    with api.step.nest('upload ' + sdk_name):
      # Upload the raw SDK to GCS and CIPD. Only upload digest when
      # upload_digest is true.
      UploadArchive(
          api,
          gcs_archive_path=gcs_archive_path,
          cipd_pkg_name=cipd_pkg_name,
          sdk=archive_path,
          out_dir=sdk_dir,
          remote=remote,
          revision=revision,
          upload_digest=upload_digest,
      )


def UploadPackage(api, sdk_name, staging_dir, remote, revision):
  """Given an SDK |sdk_name| with artifacts found in |staging_dir|, uploads
  a corresponding .cipd file to CIPD and GCS."""
  cipd_pkg_name = 'fuchsia/sdk/%s/${platform}' % sdk_name
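  # Skip the upload if this revision has already been packaged.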
  pins = api.cipd.search(cipd_pkg_name, 'git_revision:' + revision)
  if len(pins) > 0:
    api.step('Package is up-to-date', cmd=None)
    return

  pkg_def = api.cipd.PackageDefinition(
      package_name=cipd_pkg_name,
      package_root=staging_dir,
      install_mode='copy')
  pkg_def.add_dir(staging_dir)
  pkg_def.add_version_file('.versions/sdk.cipd_version')

  cipd_pkg_file = api.path['cleanup'].join('sdk.cipd')
  api.cipd.build_from_pkg(
      pkg_def=pkg_def,
      output_package=cipd_pkg_file,
  )
  pin = api.cipd.register(
      package_name=cipd_pkg_name,
      package_path=cipd_pkg_file,
      refs=['latest'],
      tags={
          'git_revision': revision,
      },
  )

  gcs_bucket_path = 'sdk/%s/%s-amd64' % (sdk_name, api.platform.name)
  api.gsutil.upload(
      'fuchsia',
      cipd_pkg_file,
      api.gsutil.join(gcs_bucket_path, pin.instance_id),
      unauthenticated_url=True)


def UploadArchive(api, gcs_archive_path, cipd_pkg_name, sdk, out_dir, remote,
                  revision, upload_digest):
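  """Uploads an SDK archive to GCS, keyed by the SHA-1 digest of its
  contents, uploads the jiri snapshot used to build it, and registers a
  matching CIPD package. When |upload_digest| is true, the digest is also
  published as LATEST_ARCHIVE for downstream autorollers."""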
  digest = api.hash.sha1('hash archive', sdk)

  archive_bucket = 'fuchsia'
  api.gsutil.upload(
      bucket=archive_bucket,
      src=sdk,
      dst='%s/%s' % (gcs_archive_path, digest),
      link_name='archive',
      name='upload fuchsia-sdk %s' % digest,
      unauthenticated_url=True)

  # Note that this will upload the snapshot to a location different from the
  # path that api.fuchsia copied it to. This uses a path based on the hash of
  # the SDK artifact, not based on the hash of the snapshot itself. Clients
  # can use this to find the snapshot used to build a specific SDK artifact.
  snapshot_file = api.path['cleanup'].join('jiri.snapshot')
  api.jiri.snapshot(snapshot_file)
  api.gsutil.upload(
      bucket='fuchsia-snapshots',
      src=snapshot_file,
      dst=digest,
      link_name='jiri.snapshot',
      name='upload jiri.snapshot',
      unauthenticated_url=False)

  if upload_digest:
    # Record the digest of the most recently uploaded archive for
    # downstream autorollers.
    digest_path = api.path['cleanup'].join('digest')
    api.file.write_text('write digest', digest_path, digest)
    api.gsutil.upload(
        bucket=archive_bucket,
        src=digest_path,
        dst='%s/LATEST_ARCHIVE' % gcs_archive_path,
        link_name='LATEST_ARCHIVE',
        name='upload latest digest',
        unauthenticated_url=False)

  # Upload the SDK to CIPD as well.
  pins = api.cipd.search(cipd_pkg_name, 'git_revision:' + revision)
  if len(pins) > 0:
    api.step('Package is up-to-date', cmd=None)
    return

  pkg_def = api.cipd.PackageDefinition(
      package_name=cipd_pkg_name,
      package_root=out_dir,
      install_mode='copy')
  pkg_def.add_dir(out_dir)
  api.cipd.create_from_pkg(
      pkg_def=pkg_def,
      refs=['latest'],
      tags={
          'git_revision': revision,
          'jiri_snapshot': digest,
      },
  )

# yapf: disable
def GenTests(api):
  revision = api.jiri.example_revision
  topaz_properties = api.properties(
      project='integration',
      repo='topaz',
      manifest='fuchsia/topaz/topaz',
      remote='https://fuchsia.googlesource.com/integration')
  topaz_local_ci = topaz_properties + api.buildbucket.ci_build(
      git_repo="https://fuchsia.googlesource.com/topaz",
      revision=revision,
  )
  topaz_global_ci = topaz_properties + api.buildbucket.ci_build(
      git_repo="https://fuchsia.googlesource.com/topaz",
      revision=revision,
      bucket="###global-integration-bucket###"
  )
  yield (api.test('topaz_local_ci') + topaz_local_ci)
  yield (api.test('topaz_global_ci') + topaz_global_ci)
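  # An empty CIPD search result means no package exists for this revision,
  # which exercises the upload code paths.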
  yield (api.test('topaz_global_ci_new_upload') +
         topaz_global_ci +
         api.step_data('upload raw-core-sdk.cipd search fuchsia/sdk/core/${platform} ' +
                       'git_revision:%s' % revision,
                       api.json.output({'result': []})) +
         api.step_data('upload bazel sdk.cipd search fuchsia/sdk/bazel/${platform} ' +
                       'git_revision:%s' % revision,
                       api.json.output({'result': []})))

  yield (api.test('cq') +
         api.buildbucket.try_build(
             git_repo="https://fuchsia.googlesource.com/topaz",
         ) +
         topaz_properties)
# yapf: enable