blob: 29cc71fdab5af21fb9bc7b5b450f92cad1a14609 [file] [log] [blame]
# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia SDKs."""
from recipe_engine.config import List
from recipe_engine.recipe_api import Property
DEPS = [
'fuchsia/build',
'fuchsia/buildbucket_util',
'fuchsia/checkout',
'fuchsia/fuchsia',
'fuchsia/git',
'fuchsia/gsutil',
'fuchsia/hash',
'fuchsia/jiri',
'fuchsia/macos_sdk',
'fuchsia/release',
'fuchsia/tar',
'fuchsia/sso',
'fuchsia/upload',
'recipe_engine/buildbucket',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/isolated',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'recipe_engine/swarming',
'recipe_engine/time',
]
# This represents the location of the isolated hash in the output of the
# fuchsia/build.py. Needs to be kept in sync with fuchsia/build.py.
ISOLATED_OUTPUT_KEY = 'isolated_output_hash'
TARGETS = ['arm64', 'x64']
PROPERTIES = {
'manifest':
Property(kind=str, help='Jiri manifest to use'),
'remote':
Property(kind=str, help='Remote manifest repository'),
'gcs_bucket':
Property(
kind=str,
help='GCS bucket for uploading the sdk.',
default='fuchsia'),
'sdk_name':
Property(
kind=str,
help='The name of the sdk to build.',
default='core',
),
'cipd_root':
Property(
kind=str,
help='The CIPD package root for upload.',
default='fuchsia'),
'companion_images':
Property(
kind=List(dict),
help='''
The Fuchsia images built with a version of the SDK.
name: Name of the image when referred to in the SDK.
builder: Name of the builder generating the image.
bucket: GCS bucket to install images.
''',
default=()),
'is_release_version':
Property(
kind=bool,
help='If input is a release version, publish the SDK and its companion images.',
default=False),
'ref_settings':
Property(
kind=List(dict),
help='''
Branch and ref settings for the SDK.
branch: Update refs and stamps if input is release version reachable on this branch.
update_ref: Update this ref in CIPD, and stamp in GCS.
''',
default=()),
'subbuild_collect_timeout_secs':
Property(
kind=int, help='Timeout for collecting subbuilds.', default=3600),
}
def fetch_sdk_artifacts(api, checkout_root, builds):
  """Downloads each subbuild's isolated SDK archive.

  Archives land under <checkout_root>/archives/<builder name>, one
  subdirectory per subbuild.

  Args:
    api: The recipe API object.
    checkout_root (Path): Root of the Fuchsia checkout.
    builds (dict): Build-ID-keyed mapping of completed subbuilds.
  """
  with api.step.nest('fetch sdk artifacts'):
    archives_root = checkout_root.join('archives')
    for subbuild in builds.values():
      builder_name = subbuild.builder.builder
      with api.step.nest(builder_name):
        # The output property value may carry surrounding JSON quotes.
        archive_hash = subbuild.output.properties[ISOLATED_OUTPUT_KEY].strip(
            '"')
        dest_dir = archives_root.join(builder_name)
        api.file.ensure_directory('makedirs', dest=dest_dir)
        api.isolated.download(
            step_name='download %s archive' % builder_name,
            isolated_hash=archive_hash,
            output_dir=dest_dir)
def relocate_artifacts(api, builds, companion_images, sdk_id):
  """Copies companion image build outputs to sdk_id-keyed GCS locations.

  For each image build, both build-archive.tgz and packages.tar.gz are
  copied from the build's own GCS bucket into the image's configured
  bucket under development/<sdk_id>/.

  Args:
    api: The recipe API object.
    builds (dict): Build-ID-keyed mapping of completed image builds.
    companion_images (list(dict)): Image configs with name/builder/bucket.
    sdk_id (str): Identifier of the SDK these images accompany.
  """
  with api.step.nest('relocate sdk image(s)'):
    for build in builds.values():
      builder_name = build.builder.builder
      # Look up the image config whose builder produced this build.
      image_name, image_bucket = next(
          (img['name'], img['bucket'])
          for img in companion_images
          if img['builder'] == builder_name)
      with api.step.nest(image_name):
        src_bucket = build.output.properties['gcs_bucket'].strip('"')
        # (source path, destination path) pairs: the build archive first,
        # then the package archive.
        transfers = [
            ('builds/%s/build-archive.tgz' % build.id,
             'development/%s/images/%s.tgz' % (sdk_id, image_name)),
            ('builds/%s/packages.tar.gz' % build.id,
             'development/%s/packages/%s.tar.gz' % (sdk_id, image_name)),
        ]
        for src_path, dst_path in transfers:
          api.gsutil.copy(
              src_bucket=src_bucket,
              src=src_path,
              dst_bucket=image_bucket,
              dst=dst_path,
              link_name='gsutil.copy',
              unauthenticated_url=False)
def resolve_ref_to_update(api, ref_settings, remote, integration_repo,
                          release_version):
  """Picks which ref should be updated for this release version.

  Ref settings order is respected: the first configured branch whose
  history contains the input release version wins. Returns None when the
  release version is not reachable on any configured branch.
  """
  for entry in ref_settings:
    https_remote = api.sso.sso_to_https(remote)
    head = api.git.get_remote_branch_head(https_remote, entry['branch'])
    known_versions = api.release.get_release_versions(
        ref=head, repo_path=integration_repo)
    if release_version in known_versions:
      return entry['update_ref']
  return None
def RunSteps(api, manifest, remote, gcs_bucket, sdk_name, cipd_root,
             is_release_version, companion_images, ref_settings,
             subbuild_collect_timeout_secs):
  """Builds, tests, and (for release versions) publishes the Fuchsia SDK.

  Flow: check out Fuchsia, schedule per-target SDK subbuilds (and optional
  companion image builds), collect and merge the SDK archives, generate and
  test Bazel and GN workspaces from the merged archive, then — only when
  is_release_version is set — relocate image artifacts and publish the core
  and GN SDKs to GCS and CIPD.
  """
  build = api.buildbucket.build
  revision = build.input.gitiles_commit.id
  # Default SDK ID is the buildbucket build ID; overridden below with the
  # release version string when is_release_version is set.
  sdk_id = str(build.id)
  checkout_root = api.path['start_dir'].join('fuchsia')
  checkout = api.checkout.fuchsia_with_options(
      path=checkout_root, build=build, manifest=manifest, remote=remote)
  integration_repo = checkout_root.join('integration')
  # Resolve the incoming release version and update refs.
  update_refs = []
  if is_release_version:
    release_version = api.release.ref_to_release_version(
        ref=revision, repo_path=integration_repo)
    # Override SDK ID with release version.
    sdk_id = str(release_version).replace('releases/', '')
    update_ref = resolve_ref_to_update(
        api=api,
        ref_settings=ref_settings,
        remote=remote,
        integration_repo=integration_repo,
        release_version=release_version)
    if update_ref:
      update_refs.append(update_ref)
  # parent_id lets subbuilds associate their outputs with this SDK build.
  subbuild_properties = {'parent_id': sdk_id}
  # One build-only subbuilder per target architecture in TARGETS.
  subbuilders = [
      'sdk-%s-%s-%s-build_only' % (sdk_name, api.platform.name, target)
      for target in TARGETS
  ]
  sdk_schedule_reqs = [
      api.buildbucket.schedule_request(
          builder=subbuilder,
          properties=subbuild_properties,
          # Leave unset to avoid overriding priority from configs.
          priority=None,
          # TODO(olivernewman): Swarming currently breaks if many builds are
          # launched simultaneously with the same swarming_parent_run_id. Set
          # the swarming_parent_run_id param here after that bug is fixed.
      ) for subbuilder in subbuilders
  ]
  sdk_builds = api.buildbucket.schedule(
      schedule_build_requests=sdk_schedule_reqs,
      step_name='schedule sdk builds')
  image_builds = []
  # Companion image builds consume the SDK identified by sdk_id.
  image_properties = {'sdk_id': sdk_id}
  if companion_images:
    image_schedule_reqs = [
        api.buildbucket.schedule_request(
            builder=image_builder,
            properties=image_properties,
            # Leave unset to avoid overriding priority from configs.
            priority=None,
            # TODO(olivernewman): Swarming currently breaks if many builds
            # are launched simultaneously with the same
            # swarming_parent_run_id. Set the swarming_parent_run_id param
            # here after that bug is fixed.
        ) for image_builder in [i['builder'] for i in companion_images]
    ]
    image_builds = api.buildbucket.schedule(
        schedule_build_requests=image_schedule_reqs,
        step_name='schedule image builds')
  sdk_builds = api.buildbucket.collect_builds(
      build_ids=[sdk_build.id for sdk_build in sdk_builds],
      step_name='collect sdk build results',
      timeout=subbuild_collect_timeout_secs)
  # Display SDK builds status and show failures on UI, if any.
  api.buildbucket_util.display_builds(
      step_name='display sdk builds',
      builds=sdk_builds.values(),
      raise_on_failure=True)
  # Download the individual SDK archives
  fetch_sdk_artifacts(api, checkout.root_dir, sdk_builds)
  # Merge the SDK archives for each target into a single archive.
  merge_path = checkout.root_dir.join('scripts', 'sdk', 'merger', 'merge.py')
  full_archive_path = api.path['cleanup'].join('merged_sdk_archive.tar.gz')
  sdk_archives_dir = checkout.root_dir.join('archives')
  sdk_archives_paths = api.file.glob_paths(
      'get_sdk_paths',
      sdk_archives_dir,
      '*/*.tar.gz',
      test_data=['%s/core.tar.gz' % subbuilder for subbuilder in subbuilders])
  # Seed the merged archive with the first per-target archive, then fold the
  # remaining archives into it one at a time via merge.py.
  api.file.move('create merged_sdk_archive.tar.gz', sdk_archives_paths[0],
                full_archive_path)
  for sdk_archives_path in sdk_archives_paths[1:]:
    api.python(
        'merge %s' % sdk_archives_path,
        merge_path,
        args=[
            '--first-archive',
            sdk_archives_path,
            '--second-archive',
            full_archive_path,
            '--output-archive',
            full_archive_path,
        ])
  # Generate a Bazel workspace along with its tests.
  # These tests are being run for every SDK flavor.
  generate_bazel_path = checkout.root_dir.join('scripts', 'sdk', 'bazel',
                                               'generate.py')
  sdk_dir = api.path['cleanup'].join('sdk-bazel')
  test_workspace_dir = api.path['cleanup'].join('tests')
  api.python(
      'create bazel sdk',
      generate_bazel_path,
      args=[
          '--archive',
          full_archive_path,
          '--output',
          sdk_dir,
          '--tests',
          test_workspace_dir,
      ],
  )
  with api.step.nest('test sdk'):
    # Generate tool_path.json to access bazel tool.
    gn_results = api.build.gen(
        checkout_root=checkout.root_dir,
        fuchsia_build_dir=checkout.root_dir.join('out', 'default'),
        target='x64',
        build_type='debug',
        product='products/bringup.gni',
    )
    bazel_path = gn_results.tool('bazel')
    bazel_user_root_path = api.path['cleanup'].join('bazel')
    with api.macos_sdk():
      api.python(
          'run bazel tests',
          test_workspace_dir.join('run.py'),
          args=[
              '--output_user_root',
              bazel_user_root_path,
              '--bazel',
              bazel_path,
          ],
      )
  with api.step.nest('generate gn sdk'):
    # Generate a GN workspace along with its tests.
    # These tests are being run for every SDK flavor.
    generate_gn_path = checkout.root_dir.join('scripts', 'sdk', 'gn',
                                              'generate.py')
    gn_sdk_dir = api.path['cleanup'].join('sdk-gn')
    gn_sdk_archive = api.path['cleanup'].join('gn.tar.gz')
    # NOTE(review): this reuses the same cleanup path ('tests') that the
    # Bazel workspace tests were generated into above — presumably
    # generate.py overwrites/extends it; confirm this is intentional.
    test_workspace_dir = api.path['cleanup'].join('tests')
    api.python(
        'create gn sdk',
        generate_gn_path,
        args=[
            '--archive',
            full_archive_path,
            '--output',
            gn_sdk_dir,
            '--output-archive',
            gn_sdk_archive,
            '--tests',
            test_workspace_dir,
        ],
    )
    with api.step.nest('test gn sdk'):
      api.python('run gn tests', test_workspace_dir.join('run.py'))
  if image_builds:
    image_builds = api.buildbucket.collect_builds(
        build_ids=[image_build.id for image_build in image_builds],
        step_name='collect image build results')
    # Display SDK builds status and show failures on UI, if any.
    api.buildbucket_util.display_builds(
        step_name='display image builds',
        builds=image_builds.values(),
        raise_on_failure=True,
    )
  # Publish the core and GN SDK.
  #
  # GCS publishing paths:
  # gs://fuchsia/development/${sdk_id}/sdk/${platform}
  #  |-- core.tar.gz
  #  `-- gn-sdk.tar.gz
  #
  # CIPD publishing paths (versioning is built into CIPD):
  # https://chrome-infra-packages.appspot.com/p/fuchsia/sdk/
  #  |-- core
  #  |   `-- ${platform}
  #  `-- gn
  #      `-- ${platform}
  if not is_release_version:
    return
  if image_builds:
    # Relocate the image build outputs into a well-known location based on
    # sdk_id.
    relocate_artifacts(api, image_builds, companion_images, sdk_id)
  # Publish core
  gcs_archive_path = 'development/%s/sdk/%s-amd64/%s.tar.gz' % (
      sdk_id, api.platform.name, sdk_name)
  cipd_pkg_name = '%s/sdk/%s/${platform}' % (cipd_root, sdk_name)
  # Tags attached to the CIPD package instance for traceability.
  extra_cipd_tags = {
      'jiri_snapshot': sdk_id,
      'version': sdk_id,
  }
  upload_core(
      api,
      sdk_name=sdk_name,
      gcs_archive_bucket=gcs_bucket,
      gcs_archive_path=gcs_archive_path,
      cipd_pkg_name=cipd_pkg_name,
      archive_path=full_archive_path,
      extra_cipd_tags=extra_cipd_tags,
      update_refs=update_refs,
      sdk_id=sdk_id,
      revision=revision,
      checkout_root=checkout.root_dir)
  # Publish GN SDK
  with api.step.nest('publish gn sdk'):
    # Upload SDK dir to CIPD and tarball to GCS
    gcs_path = 'development/%s/sdk/%s-amd64/gn.tar.gz' % (sdk_id,
                                                          api.platform.name)
    api.gsutil.upload(
        bucket=gcs_bucket,
        src=gn_sdk_archive,
        dst=gcs_path,
        name='upload gn fuchsia-sdk %s' % sdk_id,
        # Publicly available.
        unauthenticated_url=True)
    # Upload GN SDK CIPD
    api.upload.cipd_package(
        '%s/sdk/gn/${platform}' % cipd_root,
        gn_sdk_dir, [api.upload.DirectoryPath(gn_sdk_dir)],
        {'git_revision': revision},
        repository=None,
        refs=update_refs,
        extra_tags=extra_cipd_tags)
def upload_core(api, sdk_name, gcs_archive_bucket, gcs_archive_path,
                cipd_pkg_name, archive_path, extra_cipd_tags, update_refs,
                revision, sdk_id, checkout_root):
  """Publishes the core SDK archive to GCS and CIPD.

  Extracts the merged archive, uploads the tarball and the jiri snapshot to
  GCS, records the sdk_id stamp for each update ref, and finally uploads the
  extracted directory as a CIPD package.

  Args:
    api: The recipe API object.
    sdk_name (str): SDK flavor name (e.g. 'core'); used in step/GCS names.
    gcs_archive_bucket (str): GCS bucket receiving the archive and stamps.
    gcs_archive_path (str): Destination object path for the archive.
    cipd_pkg_name (str): Fully qualified CIPD package name.
    archive_path (Path): Local path to the merged SDK tarball.
    extra_cipd_tags (dict): Extra tags to attach to the CIPD instance.
    update_refs (list(str)): CIPD refs to move / GCS stamps to write.
    revision (str): Git revision recorded as the git_revision CIPD tag.
    sdk_id (str): SDK identifier (build ID or release version).
    checkout_root (Path): Checkout root used as cwd for the jiri snapshot.
  """
  sdk_dir = api.path['cleanup'].join(sdk_name)
  # Extract the archive to a directory for CIPD processing.
  with api.step.nest('extract ' + sdk_name):
    api.file.ensure_directory('create sdk dir', sdk_dir)
    api.tar.extract(
        step_name='unpack sdk archive',
        path=archive_path,
        directory=sdk_dir,
    )
  with api.step.nest('upload ' + sdk_name):
    api.gsutil.upload(
        bucket=gcs_archive_bucket,
        src=archive_path,
        dst=gcs_archive_path,
        link_name='archive',
        name='upload %s fuchsia-sdk %s' % (sdk_name, sdk_id),
        # Publicly available.
        unauthenticated_url=True)
    # Note that this will upload the snapshot to a location different from the
    # path that api.fuchsia copied it to. This uses a path based on the hash of
    # the SDK artifact, not based on the hash of the snapshot itself. Clients
    # can use this to find the snapshot used to build a specific SDK artifact.
    snapshot_file = api.path['cleanup'].join('jiri.snapshot')
    with api.context(cwd=checkout_root):
      api.jiri.snapshot(snapshot_file)
    api.gsutil.upload(
        bucket='fuchsia-snapshots',
        src=snapshot_file,
        dst=sdk_id,
        link_name='jiri.snapshot',
        name='upload jiri.snapshot')
    if update_refs:
      # Record the sdk_id of the most recently uploaded archive for downstream
      # autorollers.
      sdk_id_path = api.path['cleanup'].join('sdk_id')
      api.file.write_text('write sdk_id', sdk_id_path, sdk_id)
      for update_ref in update_refs:
        upper_update_ref = update_ref.upper()
        # Stamp object name embeds the uppercased ref and platform, e.g.
        # development/LATEST_LINUX.
        api.gsutil.upload(
            bucket=gcs_archive_bucket,
            src=sdk_id_path,
            dst='development/%s_%s' %
            (upper_update_ref, api.platform.name.upper()),
            link_name=upper_update_ref,
            name='upload %s sdk_id' % update_ref)
    # Upload the SDK to CIPD as well.
    api.upload.cipd_package(
        cipd_pkg_name,
        sdk_dir, [api.upload.DirectoryPath(sdk_dir)],
        {'git_revision': revision},
        repository=None,
        refs=update_refs,
        extra_tags=extra_cipd_tags)
def GenTests(api):
  """Simulation tests covering CI/CQ, mac, release, and failure paths."""

  def add_hash_property(build):
    # Give a simulated subbuild the isolated-hash output property that
    # fetch_sdk_artifacts reads.
    build.output.properties[ISOLATED_OUTPUT_KEY] = '###HASH###'
    return build

  revision = api.jiri.example_revision
  # Base properties shared by all test cases.
  topaz_properties = api.properties(
      project='integration',
      manifest='fuchsia/topaz/topaz',
      remote='https://fuchsia.googlesource.com/integration',
  )
  topaz_local_ci = topaz_properties + api.buildbucket.ci_build(
      git_repo='https://fuchsia.googlesource.com/topaz',
      revision=revision,
  ) + api.properties(revision=revision)
  topaz_global_ci = topaz_properties + api.buildbucket.ci_build(
      git_repo='https://fuchsia.googlesource.com/topaz',
      revision=revision,
      bucket='###global-integration-bucket###') + api.properties(
          revision=revision)
  topaz_release_ci = topaz_properties + api.buildbucket.ci_build(
      git_repo='https://fuchsia.googlesource.com/topaz',
      git_ref='refs/heads/release',
      revision=revision,
      bucket='###global-integration-bucket###') + api.properties(
          revision=revision)
  topaz_local_cq = topaz_properties + api.buildbucket.try_build()
  # Simulated 'collect sdk build results' outcomes for the two SDK subbuilds.
  ci_subbuilds = api.buildbucket.simulated_collect_output(
      builds=[
          add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
          add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS'))
      ],
      step_name='collect sdk build results')
  ci_subbuilds_infra_failure = api.buildbucket.simulated_collect_output(
      builds=[
          add_hash_property(
              api.buildbucket.ci_build_message(status='INFRA_FAILURE')),
          add_hash_property(
              api.buildbucket.ci_build_message(status='INFRA_FAILURE'))
      ],
      step_name='collect sdk build results')
  ci_subbuilds_failure = api.buildbucket.simulated_collect_output(
      builds=[
          add_hash_property(api.buildbucket.ci_build_message(status='FAILURE')),
          add_hash_property(api.buildbucket.ci_build_message(status='FAILURE'))
      ],
      step_name='collect sdk build results')
  cq_subbuilds = api.buildbucket.simulated_collect_output(
      builds=[
          add_hash_property(
              api.buildbucket.try_build_message(status='SUCCESS')),
          add_hash_property(
              api.buildbucket.try_build_message(status='SUCCESS'))
      ],
      step_name='collect sdk build results')
  ci_subbuilds_with_images = api.buildbucket.simulated_collect_output(
      builds=[
          add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
          add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
          add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
          add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS'))
      ],
      step_name='collect sdk build results')
  # Simulated companion image builds, carrying the gcs_bucket output property
  # that relocate_artifacts reads.
  image_build = api.buildbucket.ci_build_message(
      builder='###SDK_IMAGE_BUILDER###', status='SUCCESS', build_id=123456789)
  image_build.output.properties['gcs_bucket'] = '###BUCKET###'
  other_image_build = api.buildbucket.ci_build_message(
      builder='###OTHER_SDK_IMAGE_BUILDER###',
      status='SUCCESS',
      build_id=9876543210)
  other_image_build.output.properties['gcs_bucket'] = '###BUCKET###'
  ci_image_builds = api.buildbucket.simulated_collect_output(
      builds=[image_build, other_image_build],
      step_name='collect image build results')
  ci_image_builds_failure = api.buildbucket.simulated_collect_output(
      builds=[
          api.buildbucket.ci_build_message(
              builder='###SDK_IMAGE_BUILDER###',
              status='FAILURE',
              build_id=123456789),
          other_image_build,
      ],
      step_name='collect image build results')
  # Release-version tags the simulated integration repo knows about.
  tags = [
      'releases/0.20191019.0.1',
      'releases/0.20191018.0.1',
  ]
  describe = api.step_data('git describe',
                           api.raw_io.stream_output('releases/0.20191018.0.1'))
  ref_settings = [
      {
          'branch': 'refs/heads/master',
          'update_ref': 'latest',
      },
  ]
  release_versions = api.step_data(
      'get release versions on h3ll0.git --no-pager',
      api.raw_io.stream_output('\n'.join(tags)))
  no_release_versions = api.step_data(
      'get release versions on h3ll0.git --no-pager',
      api.raw_io.stream_output(''))
  companion_images = [{
      'name': '###SDK_IMAGE###',
      'builder': '###SDK_IMAGE_BUILDER###',
      'bucket': '###DEV_BUCKET###',
  }, {
      'name': '###OTHER_SDK_IMAGE###',
      'builder': '###OTHER_SDK_IMAGE_BUILDER###',
      'bucket': '###OTHER_DEV_BUCKET###',
  }]
  yield (api.test('local_ci') + topaz_local_ci + ci_subbuilds)
  yield (api.test('local_cq') + topaz_local_cq + cq_subbuilds)
  yield (api.test('local_ci_mac') + topaz_local_ci + api.platform.name('mac') +
         ci_subbuilds)
  yield (api.test('global_ci') + topaz_global_ci + ci_subbuilds +
         api.properties(ref_settings=ref_settings))
  yield (api.test('release_ci') + topaz_release_ci + ci_subbuilds +
         ci_image_builds + api.properties(
             is_release_version=True,
             companion_images=companion_images,
             ref_settings=ref_settings,
         ) + describe + release_versions)
  yield (api.test('release_ci_no_update_ref') + topaz_release_ci +
         ci_subbuilds + ci_image_builds + api.properties(
             is_release_version=True,
             companion_images=companion_images,
             ref_settings=ref_settings,
         ) + describe + no_release_versions)
  yield (api.test('local_ci_build_failure') + topaz_global_ci +
         ci_subbuilds_failure)
  yield (api.test('local_ci_infra_failure') + topaz_global_ci +
         ci_subbuilds_infra_failure)
  yield (api.test('release_ci_image_failure') + topaz_global_ci + ci_subbuilds +
         ci_image_builds_failure + api.properties(
             is_release_version=True,
             companion_images=companion_images,
             ref_settings=ref_settings,
         ) + describe + release_versions)
  yield (api.test('release_ci_new_upload') + topaz_release_ci + api.step_data(
      'upload core.cipd.cipd search fuchsia/sdk/core/${platform} ' +
      'git_revision:%s' % revision, api.json.output({'result': []})) +
         ci_subbuilds + ci_image_builds + api.properties(
             is_release_version=True,
             companion_images=companion_images,
             ref_settings=ref_settings,
         ) + describe + release_versions)