# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Fuchsia SDKs."""
from recipe_engine.config import List
from recipe_engine.recipe_api import Property
DEPS = [
'fuchsia/build',
'fuchsia/buildbucket_util',
'fuchsia/checkout',
'fuchsia/fuchsia',
'fuchsia/git',
'fuchsia/gsutil',
'fuchsia/hash',
'fuchsia/jiri',
'fuchsia/macos_sdk',
'fuchsia/release',
    'fuchsia/sso',
    'fuchsia/tar',
'fuchsia/upload',
'recipe_engine/buildbucket',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/isolated',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'recipe_engine/swarming',
'recipe_engine/time',
]
BUILD_TYPE = 'release'
# This is the key under which the sdk subbuild exposes its isolated hash in
# its output properties. It must be kept in sync with sdk_subbuild.py.
ISOLATED_OUTPUT_KEY = 'isolated_output_hash'
TARGETS = ['arm64', 'x64']
PROPERTIES = {
'manifest':
Property(kind=str, help='Jiri manifest to use'),
'remote':
Property(kind=str, help='Remote manifest repository'),
'checkout_snapshot':
Property(
kind=bool,
            help='Whether or not to check out from a Jiri snapshot.'
            ' The snapshot is expected to be found at the location specified'
            ' by the BuildBucket input.',
default=False),
'gcs_bucket':
Property(
kind=str,
help='GCS bucket for uploading the sdk.',
default='fuchsia'),
'sdk_name':
Property(
kind=str,
help='The name of the sdk to build.',
default='core',
),
'cipd_root':
Property(
kind=str,
help='The CIPD package root for upload.',
default='fuchsia'),
'subbuilders':
Property(
kind=List(basestring),
help='SDK subbuilders used to generate SDK archives.',
# TODO(fxb/40777): Make this required. This is optional for a
# soft-transition away from sdk_subbuild.py to fuchsia/build.py.
# If this property is set, use these fuchsia/build.py subbuilders
# instead of the default sdk_subbuild.py.
default=()),
'companion_images':
Property(
kind=List(dict),
help='The Fuchsia images built with a version of the SDK.'
' Each object should have 3 properties:'
' name: name of the image when referred to in the SDK'
' builder: name of the builder generating the image'
' bucket: GCS bucket where the images should be installed',
default=[]),
'publish_branch':
Property(
kind=str,
            help='Publish if the input is a release version reachable on this'
            ' branch.',
default=''),
'sdk_id':
Property(kind=str, help='SDK version id', default=None),
}
def fetch_sdk_artifacts(api, checkout_root, builds):
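  """Downloads the isolated SDK archives produced by the subbuilds.

  Each subbuild exposes the hash of its isolated outputs via the
  ISOLATED_OUTPUT_KEY output property; each archive is downloaded into
  archives/<builder name> under the checkout root.
  """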
with api.step.nest('fetch sdk artifacts'):
isolated_output_path = checkout_root.join('archives')
for build in builds.values():
with api.step.nest(build.builder.builder):
isolated_output = build.output.properties[ISOLATED_OUTPUT_KEY].strip(
'"')
output_dir = isolated_output_path.join(build.builder.builder)
api.file.ensure_directory('makedirs', dest=output_dir)
api.isolated.download(
step_name='download %s archive' % build.builder.builder,
isolated_hash=isolated_output,
output_dir=output_dir)
def relocate_artifacts(api, builds, companion_images, sdk_id):
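  """Copies companion image build outputs to well-known GCS paths under
  development/<sdk_id>/.
  """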
def relocate_artifact(api, src_bucket, dst_bucket, src_path, dst_path):
api.gsutil.copy(
src_bucket=src_bucket,
src=src_path,
dst_bucket=dst_bucket,
dst=dst_path,
link_name='gsutil.copy',
unauthenticated_url=False)
with api.step.nest('relocate sdk image(s)'):
for build in builds.values():
image_name, image_bucket = next(
(image['name'], image['bucket'])
for image in companion_images
if image['builder'] == build.builder.builder)
with api.step.nest(image_name):
build_gcs_bucket = build.output.properties['gcs_bucket'].strip('"')
# build-archive.tgz
relocate_artifact(
api=api,
src_bucket=build_gcs_bucket,
dst_bucket=image_bucket,
src_path='builds/%s/build-archive.tgz' % build.id,
dst_path='development/%s/images/%s.tgz' % (sdk_id, image_name),
)
# packages.tar.gz
relocate_artifact(
api=api,
src_bucket=build_gcs_bucket,
dst_bucket=image_bucket,
src_path='builds/%s/packages.tar.gz' % build.id,
dst_path='development/%s/packages/%s.tar.gz' % (sdk_id, image_name),
)
def RunSteps(api, manifest, remote, checkout_snapshot, gcs_bucket, sdk_name,
cipd_root, subbuilders, companion_images, publish_branch, sdk_id):
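  """Schedules SDK subbuilds and companion image builds, merges and tests the
  resulting SDK archives, and publishes the merged SDK to GCS and CIPD.
  """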
build = api.buildbucket.build
revision = build.input.gitiles_commit.id
sdk_builds = []
if not sdk_id:
sdk_id = str(build.id)
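  # The same id is handed to every subbuild (as both sdk_id and parent_id) so
  # that all of its artifacts are versioned under a single id.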
subbuild_properties = {'sdk_id': sdk_id, 'parent_id': sdk_id}
sdk_schedule_reqs = []
  # TODO(fxb/40777): Make this unconditionally use subbuilders.
if not subbuilders:
builder_names = [
'sdk-%s-subbuild-%s-%s' % (sdk_name, target, api.platform.name)
for target in TARGETS
]
else:
builder_names = subbuilders
for builder_name in builder_names:
sdk_schedule_reqs.append(
api.buildbucket.schedule_request(
builder=builder_name,
properties=subbuild_properties,
# Leave unset to avoid overriding priority from configs.
priority=None,
# TODO(olivernewman): Swarming currently breaks if many builds are
# launched simultaneously with the same swarming_parent_run_id. Set
# the swarming_parent_run_id param here after that bug is fixed.
))
sdk_builds.extend(
api.buildbucket.schedule(
schedule_build_requests=sdk_schedule_reqs,
step_name='schedule sdk builds'))
checkout_root = api.path['start_dir'].join('fuchsia')
if checkout_snapshot:
if api.buildbucket_util.is_tryjob:
assert len(build.input.gerrit_changes) == 1
checkout = api.checkout.from_patchset_snapshot(
path=checkout_root, gerrit_change=build.input.gerrit_changes[0])
else:
checkout = api.checkout.from_commit_snapshot(
path=checkout_root, gitiles_commit=build.input.gitiles_commit)
else:
assert manifest
assert remote
checkout = api.checkout.fuchsia_with_options(
path=checkout_root,
build=build,
manifest=manifest,
remote=remote,
)
  # Only publish the SDK and companion images if the input release version
  # is reachable on the publish branch.
publish = False
if revision and publish_branch:
integration_repo = checkout.root_dir.join('integration')
try:
release_version = api.release.ref_to_release_version(
ref=revision, repo_path=integration_repo)
branch_head = api.git.get_remote_branch_head(
api.sso.sso_to_https(remote), publish_branch)
if release_version in api.release.get_release_versions(
ref=branch_head, repo_path=integration_repo):
publish = True
# If the incoming revision was not a release version, continue
# without publishing.
except api.step.StepFailure:
pass
image_builds = []
image_properties = {'sdk_id': sdk_id}
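  # Image builders receive the sdk_id so that their outputs can later be
  # relocated under development/<sdk_id>/.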
if publish and companion_images:
image_schedule_reqs = []
for image_builder in [i['builder'] for i in companion_images]:
image_schedule_reqs.append(
api.buildbucket.schedule_request(
builder=image_builder,
properties=image_properties,
# Leave unset to avoid overriding priority from configs.
priority=None,
# TODO(olivernewman): Swarming currently breaks if many builds
# are launched simultaneously with the same
# swarming_parent_run_id. Set the swarming_parent_run_id param
# here after that bug is fixed.
))
image_builds.extend(
api.buildbucket.schedule(
schedule_build_requests=image_schedule_reqs,
step_name='schedule image builds'))
sdk_builds = api.buildbucket.collect_builds(
build_ids=[sdk_build.id for sdk_build in sdk_builds],
step_name='collect sdk build results')
  # Display SDK build statuses and surface any failures in the UI.
api.buildbucket_util.display_builds(
step_name='display sdk builds',
builds=sdk_builds.values(),
raise_on_failure=True,
)
  # Download the individual SDK archives.
fetch_sdk_artifacts(api, checkout.root_dir, sdk_builds)
# Merge the SDK archives for each target into a single archive.
merge_path = checkout.root_dir.join('scripts', 'sdk', 'merger', 'merge.py')
full_archive_path = api.path['cleanup'].join('merged_sdk_archive.tar.gz')
sdk_archives_dir = checkout.root_dir.join('archives')
sdk_archives_paths = api.file.glob_paths(
'get_sdk_paths',
sdk_archives_dir,
'*/*.tar.gz',
test_data=[
'%s/core.tar.gz' % builder_name for builder_name in builder_names
])
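  # Seed the merged archive with the first target's archive, then fold the
  # remaining archives in one at a time with merge.py.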
api.file.move('create merged_sdk_archive.tar.gz', sdk_archives_paths[0],
full_archive_path)
for sdk_archives_path in sdk_archives_paths[1:]:
api.python(
'merge %s' % sdk_archives_path,
merge_path,
args=[
'--first-archive',
sdk_archives_path,
'--second-archive',
full_archive_path,
'--output-archive',
full_archive_path,
])
  # Generate a Bazel workspace along with its tests. These tests are run for
  # every SDK type.
generate_bazel_path = checkout.root_dir.join('scripts', 'sdk', 'bazel',
'generate.py')
sdk_dir = api.path['cleanup'].join('sdk-bazel')
test_workspace_dir = api.path['cleanup'].join('tests')
api.python(
'create bazel sdk',
generate_bazel_path,
args=[
'--archive',
full_archive_path,
'--output',
sdk_dir,
'--tests',
test_workspace_dir,
],
)
with api.step.nest('test sdk'):
# Generate tool_path.json to access bazel tool.
gn_results = api.build.gen(
checkout_root=checkout.root_dir,
fuchsia_build_dir=checkout.root_dir.join('out', 'default'),
target='x64',
build_type='debug',
product='products/bringup.gni',
)
bazel_path = gn_results.tool('bazel')
bazel_user_root_path = api.path['cleanup'].join('bazel')
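    # Run the generated workspace's tests against the bazel tool from the
    # checkout, using a scratch directory as Bazel's output user root.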
with api.macos_sdk():
api.python(
'run tests',
test_workspace_dir.join('run.py'),
args=[
'--output_user_root',
bazel_user_root_path,
'--bazel',
bazel_path,
],
)
if image_builds:
image_builds = api.buildbucket.collect_builds(
build_ids=[image_build.id for image_build in image_builds],
step_name='collect image build results')
    # Display image build statuses and surface any failures in the UI.
api.buildbucket_util.display_builds(
step_name='display image builds',
builds=image_builds.values(),
raise_on_failure=True,
)
  # Publish the core SDK (and relocate companion images) only if the publish
  # check above passed.
if not publish:
return
if companion_images:
# Relocate the image build outputs into a well-known location based on
# sdk_id.
    relocate_artifacts(api, image_builds, companion_images, sdk_id)
gcs_archive_path = 'sdk/%s/%s-amd64' % (sdk_name, api.platform.name)
new_gcs_archive_path = 'development/%s/sdk/%s-amd64/%s.tar.gz' % (
sdk_id, api.platform.name, sdk_name)
cipd_pkg_name = '%s/sdk/%s/${platform}' % (cipd_root, sdk_name)
upload_raw_sdk(
api,
sdk_name='raw-core-sdk',
gcs_archive_bucket=gcs_bucket,
gcs_archive_path=gcs_archive_path,
new_gcs_archive_path=new_gcs_archive_path,
cipd_pkg_name=cipd_pkg_name,
archive_path=full_archive_path,
revision=revision,
update_latest=True,
sdk_id=sdk_id,
build_id=str(build.id),
checkout_root=checkout.root_dir)
def upload_raw_sdk(api, sdk_name, gcs_archive_bucket, gcs_archive_path,
new_gcs_archive_path, cipd_pkg_name, archive_path, revision,
update_latest, sdk_id, build_id, checkout_root):
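  """Extracts the SDK archive and, if a revision is available, uploads the
  archive and its contents to GCS and CIPD.
  """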
sdk_dir = api.path['cleanup'].join(sdk_name)
# Extract the archive to a directory for CIPD processing.
with api.step.nest('extract ' + sdk_name):
api.file.ensure_directory('create sdk dir', sdk_dir)
api.tar.extract(
step_name='unpack sdk archive',
path=archive_path,
directory=sdk_dir,
)
if revision:
with api.step.nest('upload ' + sdk_name):
      # Upload the SDK to GCS and CIPD. The LATEST pointers are only updated
      # when update_latest is true.
upload_archive(
api,
gcs_archive_bucket=gcs_archive_bucket,
gcs_archive_path=gcs_archive_path,
new_gcs_archive_path=new_gcs_archive_path,
cipd_pkg_name=cipd_pkg_name,
sdk=archive_path,
out_dir=sdk_dir,
revision=revision,
update_latest=update_latest,
sdk_id=sdk_id,
build_id=build_id,
checkout_root=checkout_root,
)
def upload_archive(api, gcs_archive_bucket, gcs_archive_path,
new_gcs_archive_path, cipd_pkg_name, sdk, out_dir, revision,
update_latest, sdk_id, build_id, checkout_root):
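  """Uploads the SDK archive to GCS, optionally updates the LATEST pointers,
  and publishes the extracted SDK directory to CIPD.
  """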
api.gsutil.upload(
bucket=gcs_archive_bucket,
src=sdk,
dst='%s/%s' % (gcs_archive_path, sdk_id),
link_name='archive',
name='upload fuchsia-sdk %s' % sdk_id,
# Publicly available.
unauthenticated_url=True)
api.gsutil.upload(
bucket=gcs_archive_bucket,
src=sdk,
dst=new_gcs_archive_path,
link_name='archive',
name='upload new fuchsia-sdk %s' % sdk_id,
# Publicly available.
unauthenticated_url=True)
# Only upload a snapshot if this build was invoked by the scheduler. If the
# sdk_id equals the build_id then the sdk_id was not passed as a property by
# another recipe.
if sdk_id == build_id:
# Note that this will upload the snapshot to a location different from the
# path that api.fuchsia copied it to. This uses a path based on the hash of
# the SDK artifact, not based on the hash of the snapshot itself. Clients can
# use this to find the snapshot used to build a specific SDK artifact.
snapshot_file = api.path['cleanup'].join('jiri.snapshot')
with api.context(cwd=checkout_root):
api.jiri.snapshot(snapshot_file)
api.gsutil.upload(
bucket='fuchsia-snapshots',
src=snapshot_file,
dst=sdk_id,
link_name='jiri.snapshot',
name='upload jiri.snapshot')
if update_latest:
    # Record the sdk_id of the most recently uploaded archive for downstream
    # autorollers.
sdk_id_path = api.path['cleanup'].join('sdk_id')
api.file.write_text('write sdk_id', sdk_id_path, sdk_id)
api.gsutil.upload(
bucket=gcs_archive_bucket,
src=sdk_id_path,
dst='%s/LATEST_ARCHIVE' % gcs_archive_path,
link_name='LATEST_ARCHIVE',
name='upload latest sdk_id')
api.gsutil.upload(
bucket=gcs_archive_bucket,
src=sdk_id_path,
dst='development/LATEST_%s' % api.platform.name.upper(),
link_name='LATEST',
name='upload new latest sdk_id')
# Upload the SDK to CIPD as well.
api.upload.cipd_package(
cipd_pkg_name,
out_dir, [api.upload.DirectoryPath(out_dir)], {'git_revision': revision},
repository=None,
extra_tags={'jiri_snapshot': sdk_id})
def GenTests(api):
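  # Simulates the isolated-hash output property that sdk subbuilds attach to
  # their build results.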
def add_hash_property(build):
build.output.properties[ISOLATED_OUTPUT_KEY] = '###HASH###'
return build
revision = api.jiri.example_revision
# TODO(fxb/40777): Make this a default property.
subbuilders_properties = api.properties(subbuilders=[
'core.arm64-linux-sdk-subbuild',
'core.x64-linux-sdk-subbuild',
])
topaz_properties = api.properties(
project='integration',
manifest='fuchsia/topaz/topaz',
remote='https://fuchsia.googlesource.com/integration',
)
topaz_local_ci = topaz_properties + api.buildbucket.ci_build(
git_repo='https://fuchsia.googlesource.com/topaz',
revision=revision,
) + api.properties(revision=revision)
topaz_global_ci = topaz_properties + api.buildbucket.ci_build(
git_repo='https://fuchsia.googlesource.com/topaz',
revision=revision,
bucket='###global-integration-bucket###') + api.properties(
revision=revision)
topaz_release_ci = topaz_properties + api.buildbucket.ci_build(
git_repo='https://fuchsia.googlesource.com/topaz',
git_ref='refs/heads/release',
revision=revision,
bucket='###global-integration-bucket###') + api.properties(
revision=revision)
topaz_local_cq = topaz_properties + api.buildbucket.try_build()
ci_subbuilds = api.buildbucket.simulated_collect_output(
builds=[
add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS'))
],
step_name='collect sdk build results')
ci_subbuilds_infra_failure = api.buildbucket.simulated_collect_output(
builds=[
add_hash_property(
api.buildbucket.ci_build_message(status='INFRA_FAILURE')),
add_hash_property(
api.buildbucket.ci_build_message(status='INFRA_FAILURE'))
],
step_name='collect sdk build results')
ci_subbuilds_failure = api.buildbucket.simulated_collect_output(
builds=[
add_hash_property(api.buildbucket.ci_build_message(status='FAILURE')),
add_hash_property(api.buildbucket.ci_build_message(status='FAILURE'))
],
step_name='collect sdk build results')
cq_subbuilds = api.buildbucket.simulated_collect_output(
builds=[
add_hash_property(
api.buildbucket.try_build_message(status='SUCCESS')),
add_hash_property(
api.buildbucket.try_build_message(status='SUCCESS'))
],
step_name='collect sdk build results')
ci_subbuilds_with_images = api.buildbucket.simulated_collect_output(
builds=[
add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS')),
add_hash_property(api.buildbucket.ci_build_message(status='SUCCESS'))
],
step_name='collect sdk build results')
image_build = api.buildbucket.ci_build_message(
builder='###SDK_IMAGE_BUILDER###', status='SUCCESS', build_id=123456789)
image_build.output.properties['gcs_bucket'] = '###BUCKET###'
other_image_build = api.buildbucket.ci_build_message(
builder='###OTHER_SDK_IMAGE_BUILDER###',
status='SUCCESS',
build_id=9876543210)
other_image_build.output.properties['gcs_bucket'] = '###BUCKET###'
ci_image_builds = api.buildbucket.simulated_collect_output(
builds=[image_build, other_image_build],
step_name='collect image build results')
ci_image_builds_failure = api.buildbucket.simulated_collect_output(
builds=[
api.buildbucket.ci_build_message(
builder='###SDK_IMAGE_BUILDER###',
status='FAILURE',
build_id=123456789),
other_image_build,
],
step_name='collect image build results')
tags = [
'releases/0.20191019.0.1',
'releases/0.20191018.0.1',
]
describe = api.step_data('git describe',
api.raw_io.stream_output('releases/0.20191018.0.1'))
failed_describe = api.step_data('git describe', retcode=1)
publish_branch = 'refs/heads/master'
release_versions = api.step_data(
'get release versions on h3ll0.git --no-pager',
api.raw_io.stream_output('\n'.join(tags)))
companion_images = [{
'name': '###SDK_IMAGE###',
'builder': '###SDK_IMAGE_BUILDER###',
'bucket': '###DEV_BUCKET###',
}, {
'name': '###OTHER_SDK_IMAGE###',
'builder': '###OTHER_SDK_IMAGE_BUILDER###',
'bucket': '###OTHER_DEV_BUCKET###',
}]
yield (api.test('local_ci') + topaz_local_ci + ci_subbuilds)
yield (api.test('local_cq') + topaz_local_cq + cq_subbuilds)
yield (api.test('snapshot_ci') + topaz_local_ci +
api.properties(checkout_snapshot=True) + ci_subbuilds)
yield (api.test('snapshot_cq') + topaz_local_cq +
api.properties(checkout_snapshot=True) + cq_subbuilds)
yield (api.test('local_ci_mac') + topaz_local_ci + api.platform.name('mac') +
ci_subbuilds)
yield (api.test('global_ci') + topaz_global_ci + ci_subbuilds +
api.properties(
companion_images=companion_images,
publish_branch=publish_branch,
) + failed_describe + subbuilders_properties)
yield (api.test('release_ci') + topaz_release_ci + ci_subbuilds +
ci_image_builds + api.properties(
companion_images=companion_images,
publish_branch=publish_branch,
) + describe + subbuilders_properties + release_versions)
yield (api.test('local_ci_build_failure') + topaz_global_ci +
ci_subbuilds_failure)
yield (api.test('local_ci_infra_failure') + topaz_global_ci +
ci_subbuilds_infra_failure)
yield (api.test('release_ci_image_failure') + topaz_global_ci + ci_subbuilds +
ci_image_builds_failure + api.properties(
companion_images=companion_images, publish_branch=publish_branch) +
describe + release_versions)
yield (api.test('release_ci_new_upload') + topaz_release_ci + api.step_data(
'upload raw-core-sdk.cipd.cipd search fuchsia/sdk/core/${platform} ' +
'git_revision:%s' % revision, api.json.output({'result': []})) +
ci_subbuilds + ci_image_builds + api.properties(
companion_images=companion_images,
publish_branch=publish_branch,
) + describe + release_versions)