# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building and publishing CIPD prebuilts."""
from PB.recipes.fuchsia.publish_build_artifacts import InputProperties
DEPS = [
"fuchsia/build",
"fuchsia/buildbucket_util",
"fuchsia/checkout",
"fuchsia/cipd_util",
"fuchsia/gsutil",
"fuchsia/sso",
"recipe_engine/buildbucket",
"recipe_engine/file",
"recipe_engine/path",
"recipe_engine/properties",
"recipe_engine/step",
]
PROPERTIES = InputProperties
def RunSteps(api, props):
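    """Checks out and builds Fuchsia, then publishes the requested prebuilts.

    Tool binaries and per-API-level package archives are uploaded to CIPD;
    tools may additionally be uploaded to GCS.
    """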
    checkout = api.checkout.fuchsia_with_options(
        manifest=props.manifest,
        remote=props.remote,
        attributes=props.checkout_attributes,
    )

    build_results = api.build.with_options(
        checkout=checkout,
        fint_params_path=props.fint_params_path,
        use_sandboxing=props.enable_build_sandboxing,
    )

    # Uploads only happen for production CI builds; dev and try builds stop
    # after building.
    if not api.buildbucket_util.is_dev_or_try:
        revision = str(api.buildbucket.build.input.gitiles_commit.id)
        assert revision

        for tool in props.tools:
            with api.step.nest(tool.name):
                path = build_results.tool(tool.name, os=tool.os)
                if tool.cipd_package:
                    assert tool.cipd_package.endswith("/${platform}"), (
                        "package %s must have a ${platform} suffix" % tool.cipd_package
                    )
                    upload_to_cipd(
                        api,
                        tool.name,
                        tool.cipd_package,
                        path,
                        checkout.root_dir,
                        props.remote,
                        revision,
                    )
                if tool.gcs_bucket:
                    upload_to_gcs(api, tool, path)

        # Package archives are produced once per supported API level; each
        # level is published under its own CIPD path.
        for package in props.package_archives:
            with api.step.nest(package.name):
                assert "${api_level}" in package.cipd_package, (
                    "package %s must specify ${api_level} in path"
                    % package.cipd_package
                )
                api_level_paths = build_results.package_archive(
                    package.name, cpu=package.cpu
                )
                for api_level, path in api_level_paths.items():
                    upload_to_cipd(
                        api,
                        package.name,
                        package.cipd_package.replace("${api_level}", str(api_level)),
                        path,
                        checkout.root_dir,
                        props.remote,
                        revision,
                    )


def upload_to_cipd(api, name, cipd_pkg, path, checkout_root, remote, revision):
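    """Stages a single build artifact and uploads it as a CIPD package.

    The artifact is copied into a temporary package root along with the
    LICENSE and README.fuchsia files required by license validation.
    """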
    pkg_root = api.path.mkdtemp(name)
    api.file.copy(
        name=f"copy {name} to build artifact directory",
        source=path,
        dest=pkg_root,
    )
    # Some packages published by this recipe end up being rolled back into the
    # Fuchsia checkout, where the license validation rules require a LICENSE
    # file and README.fuchsia file in every CIPD package.
    api.file.copy(
        name="add LICENSE file to build artifact directory",
        source=checkout_root / "LICENSE",
        dest=pkg_root,
    )
    api.file.write_text(
        name="add README.fuchsia file to build artifact directory",
        dest=pkg_root / "README.fuchsia",
        text_data=f"""\
Name: {name}
URL: {api.sso.sso_to_https(remote)}
License File: LICENSE
""",
        include_log=True,
    )
    api.cipd_util.upload_package(
        cipd_pkg,
        pkg_root=pkg_root,
        search_tag={"git_revision": revision},
        repository=remote,
        install_mode=None,
    )


def upload_to_gcs(api, tool, path):
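    """Uploads a tool binary to the GCS bucket and path configured for it."""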
    assert tool.gcs_path, f"tool {tool.name} specifies a gcs_bucket but no gcs_path"
    api.gsutil.upload(
        src=path,
        bucket=tool.gcs_bucket,
        dst=tool.gcs_path,
        link_name=tool.name,
        name=f"upload {tool.name} to {tool.gcs_bucket}",
    )


def GenTests(api):
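    # Shared test setup: a host tool, a tool for another OS that also uploads
    # to GCS, and a per-API-level package archive.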
    def test(name, tryjob=False):
        repo = "https://fuchsia.googlesource.com/integration"
        properties = InputProperties(
            manifest="flower",
            remote=repo,
            fint_params_path="host-prebuilts.textproto",
            checkout_attributes=["foo-package"],
            enable_build_sandboxing=True,
            tools=[
                InputProperties.Tool(name="foo", cipd_package="tools/foo/${platform}"),
                InputProperties.Tool(
                    name="bar",
                    os="win",
                    cipd_package="tools/bar/${platform}",
                    gcs_bucket="bkt",
                    gcs_path="bar",
                ),
            ],
            package_archives=[
                InputProperties.PackageArchive(
                    name="foo",
                    cpu="x64",
                    cipd_package="prebuilt/foo/api-${api_level}",
                ),
            ],
        )

        ret = api.test(name) + api.properties(properties)
        if tryjob:
            ret += api.buildbucket.try_build(git_repo=repo)
        else:
            ret += api.buildbucket.ci_build(git_repo=repo)
        return ret

    yield test("ci")
    yield test("cq", tryjob=True)