blob: 92c8bd8f2fafaeae419344b8e9e05acd14d85ea6 [file] [log] [blame]
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
from recipe_engine import recipe_api
# The path (str) to the private key used to sign release builds. Only present
# on release builders; passed to the upload tool via -pkey when signing is
# requested.
RELEASE_PKEY_PATH = "/etc/release_keys/release_key.pem"
# The retcode (int) that artifactory emits if a transient GCS error occurs,
# used to convert such failures into InfraFailures rather than build failures.
# Keep in sync with https://fuchsia.googlesource.com/fuchsia/+/HEAD/tools/artifactory/cmd/up.go
TRANSIENT_ERROR_RETCODE = 3
class ArtifactsApi(recipe_api.RecipeApi):
    """API for interacting with build and test artifacts."""

    # Name of the output property under which the artifact GCS bucket is
    # reported (see upload()).
    GCS_BUCKET_PROPERTY = "artifact_gcs_bucket"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # A GCS bucket (str) to which a package repository may be uploaded.
        self.gcs_bucket = None
        # A unique identifier (str) giving a namespace in the GCS bucket under
        # which a package repository may be uploaded.
        self.namespace = None

    def build_path(self):
        """Returns the bucket-relative path (str) of this build's artifacts."""
        assert self.namespace
        return f"builds/{self.namespace}"

    def _base_url(self, host=None):
        """Returns the root URL (str) of the artifact bucket.

        Args:
          host (str or None): The hosting address; if unprovided, a GCS
            (gs://) URL will be constructed.
        """
        assert self.gcs_bucket
        if host:
            # NOTE(review): links are built with http, not https — confirm
            # whether the hosts passed here (e.g. the Cloud Console) should
            # be linked over TLS.
            return f"http://{host}/{self.gcs_bucket}"
        return f"gs://{self.gcs_bucket}"

    def debug_symbol_url(self):
        """Returns the URL (str) of the artifact bucket's debug symbol path."""
        return f"{self._base_url()}/debug"

    def build_url(self, host=None):
        """Returns the URL (str) of the uploaded build subdirectory.

        Args:
          host (str or None): The hosting address of the build artifacts; if
            unprovided, the default GCS URL will be constructed.
        """
        assert self.namespace
        return f"{self._base_url(host)}/{self.build_path()}"

    def package_repo_url(self, host=None):
        """Returns the URL (str) of an uploaded package repository.

        This assumes that we have already upload()ed to this bucket.

        Args:
          host (str or None): The hosting address of the package repository;
            if unprovided, the default GCS URL will be constructed.
        """
        return f"{self.build_url(host)}/packages/repository"

    def package_blob_url(self, host=None):
        """Returns the URL (str) of the blobs of an uploaded package
        repository.

        Args:
          host (str or None): The hosting address of the package repository;
            if unprovided, the default GCS URL will be constructed.
        """
        return f"{self._base_url(host)}/blobs"

    def image_url(self, host=None):
        """Returns the URL (str) of the uploaded images.

        Args:
          host (str or None): The hosting address of the images; if
            unprovided, the default GCS URL will be constructed.
        """
        return f"{self.build_url(host)}/images"

    def cloud_storage_url(self):
        """Returns a browsable Cloud Console URL (str) for this build's
        artifacts."""
        return self.build_url(host="console.cloud.google.com/storage/browser")

    def assembly_manifest_url(self):
        """Returns the URL (str) of the assembly manifest from the GCS build
        artifacts."""
        # Reuse build_url() (== _base_url() + "/" + build_path()) for
        # consistency with the other per-build URL helpers; output unchanged.
        return f"{self.build_url()}/assembly_manifests/fuchsia.json"

    def verify_blobs(self, step_name, upload_manifest_json_input):
        """Verify blobs per the input upload manifest.

        Args:
          step_name (str): Name of the step.
          upload_manifest_json_input (Path): Path of upload manifest as JSON.

        Raises:
          StepFailure: One or more blobs is not valid.
        """
        cmd = [
            self._gcs_util_tool,
            "verify-blobs",
            "-blobfs-compression-path",
            self._blobfs_compression_tool,
            "-manifest-path",
            upload_manifest_json_input,
        ]
        return self.m.step(step_name, cmd)

    def upload(
        self,
        step_name,
        upload_manifest_json_input,
        sign_artifacts=False,
        timeout_secs=45 * 60,
    ):
        """Upload build artifacts to GCS per the input upload manifest.

        Args:
          step_name (str): Name of the step.
          upload_manifest_json_input (Path): Path of upload manifest as JSON.
          sign_artifacts (bool): Whether to sign the artifacts and attach the
            signatures to the uploaded files.
          timeout_secs (int): A timeout for the step in seconds.

        Raises:
          InfraFailure: The upload timed out or hit a transient GCS error.
          StepFailure: The upload failed for any other reason.
        """
        assert self.gcs_bucket and self.namespace
        cmd = [
            self._gcs_util_tool,
            "up",
            "-bucket",
            self.gcs_bucket,
            "-namespace",
            self.build_path(),
            "-manifest-path",
            upload_manifest_json_input,
        ]
        if sign_artifacts:
            cmd.extend(["-pkey", RELEASE_PKEY_PATH])
        step = None
        try:
            step = self.m.step(step_name, cmd, timeout=timeout_secs)
        except self.m.step.StepFailure as e:
            step = self.m.step.active_result
            if e.exc_result.was_cancelled:
                raise
            if e.exc_result.had_timeout:
                # Timeouts are infrastructure problems, not build problems.
                step.presentation.status = self.m.step.EXCEPTION
                raise self.m.step.InfraFailure(
                    f"Artifact upload timed out after {int(timeout_secs / 60)} minutes."
                )
            if e.exc_result.retcode == TRANSIENT_ERROR_RETCODE:
                # The upload tool signals transient GCS errors with a
                # dedicated retcode; surface those as infra failures too.
                step.presentation.status = self.m.step.EXCEPTION
                raise self.m.step.InfraFailure(
                    f"Transient GCS error during artifact upload. See '{step_name}' stdout for details."
                )
            raise
        finally:
            # Attach the link and property even on failure so the artifacts
            # are still discoverable from the build page.
            if step:
                step.presentation.links["build_artifacts"] = self.cloud_storage_url()
                # The following tools depend on this property:
                # * fuchsia.googlesource.com/infra/infra/+/main/cmd/artifacts
                # * bisect-tool
                step.presentation.properties[self.GCS_BUCKET_PROPERTY] = self.gcs_bucket

    def download(self, step_name, src, dest, from_root=False):
        """Downloads the specified artifact.

        Args:
          step_name (str): The name of the step.
          src (str): The path of the artifact to download.
          dest (Path): The path to download the artifact to.
          from_root (bool): If true, the `src` will be taken to be relative to
            the GCS bucket (for shared artifacts like blobs and debug
            binaries), else it will be relative to the builds/UUID
            subdirectory.
        """
        self.m.gsutil.download(
            self.gcs_bucket,
            src if from_root else f"{self.build_path()}/{src}",
            dest,
            name=step_name,
        )

    @property
    def _blobfs_compression_tool(self):
        # Lazily ensures the blobfs-compression tool from its bundled
        # manifest on first use.
        return self.m.ensure_tool(
            "blobfs-compression", self.resource("blobfs_compression/tool_manifest.json")
        )

    @property
    def _gcs_util_tool(self):
        # Lazily ensures the gcs-util upload/verify tool from its bundled
        # manifest on first use.
        return self.m.ensure_tool(
            "gcs-util", self.resource("gcs_util/tool_manifest.json")
        )