| # Copyright 2019 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| |
| from recipe_engine import recipe_api |
| |
| |
class UploadPath(object):
  """Base wrapper around a filesystem Path that is destined for upload."""

  def __init__(self, path):
    # Store the wrapped Path privately; it is exposed read-only below.
    self._wrapped_path = path

  @property
  def path(self):
    """Returns the wrapped Path object."""
    return self._wrapped_path
| |
| |
class DirectoryPath(UploadPath):
  """An UploadPath naming a directory; added to packages via add_dir()."""

  def add_to_package(self, pkg):
    """Registers this directory with `pkg` using its directory API."""
    pkg.add_dir(self.path)
| |
| |
class FilePath(UploadPath):
  """An UploadPath naming a single file; added to packages via add_file()."""

  def add_to_package(self, pkg):
    """Registers this file with `pkg` using its single-file API."""
    pkg.add_file(self.path)
| |
| |
class UploadApi(recipe_api.RecipeApi):
  """API for uploading build and test results."""

  # Re-exported so callers can write api.upload.FilePath(...) /
  # api.upload.DirectoryPath(...) without importing this module directly.
  FilePath = FilePath
  DirectoryPath = DirectoryPath

  def file_to_gcs(self, source, bucket, subpath, namespace=None, metadata=None):
    """Uploads a file to GCS under a subpath specific to the given build.

    Will upload the file to:
      gs://<bucket>/builds/<namespace>/<subpath>

    Args:
      source (Path): A path to the file to upload.
      bucket (str): The name of the GCS bucket to upload to.
      subpath (str): The end of the destination path within the
        build-specific subdirectory.
      namespace (str or None): A unique ID for this build. Defaults to the
        current build ID or led run ID.
      metadata (dict): A dictionary of metadata values to upload along with
        the file.
    Returns:
      The upload step.
    """
    return self.m.gsutil.upload(
        bucket=bucket,
        src=source,
        dst=self._absolute_gcs_path(subpath, namespace),
        link_name=subpath,
        metadata=metadata,
        name='upload %s to %s' % (subpath, bucket))

  def directory_to_gcs(self, source, bucket, subpath, namespace=None):
    """Uploads a directory to GCS under a subpath specific to the given build.

    Will upload the directory to:
      gs://<bucket>/builds/<namespace>/<subpath>

    This operation preserves directory structure. It is also idempotent,
    because it uses `gsutil rsync` as opposed to `gsutil cp`.

    Args:
      source (Path): A path to the directory to upload.
      bucket (str): The name of the GCS bucket to upload to.
      subpath (str): The end of the destination path within the
        build-specific subdirectory.
      namespace (str or None): A unique ID for this build. Defaults to the
        current build ID or led run ID.
    Returns:
      The upload step.
    """
    return self.m.gsutil.rsync(
        bucket=bucket,
        src=source,
        dst=self._absolute_gcs_path(subpath, namespace),
        link_name=subpath,
        recursive=True,
        multithreaded=True,
        name='upload %s to %s' % (subpath, bucket))

  def _absolute_gcs_path(self, relative_path, namespace=None):
    """Returns the bucket-relative destination path for this build."""
    namespace = namespace or self.m.buildbucket_util.id
    return 'builds/%s/%s' % (namespace, relative_path)

  def test_outputs_to_catapult(self, output_dir):
    """Uploads test outputs to Catapult from a specified directory.

    Uploads only Catapult HistogramSet JSON files with the expected catapult
    extension.

    Args:
      output_dir (Path): A directory containing catapult files produced by
        the tests.
    """
    for filepath in self.m.file.glob_paths(
        'locate catapult files',
        output_dir,
        pattern=self.m.catapult.FILE_PATTERN,
        test_data=['benchmark.catapult_json'],
    ):
      self._upload_file_to_catapult(filepath)

  def _upload_file_to_catapult(self, filepath):
    """Uploads a single HistogramSet file to Catapult under a nested step."""
    basename = self.m.path.basename(filepath)
    with self.m.step.nest('upload %s' % basename):
      self.m.catapult.upload(input_file=filepath, timeout='60s')

  def cipd_package(self,
                   pkg_name,
                   pkg_root,
                   pkg_paths,
                   search_tag,
                   repository=None,
                   install_mode='copy',
                   extra_tags=None):
    """Creates and uploads a CIPD package of the given paths under pkg_root.

    The package is published to CIPD under the path pkg_name. If a package
    with the given search_tag already exists, registration is skipped and the
    existing pin is returned.

    Args:
      pkg_name (str): The CIPD package to publish to.
      pkg_root (Path): The absolute path to the parent directory of the
        package.
      pkg_paths (list(UploadPath)): A list of UploadPath objects which specify
        the paths to directories or files to upload.
      search_tag (dict): The tag to search for the CIPD pin with. This should
        contain one element and be either `git_revision` or `version`.
      repository (str or None): The git repository where code for the package
        lives.
      install_mode (str or None): The install mode for the package.
      extra_tags (dict or None): Extra tags to add to the package.
    Returns:
      The CIPDApi.Pin instance_id.
    """
    pkg_def = self.m.cipd.PackageDefinition(
        package_name=str(pkg_name),
        package_root=pkg_root,
        install_mode=install_mode)
    for path in pkg_paths:
      path.add_to_package(pkg_def)

    # E.g., "fuchsia/go/linux-amd64" -> "go".
    name = str(pkg_name.split('/')[-2])
    pkg_def.add_version_file('.versions/%s.cipd_version' % name)
    cipd_pkg_file = self.m.path['cleanup'].join('%s.cipd' % name)

    with self.m.step.nest('cipd') as step:
      self.m.cipd.build_from_pkg(
          pkg_def=pkg_def,
          output_package=cipd_pkg_file,
      )

      assert len(search_tag) == 1, (
          'search_tag must contain one (key: value) pair to search for.')
      # dict.keys() is not subscriptable in Python 3; next(iter(...)) gets
      # the single key portably on both Python 2 and 3.
      search_tag_key = next(iter(search_tag))
      search_tag_value = search_tag[search_tag_key]
      cipd_pins = self.m.cipd.search(
          pkg_name, '%s:%s' % (search_tag_key, search_tag_value))
      if cipd_pins:
        self.m.step('Package is up-to-date', cmd=None)
        assert len(cipd_pins) == 1, '%s has too many pins' % pkg_name
        return cipd_pins[0].instance_id

      tags = {}
      tags.update(search_tag)
      if repository:
        tags['git_repository'] = repository
      if extra_tags:
        tags.update(extra_tags)

      cipd_pin = self.m.cipd.register(
          package_name=pkg_name,
          package_path=cipd_pkg_file,
          refs=['latest'],
          tags=tags,
      )

      # Surface the registered pin (package + instance_id) on the step.
      step.presentation.properties.update(cipd_pin._asdict())

      return cipd_pin.instance_id

  def upload_isolated(self, staging_dir, upload_paths=None):
    """Returns the hash of the isolated tree created from the provided
    staging_dir.

    Args:
      staging_dir (Path): The directory the isolated tree is rooted at.
      upload_paths (list(UploadPath) or None): Specific paths to include;
        defaults to the entire staging_dir.
    Returns:
      The isolated hash (str) of the archived tree.
    """
    isolated = self.m.isolated.isolated(staging_dir)
    upload_paths = upload_paths or [DirectoryPath(staging_dir)]
    for path in upload_paths:
      path.add_to_package(isolated)

    with self.m.step.nest('isolated') as step:
      isolated_hash = isolated.archive('isolate')

      # Expose the hash as a step property so downstream builders can find it.
      step.presentation.properties['isolated'] = isolated_hash

      return isolated_hash