| # Copyright 2022 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Generic continuous integration recipe for projects in the Fuchsia ecosystem. |
| |
| This recipe supports projects that need the following steps in their CI jobs: |
| |
| 1. Checkout code. |
| 2. Download additional pinned dependencies not checked into version control. |
| Note that this additional download step will be subject to removal once it |
| can be handled by Git itself, per |
| http://go/git-superproject-source-control-for-fuchsia. |
| 3. Build. |
| 4. Upload build artifacts (images, blobs, drivers, etc.). |
| 5. TODO(olivernewman): Run tests in parallel shards on separate machines. |
| 6. Trigger tests in downstream projects, passing through the uploaded build |
| artifacts. |
| """ |
| |
| from google.protobuf import json_format as jsonpb |
| |
| from PB.recipes.fuchsia.build_test_upload import InputProperties |
| from PB.infra.build_test_upload.upload_manifest import CIPDUploadManifest |
| |
# Recipe-engine directive: this recipe only supports running under Python 3.
PYTHON_VERSION_COMPATIBILITY = "PY3"

# Recipe modules this recipe uses; the recipe engine exposes each one as an
# attribute on the `api` object passed to RunSteps and GenTests.
DEPS = [
    "fuchsia/artifacts",
    "fuchsia/buildbucket_util",
    "fuchsia/cas_util",
    "fuchsia/checkout",
    "fuchsia/cipd_util",
    "fuchsia/dpi",
    "fuchsia/fxt",
    "fuchsia/git",
    "fuchsia/git_checkout",
    "recipe_engine/context",
    "recipe_engine/json",
    "recipe_engine/path",
    "recipe_engine/properties",
    "recipe_engine/proto",
    "recipe_engine/step",
]

# Input-property schema for this recipe (generated from the recipe's proto).
PROPERTIES = InputProperties
| |
| |
def RunSteps(api, props):
    """Checks out, builds, and uploads artifacts for a project.

    Pipeline: checkout (Jiri or plain Git) -> compute placeholder version ->
    optional extra-dependency download -> build -> optional GCS/CIPD/MOS
    uploads -> optional downstream FXT test triggering.

    Args:
        api: The recipe API object (modules listed in DEPS).
        props (InputProperties): Configuration for the checkout mode, the
            project-provided commands to run, and upload/test options.
    """
    # Projects should not assume they'll always be checked out at the same
    # absolute path, so do the checkout in a random temporary directory.
    checkout_dir = api.path.mkdtemp("checkout")

    if props.jiri_manifest:
        # Jiri-based multi-repository checkout.
        api.checkout.with_options(
            path=checkout_dir,
            manifest=props.jiri_manifest,
            remote=props.remote,
            project=props.jiri_project,
        )
        with api.context(cwd=checkout_dir):
            git_revision = api.git.rev_parse("HEAD", step_name="resolve HEAD")
    else:
        # Plain single-repository Git checkout; returns the resolved revision.
        _, git_revision = api.git_checkout(props.remote, path=checkout_dir)

    # Compute a semantic version that a project can use as a placeholder value
    # for operations that require a version number, if the project does not have
    # a formal release process.
    #
    # NOTE: This assumes that the repo at the checkout root is the source of
    # truth for the entire checkout, which may not be the case for some
    # Jiri-based checkouts where the source of truth is a nested repository.
    with api.context(cwd=checkout_dir):
        revision_count = api.git.rev_list_count(
            "HEAD",
            step_name="resolve placeholder version",
            test_data="1",
        )
    # Monotonically increasing with the commit count on HEAD's history.
    placeholder_version = "0.0.0.%s" % revision_count

    # Namespace uploads by the Buildbucket build ID so different builds never
    # collide.
    upload_namespace = api.buildbucket_util.id

    # Any of these variables can be used as a format string in a command's
    # configured arguments and it will be substituted for the actual value. For
    # example, "--build-dir={build_dir}" in a command's args will be
    # replaced with "--build-dir=/actual/path/to/build/dir".
    #
    # Where possible, this approach is preferred over requiring commands to
    # support specific flags as it lets each project's commands be fairly
    # infrastructure-agnostic and reduces coupling between the project and the
    # infrastructure.
    build_dir = api.path.mkdtemp("build")
    variables = {
        "build_dir": build_dir,
        "upload_namespace": upload_namespace,
        "placeholder_version": placeholder_version,
    }

    def run_command(step_name, command_pb, **kwargs):
        """Runs a project-configured command as a step, or no-ops if unset.

        Returns the step result, or None if `command_pb` is not configured.
        """
        # Not all projects need all possible commands, so skip any un-configured
        # commands.
        if not command_pb.path:
            return None
        # Command path is relative to the checkout root.
        cmd = [checkout_dir.join(*command_pb.path.split("/"))]
        for arg in command_pb.args:
            # Perform variable substitutions.
            cmd.append(arg.format(**variables))
        return api.step(step_name, cmd, **kwargs)

    # Run all project commands with the checkout as the working directory.
    with api.context(cwd=checkout_dir):
        run_command("download extra dependencies", props.download_command)

        run_command("build", props.build_command)

        if props.gcs_manifest_command.path:
            with api.step.nest("gcs upload"):
                # The command emits a JSON manifest of files to upload.
                gcs_manifest = run_command(
                    "generate gcs manifest",
                    props.gcs_manifest_command,
                    stdout=api.json.output(),
                ).stdout
                api.artifacts.gcs_bucket = props.gcs_bucket
                api.artifacts.namespace = upload_namespace
                api.artifacts.upload_from_manifest(
                    "upload from manifest",
                    api.json.input(gcs_manifest),
                    sign_artifacts=props.sign_artifacts,
                )

        # Each configured command produces a manifest for one CIPD package.
        for cipd_cmd in props.cipd_manifest_commands:
            with api.step.nest("cipd upload %s" % cipd_cmd.package):
                cipd_manifest = run_command(
                    "generate cipd manifest",
                    cipd_cmd.command,
                    stdout=api.proto.output(CIPDUploadManifest, codec="JSONPB"),
                ).stdout
                cipd_upload_from_manifest(
                    api,
                    cipd_cmd.package,
                    cipd_manifest,
                    build_dir=build_dir,
                    repository=props.remote,
                    git_revision=git_revision,
                )

        if props.mos_upload_options.repo_hostname:
            api.dpi.upload(
                "upload to MOS-TUF repos",
                build_dir=build_dir,
                options=props.mos_upload_options,
            )

        # Trigger downstream tests against the uploaded artifacts; requires a
        # GCS bucket for the artifacts to be passed through.
        if props.gcs_bucket and props.HasField("fxt_options"):
            api.fxt.orchestrate_fxt_tests(
                bucket=props.gcs_bucket,
                namespace=upload_namespace,
                options=props.fxt_options,
            )
| |
| |
def cipd_upload_from_manifest(
    api, cipd_package, cipd_manifest, build_dir, repository, git_revision
):
    """Uploads files from the build directory to a CIPD package.

    Assembles a hardlink tree laid out according to the manifest's dest
    paths, then uploads the tree as `cipd_package`, tagged with the
    checkout's Git revision. In tryjobs the manifest is still read and the
    link tree is still built (to validate the manifest in CQ), but the
    upload itself is skipped.

    Args:
        api: The recipe API object.
        cipd_package (str): Name of the CIPD package to upload.
        cipd_manifest (CIPDUploadManifest): Files to upload; `source` paths
            are relative to `build_dir`, `dest` paths are relative to the
            package root.
        build_dir (Path): Build output directory that manifest sources are
            resolved against.
        repository (str): Source repository URL, recorded on the package.
        git_revision (str): Revision attached as the `git_revision` search
            tag.

    Raises:
        StepFailure: If a manifest entry's source path escapes the build
            directory.
    """
    tree = api.cas_util.hardlink_tree(api.path.mkdtemp("cipd"))
    for f in cipd_manifest.files:
        # We should generally not be uploading artifacts from outside the build
        # directory, in order to ensure everything flows through the
        # checkout->build->upload pipeline. So disallow uploading files from
        # outside the build directory.
        #
        # Links to files outside the build directory are still allowed.
        if ".." in f.source.split("/"):
            raise api.step.StepFailure(
                "CIPD upload file source must be within the build directory: %s"
                % f.source
            )
        abs_source = build_dir.join(*f.source.split("/"))
        abs_dest = tree.root.join(*f.dest.split("/"))
        tree.register_link(abs_source, linkname=abs_dest)

    tree.create_links("create hardlinks")

    # Tryjobs should never upload to CIPD. We don't want to completely skip
    # reading the manifest in case of a tryjob because it's useful to check that
    # the manifest is valid in CQ.
    if api.buildbucket_util.is_tryjob:
        api.step.empty("dry run, skipping upload")
        return

    api.cipd_util.upload_package(
        cipd_package,
        tree.root,
        search_tag={"git_revision": git_revision},
        repository=repository,
    )
| |
| |
def GenTests(api):
    """Yields recipe test cases covering each upload and trigger code path."""
    default_remote = "https://fuchsia.googlesource.com/foo"

    def properties(**overrides):
        """Returns InputProperties test data: a minimal baseline plus overrides."""
        prop_dict = {
            "remote": default_remote,
            "build_command": {
                "path": "scripts/build.sh",
                "args": [
                    "--build-dir",
                    "{build_dir}",
                    "--version",
                    "{placeholder_version}",
                ],
            },
        }
        prop_dict.update(overrides)
        return api.properties(jsonpb.ParseDict(prop_dict, InputProperties()))

    def cipd_manifest_data(files=None):
        """Returns mock step output for a CIPD manifest of source->dest pairs."""
        files = files or {"local/foo/bar": "package/foobar"}
        manifest = CIPDUploadManifest()
        for source, dest in files.items():
            manifest.files.add(source=source, dest=dest)
        return api.proto.output(manifest)

    # Happy path: GCS upload plus downstream FXT test triggering.
    fxt_case = api.buildbucket_util.test("fxt_tests", git_repo=default_remote)
    fxt_case += properties(
        gcs_manifest_command={"path": "emit_gcs_manifest.sh"},
        gcs_bucket="foo-artifacts",
        sign_artifacts=True,
        fxt_options={"image_name": "image-name"},
    )
    fxt_case += api.step_data(
        "gcs upload.generate gcs manifest",
        stdout=api.json.output([{"source": "foo.txt", "destination": "foo.txt"}]),
    )
    fxt_case += api.fxt.orchestrate_fxt_tests()
    yield fxt_case

    # Jiri-based checkout with MOS and CIPD uploads.
    jiri_case = api.buildbucket_util.test("jiri__mos__cipd", git_repo=default_remote)
    jiri_case += properties(
        jiri_manifest="path/to/manifest",
        download_command={"path": "scripts/download_deps.sh"},
        mos_upload_options={
            "repo_hostname": "test.fuchsia-update.googleusercontent.com",
            "gcs_bucket": "discover-cloud.appspot.com",
            "manifest_path": "path/to/mos/manifest",
        },
        cipd_manifest_commands=[
            {
                "command": {"path": "scripts/emit_foo_cipd_manifest.sh"},
                "package": "fuchsia/tools/foo",
            }
        ],
    )
    jiri_case += api.step_data(
        "cipd upload fuchsia/tools/foo.generate cipd manifest",
        stdout=cipd_manifest_data(),
    )
    yield jiri_case

    # A manifest entry that escapes the build directory must fail the build.
    invalid_case = api.buildbucket_util.test(
        "invalid_cipd_upload",
        git_repo=default_remote,
        # The manifest should be validated even in presubmit.
        tryjob=True,
        status="failure",
    )
    invalid_case += properties(
        cipd_manifest_commands=[
            {
                "command": {"path": "scripts/emit_cipd_manifest.sh"},
                "package": "fuchsia/tools/foo",
            }
        ],
    )
    invalid_case += api.step_data(
        "cipd upload fuchsia/tools/foo.generate cipd manifest",
        stdout=cipd_manifest_data(
            # Paths outside the build directory are disallowed.
            {"foo/../../checkout-path": "package_path/foo/bar"}
        ),
    )
    yield invalid_case

    # Tryjobs validate the manifest but skip the actual CIPD upload.
    tryjob_case = api.buildbucket_util.test(
        "cipd_upload_tryjob", git_repo=default_remote, tryjob=True
    )
    tryjob_case += properties(
        cipd_manifest_commands=[
            {
                "command": {"path": "scripts/emit_cipd_manifest.sh"},
                "package": "fuchsia/tools/foo",
            }
        ],
    )
    tryjob_case += api.step_data(
        "cipd upload fuchsia/tools/foo.generate cipd manifest",
        stdout=cipd_manifest_data(),
    )
    yield tryjob_case