blob: 70251b3427fafaad0c19e6da886622d73177ed8c [file] [log] [blame]
# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for generating docs for upload to Firebase."""
from recipe_engine.config import List
from recipe_engine.recipe_api import Property
import hashlib
# Recipe modules this recipe depends on; resolved by the recipe engine at
# load time and exposed on the `api` object passed to RunSteps/GenTests.
DEPS = [
    "fuchsia/build",
    "fuchsia/buildbucket_util",
    "fuchsia/checkout",
    "fuchsia/git_checkout",
    "recipe_engine/cipd",
    "recipe_engine/context",
    "recipe_engine/file",
    "recipe_engine/json",
    "recipe_engine/path",
    "recipe_engine/properties",
    "recipe_engine/step",
]

# All documentation generators this recipe knows how to run; also the
# default value of the `generators` property.
GENERATORS = ["rustdoc", "clangdoc"]

REFERENCE_DOCS_REPOSITORY = "https://fuchsia.googlesource.com/reference-docs"
API_DOCS_RESOURCES_REPOSITORY = "https://fuchsia.googlesource.com/api-docs-resources"

# Input properties controlling RunSteps, supplied by the builder config.
PROPERTIES = {
    "dry_run": Property(
        kind=bool, help="Whether to upload docs to firebase.", default=False
    ),
    # TODO(olivernewman): Convert to protobuf properties and make a Generator
    # enum.
    "generators": Property(
        kind=List(str), help="Packages to build", default=GENERATORS
    ),
    "manifest": Property(kind=str, help="Jiri manifest to use"),
    "output_name": Property(
        kind=str,
        help="Name of top-level output reference-docs directory",
        default="all",
    ),
    "remote": Property(
        kind=str,
        help="Remote manifest repository",
        default="https://fuchsia.googlesource.com/integration",
    ),
    "fint_params_path": Property(kind=str, help="Path to a fint params file"),
}

# NOTE(review): not referenced anywhere in this file; possibly dead code or
# consumed by tooling outside this recipe — confirm before removing.
FIDLDOC_COMMIT_MESSAGE = "[fidldoc] Updating fidl reference docs"
def RunSteps(api, dry_run, generators, manifest, output_name, remote, fint_params_path):
    """Build Fuchsia reference docs and (conditionally) deploy to Firebase.

    Flow: install nodejs + firebase-tools from CIPD, check out the static
    api-docs resources repo and the Fuchsia tree, build with the given fint
    params, run each requested generator into <checkout>/firebase, then run
    `firebase deploy` unless this is a dry run or a partial output.
    """
    cipd_dir = api.path["start_dir"].join("cipd")
    node_modules_dir = cipd_dir.join("node_modules")
    with api.step.nest("ensure_packages"), api.context(infra_steps=True):
        pkgs = api.cipd.EnsureFile()
        pkgs.add_package("infra/nodejs/nodejs/${platform}", "latest")
        api.cipd.ensure(cipd_dir, pkgs)
        # firebase-tools expects to live in the node_modules subdir of where nodejs is installed.
        pkgs = api.cipd.EnsureFile()
        pkgs.add_package("infra/npm/firebase-tools", "latest")
        api.cipd.ensure(node_modules_dir, pkgs)
    with api.step.nest("checkout api docs"):
        # Static site scaffolding (index pages, firebase config) that the
        # generated docs get overlaid onto.
        resources_dir, _ = api.git_checkout(API_DOCS_RESOURCES_REPOSITORY)
    checkout = api.checkout.fuchsia_with_options(manifest=manifest, remote=remote)
    build_results = api.build.with_options(
        checkout=checkout, fint_params_path=fint_params_path
    )
    # Start each run from a clean copy of the static resources.
    docs_dir = checkout.root_dir.join("firebase")
    api.file.rmtree("remove old docs", docs_dir)
    api.file.copytree("copy resources", resources_dir, docs_dir)
    if "clangdoc" in generators:
        with api.step.nest("clangdoc"):
            gen_clang_doc(api, docs_dir, build_results)
    if "rustdoc" in generators:
        with api.step.nest("rustdoc"):
            gen_rustdoc(api, checkout.root_dir, docs_dir, build_results.build_dir)
    # Only deploy if running the default 'all' one, to avoid deploying partial docs to firebase.
    if not dry_run and output_name == "all":
        # nodejs must be on PATH for the firebase CLI wrapper to run.
        with api.context(cwd=docs_dir, env={"PATH": cipd_dir.join("bin")}):
            api.step(
                "firebase deploy",
                [
                    node_modules_dir.join(".bin", "firebase"),
                    "deploy",
                    "--only",
                    "hosting",
                    "--debug",
                ],
            )
def gen_rustdoc(api, checkout_root, docs_dir, build_dir):
    """Generate rustdoc output for all Rust targets in the build.

    The rustdoc script runs on GN targets. We find the Rust GN targets by
    finding all targets that generate a Cargo.toml file, and use those.
    Per-target doc generation is best-effort: failures are recorded and
    surfaced as a log rather than failing the build.

    Args:
      api (RecipeApi): The recipe API object.
      checkout_root (Path): Root of the Fuchsia checkout.
      docs_dir (Path): The output directory for documentation.
      build_dir (Path): The build directory.
    """
    project = api.file.read_json("read project.json", build_dir.join("project.json"))

    # Seed mocked Cargo.toml paths so api.path.exists() returns True for
    # these targets under recipe simulation tests (no-op in production).
    for target in ["lib", "bin"]:
        hash_input = "src/target:%s" % target
        hsh = hashlib.sha1(hash_input.encode()).hexdigest()
        cargo_path = build_dir.join("cargo", hsh, "Cargo.toml")
        api.path.mock_add_paths(cargo_path)

    skipped = []
    failed = []
    # The rustdoc script locates the checkout and build via these variables;
    # they are loop-invariant, so set the context once around the loop.
    with api.context(
        env={
            "FUCHSIA_DIR": checkout_root,
            "FUCHSIA_BUILD_DIR": build_dir,
        }
    ):
        for target, info in sorted(project["targets"].items()):
            # Look for Rust targets only.
            if "crate_root" not in info:
                continue
            # don't build docs for wasm targets, it's a very small proportion of our crates and
            # they break sometimes
            # TODO(fxbug.dev/97619): figure out if the toolchain is supposed to be
            # //build/toolchain:unknown_wasm32 or
            # //build/toolchain:unknown_wasm32-shared and add back the ")"
            if "(//build/toolchain:unknown_wasm32" in target:
                continue
            # TODO(tmandry): Find a way that doesn't involve duplicating this logic.
            # Strip the leading "//" before hashing to match the build's
            # cargo output directory layout.
            hsh = hashlib.sha1(target[2:].encode()).hexdigest()
            cargo_toml_path = build_dir.join("cargo", hsh, "Cargo.toml")
            if not api.path.exists(cargo_toml_path):
                skipped.append(target)
                continue
            try:
                api.step(
                    "rustdoc %s" % target,
                    [
                        checkout_root.join(
                            "tools", "devshell", "contrib", "lib", "rust", "rustdoc.py"
                        ),
                        cargo_toml_path,
                        "--no-deps",
                        "--out-dir",
                        build_dir,
                    ],
                )
            except api.step.StepFailure:
                # Best-effort per target: record the failure instead of
                # silently swallowing it or aborting the whole build.
                failed.append(target)

    # Move the output to the docs directory.
    step_result = api.step(
        "move output to docs",
        [
            "mv",
            checkout_root.join("out", "cargo_target", "x86_64-fuchsia", "doc"),
            docs_dir.join("public", "rust"),
        ],
    )
    step_result.presentation.logs["skipped"] = skipped
    if failed:
        step_result.presentation.logs["failed"] = failed
def gen_clang_doc(api, docs_dir, build_results):
    """Produce C++ reference docs with clang-doc.

    The build's compilation database lists every translation unit. Entries
    under third_party, entries resolving inside the build output directory,
    and entries whose paths cannot be resolved are dropped; clang-doc then
    writes HTML output directly under docs_dir.

    Args:
      docs_dir (Path): The output directory for documentation.
      build_results (FuchsiaBuildResults): Result of a fuchsia build.
    """
    # Assumes that the current builder is configured to generate compdbs.
    mock_compdb = [
        {
            "directory": "[START_DIR]/out/not-default",
            "file": "../../foo.cpp",
            "command": "clang++ foo.cpp",
        },
        {
            "directory": "[START_DIR]/out/not-default",
            "file": "../../third_party/foo.cpp",
            "command": "clang++ third_party/foo.cpp",
        },
        {
            "directory": "[START_DIR]/out/not-default",
            "file": "../../out/not-default/foo.cpp",
            "command": "clang++ foo.cpp",
        },
        {
            "directory": "[START_DIR]/out/not-default",
            "file": "/bin/ln",
            "command": "/bin/ln -s foo bar",
        },
    ]
    entries = api.file.read_json(
        "read compdb",
        build_results.compdb_path,
        test_data=mock_compdb,
        include_log=False,
    )

    def _want(record):
        # Vendored third_party sources are excluded outright.
        if "third_party" in record["file"].split(api.path.sep):
            return False
        # Filenames are relative to the build directory, and the build
        # directory is absolute.
        out_dir = api.path.abs_to_path(record["directory"])
        resolved = api.path.realpath(api.path.join(out_dir, record["file"]))
        try:
            source_path = api.path.abs_to_path(resolved)
        except ValueError:
            # `resolved` is not rooted in any of the path module's known
            # paths (e.g. things under /bin), which also means it cannot be
            # in the build directory — drop it.
            return False
        # Keep only sources that live outside the build output directory.
        return not out_dir.is_parent_of(source_path)

    kept = [record for record in entries if _want(record)]
    with api.context(cwd=build_results.checkout.root_dir):
        api.step(
            "run clang-doc",
            [
                build_results.tool("clang-doc"),
                "--output",
                docs_dir.join("public", "cpp"),
                "--public",
                "--format=html",
                api.json.input(kept),
            ],
        )
def GenTests(api):
    """Recipe simulation tests: a clean run and a run with a rustdoc failure."""
    # project.json targets chosen to hit every branch of gen_rustdoc: real
    # Rust lib/bin targets, a wasm target, non-Rust targets, and a Rust
    # target whose Cargo.toml is absent.
    mock_targets = {
        "//src/target:lib": {
            "type": "rust_library",
            "crate_root": "src/lib.rs",
        },
        "//src/target:lib(//build/toolchain:unknown_wasm32)": {
            "type": "rust_library",
            "crate_root": "src/lib.rs",
        },
        "//src/target:bin": {
            "type": "executable",
            "crate_root": "src/main.rs",
        },
        "//src/not_target:not_rust": {"type": "staticlib"},
        "//src/target:missing_cargo": {
            "type": "rust_library",
            "crate_root": "src/lib.rs",
        },
        "//src/not_target:no_type": {},
    }
    base = api.properties(
        fint_params_path="specs/firebase-docs.textproto", manifest="flower"
    ) + api.step_data(
        "rustdoc.read project.json",
        api.file.read_json(json_content={"targets": mock_targets}),
    )

    yield api.buildbucket_util.test("firebase_docs") + base

    yield (
        api.buildbucket_util.test("firebase_docs_failing_rustdoc")
        + base
        + api.step_data("rustdoc.rustdoc //src/target:lib", retcode=1)
    )