| # Copyright 2018 The Fuchsia Authors. All rights reserved. |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| """Recipe for generating docs for upload to Firebase.""" |
| |
| from recipe_engine.config import List |
| from recipe_engine.recipe_api import Property |
| |
| import hashlib |
| |
# Recipe modules this recipe depends on; the engine injects each as api.<name>.
DEPS = [
    "fuchsia/build",
    "fuchsia/checkout",
    "fuchsia/git",
    "fuchsia/status_check",
    "recipe_engine/buildbucket",
    "recipe_engine/cipd",
    "recipe_engine/context",
    "recipe_engine/file",
    "recipe_engine/json",
    "recipe_engine/path",
    "recipe_engine/properties",
    "recipe_engine/python",
    "recipe_engine/raw_io",
    "recipe_engine/step",
]

# All supported doc generators; also the default for the `generators` property.
GENERATORS = ["dartdoc", "rustdoc", "fidldoc", "clangdoc"]

# Repo where generated Markdown reference docs (dart, fidl) are committed.
REFERENCE_DOCS_REPOSITORY = "https://fuchsia.googlesource.com/reference-docs"
# Repo holding static resources copied into the firebase hosting directory.
API_DOCS_RESOURCES_REPOSITORY = "https://fuchsia.googlesource.com/api-docs-resources"

# Input properties accepted by this recipe (see RunSteps signature).
PROPERTIES = {
    "dry_run": Property(
        kind=bool, help="Whether to upload docs to firebase.", default=False
    ),
    # TODO(olivernewman): Convert to protobuf properties and make a Generator
    # enum.
    "generators": Property(
        kind=List(basestring), help="Packages to build", default=GENERATORS
    ),
    "manifest": Property(kind=str, help="Jiri manifest to use", default="topaz/topaz"),
    "output_name": Property(
        kind=str,
        help="Name of top-level output reference-docs directory",
        default="all",
    ),
    "remote": Property(
        kind=str,
        help="Remote manifest repository",
        default="https://fuchsia.googlesource.com/integration",
    ),
    "fint_params_path": Property(kind=str, help="Path to a fint params file"),
}

# Template pubspec for the synthetic package that dartdoc is run over; each
# documentable package is appended as a path dependency in gen_dartdoc().
DARTDOC_PUBSPEC = """name: Fuchsia
homepage: https://fuchsia-docs.firebaseapp.com/dart
description: API documentation for fuchsia
dependencies:
"""

# Commit messages used when pushing generated docs to reference-docs.
DARTDOC_COMMIT_MESSAGE = "[dartdoc] Updating dart reference docs"
FIDLDOC_COMMIT_MESSAGE = "[fidldoc] Updating fidl reference docs"
| |
| |
def gen_dartdoc(api, out_dir, docs_dir, cipd_dir, output_name, dry_run):
    """Generate dartdoc output.

    Dartdoc runs on a single package, but has the capability to generate docs for all
    dependencies. Thus, to generate Dart documentation for Fuchsia, we first generate
    a 'fake' package that lists the libraries we want documented. We then run `pub`
    over that new package to fetch the dependencies, and finally `dartdoc` to generate
    documentation for it all.

    HTML output goes under docs_dir for firebase hosting; Markdown output is
    committed (and pushed, unless dry_run) to the reference-docs repository.

    Args:
      out_dir (Path) - The output directory for generated files.
      docs_dir (Path) - The output directory for documentation.
      cipd_dir (Path) - The cipd directory.
      output_name (str) - Output name of the directory.
      dry_run (bool) - Run but don't push.
    """
    dart_packages_path = api.path["start_dir"].join("sdk", "dart")
    # mock_add_paths only affects simulation (GenTests); it is a no-op in prod.
    api.path.mock_add_paths(dart_packages_path)
    # If either dartdoc or dart packages path doesn't exist, we didn't checkout the
    # repository of interest and so we won't generate dart docs on this run.
    if not api.path.exists(dart_packages_path):
        return  # pragma: no cover

    # Make a temporary docs dir to be pushed to firebase.
    api.file.ensure_directory("create lib dir", out_dir.join("lib"))

    # Build .packages and lib.dart importing all packages.
    dart_imports_content = "library Fuchsia;\n"
    dart_pubspec_content = DARTDOC_PUBSPEC

    # Gather documentable dart packages.
    dart_packages = [
        api.path.basename(p)
        for p in api.file.listdir(
            "list dart packages",
            dart_packages_path,
            test_data=("fuchsia", "sdk", "other"),
        )
    ]

    # More simulation-only path mocks so the existence checks below pass in tests.
    api.path.mock_add_paths(dart_packages_path.join("fuchsia", "lib"))
    api.path.mock_add_paths(dart_packages_path.join("fuchsia", "pubspec.yaml"))
    api.path.mock_add_paths(dart_packages_path.join("sdk", "lib"))

    for package in dart_packages:
        # Skip packages with no Dart sources to document.
        if not api.path.exists(dart_packages_path.join(package, "lib")):
            continue

        # Skip directories that are not real pub packages.
        pubspec_path = dart_packages_path.join(package, "pubspec.yaml")
        if not api.path.exists(pubspec_path):
            continue

        # Parse pubspec.yaml to JSON via a helper script (no YAML lib in recipes).
        pubspec = api.python(
            "load %s pubspec.yaml" % package,
            api.resource("parse_yaml.py"),
            args=[pubspec_path],
            stdout=api.json.output(),
        ).stdout

        # Sanity check: pubspec's declared name must match the directory name.
        if not pubspec or pubspec["name"] != package:
            continue  # pragma: no cover

        # Register the package as a path dependency of the synthetic package.
        dart_pubspec_content += "  %s:\n    path: %s/\n" % (
            package,
            dart_packages_path.join(package),
        )
        # Import every top-level .dart library so dartdoc documents it.
        package_imports = [
            api.path.basename(i)
            for i in api.file.listdir(
                "list %s packages" % package, dart_packages_path.join(package, "lib")
            )
            if api.path.basename(i).endswith(".dart")
        ]
        for i in package_imports:
            dart_imports_content += "import 'package:%s/%s';\n" % (package, i)

    # Build package pubspec.yaml depending on all desired source packages.
    api.file.write_text(
        "write pubspec.yaml", out_dir.join("pubspec.yaml"), dart_pubspec_content
    )
    api.file.write_text(
        "write lib.dart", out_dir.join("lib", "lib.dart"), dart_imports_content
    )

    # Run pub over this package to fetch deps.
    with api.context(cwd=out_dir):
        api.step("pub", [cipd_dir.join("dart-sdk", "bin", "pub"), "get"])

    # Record the version in case changes to dartdoc cause a regression.
    with api.context(cwd=out_dir):
        api.step(
            "dartdoc version",
            [cipd_dir.join("dart-sdk", "bin", "dartdoc"), "--version"],
        )

    excluded_packages = [
        "Dart",  # Dart SDK
        "logging",  # used by Fuchsia logger
    ]
    # Run dartdoc over this package and generate HTML for firebase hosting.
    # TODO(jdkoren): remove once Fuchsia Devsite has suitable Dart reference docs.
    with api.context(cwd=out_dir):
        api.step(
            "dartdoc",
            [
                cipd_dir.join("dart-sdk", "bin", "dartdoc"),
                "--auto-include-dependencies",
                "--exclude-packages",
                ",".join(excluded_packages),
                "--output",
                docs_dir.join("public", "dart"),
            ],
        )

    # Run dartdoc over this package and generate Markdown for Fuchsia Devsite.
    # Also generate a TOC from the generated index.
    md_docs_dir = api.path["start_dir"].join("dartdoc_out_md")
    with api.context(cwd=out_dir):
        api.step(
            "dartdoc devsite",
            [
                cipd_dir.join("dart-sdk", "bin", "dartdoc"),
                "--auto-include-dependencies",
                "--exclude-packages",
                ",".join(excluded_packages),
                "--output",
                md_docs_dir,
                "--format",
                "md",
            ],
        )
    # Devsite navigation: build _toc.yaml from dartdoc's index.json.
    api.python(
        "generate _toc.yaml file",
        script=api.resource("generate_dart_toc.py"),
        args=[
            "--index-file",
            md_docs_dir.join("index.json"),
            "--outfile",
            md_docs_dir.join("_toc.yaml"),
        ],
    )

    # Markdown docs are committed to reference docs repo. Checkout the repo here
    # to minimize the likelihood of a race with another bot trying to commit to
    # the same repo between this bot's checkout and commit.
    reference_docs_dir = api.path["start_dir"].join("reference-docs")
    with api.step.nest("checkout reference-docs"):
        api.git.checkout(REFERENCE_DOCS_REPOSITORY, path=reference_docs_dir)
    with api.context(cwd=reference_docs_dir):
        reference_docs_dart_dir = reference_docs_dir.join(output_name, "dart")
        # Clear the prior dart docs and move results in.
        api.file.rmtree(
            name="clear reference-docs/out/dart", source=reference_docs_dart_dir,
        )
        api.file.move(
            name="move docs to reference-docs/out/dart",
            source=md_docs_dir,
            dest=reference_docs_dart_dir,
        )
        # Remove artifacts of dartdoc that will not be used.
        api.file.remove(
            name="remove reference-docs/out/dart/index.json",
            source=reference_docs_dart_dir.join("index.json"),
        )
        api.file.remove(
            name="remove reference-docs/out/dart/categories.json",
            source=reference_docs_dart_dir.join("categories.json"),
        )
        api.file.remove(
            name="remove reference-docs/out/dart/__404error.md",
            source=reference_docs_dart_dir.join("__404error.md"),
        )

        # Add all modified files to git cache.
        api.git.add(add_all=True)
        # Only commit and push if there's a diff, otherwise commit fails.
        if api.git.diff(
            ref_base=None, cached=True, exit_code=True, ok_ret="any"
        ).retcode:
            api.git.commit(DARTDOC_COMMIT_MESSAGE)
            if not dry_run:
                api.git.push(refs=["HEAD:master"])
| |
| |
def gen_rustdoc(api, docs_dir, build_dir):
    """Generate rust output.

    The rustdoc script runs on GN targets. We find the Rust GN targets by finding
    all targets that generate a Cargo.toml file, and use those. Targets without a
    generated Cargo.toml are skipped and reported in the step presentation; a
    failing rustdoc invocation for one target does not abort the others.

    Args:
      docs_dir (Path): The output directory for documentation.
      build_dir (Path): The build directory.
    """

    project = api.file.read_json("read project.json", build_dir.join("project.json"))

    # Simulation-only: register the Cargo.toml paths for the targets exercised
    # in GenTests so the api.path.exists() checks below pass. No-op in prod.
    for target in ["lib", "bin"]:
        # hashlib requires bytes in Python 3; .encode() is a no-op-equivalent
        # on ASCII str under Python 2, so this is compatible with both.
        hsh = hashlib.sha1(("topaz/target:%s" % target).encode("utf-8")).hexdigest()
        cargo_path = build_dir.join("cargo", hsh, "Cargo.toml")
        api.path.mock_add_paths(cargo_path)

    skipped = []
    # .items() (not the Python-2-only .iteritems()) so this runs on Python 3 too.
    for target, info in project["targets"].items():
        # Look for Rust targets only.
        if "crate_root" not in info:
            continue

        # The build places each target's generated Cargo.toml in a directory
        # named after the sha1 of the GN label minus its leading "//".
        # TODO(tmandry): Find a way that doesn't involve duplicating this logic.
        hsh = hashlib.sha1(target[2:].encode("utf-8")).hexdigest()
        cargo_toml_path = build_dir.join("cargo", hsh, "Cargo.toml")
        if not api.path.exists(cargo_toml_path):
            skipped.append(target)
            continue

        try:
            with api.context(
                env={
                    "FUCHSIA_DIR": api.path["start_dir"],
                    "FUCHSIA_BUILD_DIR": build_dir,
                }
            ):
                api.step(
                    "rustdoc %s" % target,
                    [
                        api.path["start_dir"].join(
                            "tools", "devshell", "contrib", "lib", "rust", "rustdoc.py"
                        ),
                        cargo_toml_path,
                        "--no-deps",
                        "--out-dir",
                        build_dir,
                    ],
                )
        except api.step.StepFailure:
            # Best-effort: a single crate's doc failure shouldn't fail the build.
            pass

    # Move the output to the docs directory.
    step_result = api.step(
        "move output to docs",
        [
            "mv",
            api.path["start_dir"].join("out", "cargo_target", "x86_64-fuchsia", "doc"),
            docs_dir.join("public", "rust"),
        ],
    )

    # Surface the skipped targets in the build UI for debugging.
    step_result.presentation.logs["skipped"] = skipped
| |
| |
def gen_fidldoc(api, build_dir, output_name, dry_run):
    """Generate fidl output.

    The fidldoc tool runs on the all_fidl_json.txt file. Pushes the resulting
    docs to the reference-docs repository (unless dry_run).

    Args:
      build_dir (Path) - The build directory.
      output_name (str) - Output name of the directory within reference-docs.
      dry_run (bool) - Run but don't push.
    """
    out_dir = api.path["start_dir"].join("fidldoc_out")
    # all_fidl_json.txt lists one FIDL IR JSON file per line.
    all_fidl_json_txt = build_dir.join("all_fidl_json.txt")
    all_fidl_json = api.file.read_text(
        "read all_fidl_json.txt", all_fidl_json_txt
    ).splitlines()

    with api.context(cwd=build_dir):
        # Cannot use tool_paths.json here, since it maps to host_x64, which
        # doesn't work for fidldoc because of the accompanying fidldoc.config.json
        # that is only in host-tools.
        fidldoc_path = build_dir.join("host-tools/fidldoc")
        api.step(
            "run fidldoc",
            [fidldoc_path, "--verbose", "--path", "/reference/fidl/", "--out", out_dir]
            + all_fidl_json,
        )

    # Push resulting docs to the reference-docs repository.

    # The fidldocs get checked into their own repository. Checking it out here reduces the likelihood
    # of a race with another bot trying to commit in between this bot's checkout and commit steps.
    reference_docs_dir = api.path["start_dir"].join("reference-docs")
    with api.step.nest("checkout reference-docs"):
        api.git.checkout(REFERENCE_DOCS_REPOSITORY, path=reference_docs_dir)

    with api.context(cwd=reference_docs_dir):
        # Clear the repository and move results in.
        api.file.rmtree(
            name="clear reference-docs/out/fidl",
            source=reference_docs_dir.join(output_name, "fidl"),
        )
        api.file.move(
            name="Move docs to reference-docs/out/fidl",
            source=out_dir,
            dest=reference_docs_dir.join(output_name, "fidl"),
        )
        # Add all modified files to git cache.
        api.git.add(add_all=True)
        # Only commit and push if there's a diff, otherwise commit fails.
        if api.git.diff(
            ref_base=None, cached=True, exit_code=True, ok_ret="any"
        ).retcode:
            api.git.commit(FIDLDOC_COMMIT_MESSAGE)
            if not dry_run:
                api.git.push(refs=["HEAD:master"])
| |
| |
def gen_clang_doc(api, docs_dir, build_dir, build_results):
    """Generate clang-doc output.

    clang-doc runs on the translation units specified in the compilation database
    file. This file will be generated by gn_results after filtering unwanted
    directories or files. clang-doc will output documentation files directly in
    docs_dir.

    Args:
      docs_dir (Path): The output directory for documentation.
      build_dir (Path): The build directory.
      build_results (FuchsiaBuildResults): Result of a fuchsia build.
    """
    # Build a compilation database with third_party sources excluded.
    with api.step.nest("filter compile commands"):
        compdb_path = build_results.filtered_compdb(filters=["third_party"])

    # Emit public-API HTML docs into the firebase hosting tree.
    clang_doc_cmd = [
        build_results.tool("clang-doc"),
        "--output",
        docs_dir.join("public", "cpp"),
        "--public",
        "--format=html",
        compdb_path,
    ]
    with api.context(cwd=build_dir):
        api.step("run clang-doc", clang_doc_cmd)
| |
| |
def RunSteps(api, dry_run, generators, manifest, output_name, remote, fint_params_path):
    """Recipe entry point: check out, build, run each generator, then deploy.

    Property arguments are injected by the recipe engine from PROPERTIES.
    """
    cipd_dir = api.path["start_dir"].join("cipd")
    node_modules_dir = cipd_dir.join("node_modules")
    # Fetch tooling from CIPD: nodejs (+ firebase-tools), and the Dart SDK
    # only when dart docs will actually be generated.
    with api.step.nest("ensure_packages"):
        with api.context(infra_steps=True):
            pkgs = api.cipd.EnsureFile()
            pkgs.add_package("infra/nodejs/nodejs/${platform}", "latest")
            if "dartdoc" in generators:
                pkgs.add_package("dart/dart-sdk/${platform}", "dev")
            api.cipd.ensure(cipd_dir, pkgs)

            # firebase-tools expects to live in the node_modules subdir of where nodejs is installed.
            pkgs = api.cipd.EnsureFile()
            pkgs.add_package("infra/npm/firebase-tools", "latest")
            api.cipd.ensure(node_modules_dir, pkgs)

    # Static site assets (index pages, styling) that wrap the generated docs.
    resources_dir = api.path["start_dir"].join("api-docs-resources")
    with api.step.nest("checkout api docs"):
        api.git.checkout(API_DOCS_RESOURCES_REPOSITORY, path=resources_dir)

    # Full Fuchsia checkout + build; generators read the resulting build dir.
    checkout_root = api.path["start_dir"]
    with api.step.nest("checkout fuchsia"):
        checkout = api.checkout.fuchsia_with_options(
            path=checkout_root, manifest=manifest, remote=remote,
        )

    build_results = api.build.with_options(
        checkout=checkout, fint_params_path=fint_params_path
    )

    out_dir = api.path["start_dir"].join("docs_out")
    docs_dir = api.path["start_dir"].join("firebase")

    # Start the firebase tree from a clean copy of the static resources.
    api.file.rmtree("remove old docs", docs_dir)
    api.file.copytree("copy resources", resources_dir, docs_dir)

    # Run each requested generator under its own nested step.
    if "clangdoc" in generators:
        with api.step.nest("clangdoc"):
            gen_clang_doc(api, docs_dir, api.path["start_dir"], build_results)
    if "dartdoc" in generators:
        with api.step.nest("dartdoc"):
            gen_dartdoc(api, out_dir, docs_dir, cipd_dir, output_name, dry_run)
    if "fidldoc" in generators:
        with api.step.nest("fidldoc"):
            gen_fidldoc(api, build_results.build_dir, output_name, dry_run)
    if "rustdoc" in generators:
        with api.step.nest("rustdoc"):
            gen_rustdoc(api, docs_dir, build_results.build_dir)

    # Only deploy if running the default 'all' one, to avoid deploying partial docs to firebase.
    if not dry_run and output_name == "all":
        with api.context(cwd=docs_dir, env={"PATH": cipd_dir.join("bin")}):
            api.step(
                "firebase deploy",
                [
                    node_modules_dir.join(".bin", "firebase"),
                    "deploy",
                    "--only",
                    "hosting",
                    "--debug",
                ],
            )
| |
| |
def GenTests(api):
    """Simulation tests. Step names in step_data must match the prod steps."""
    # Shared scenario: a topaz CI build with mocked outputs for each generator.
    base = (
        api.buildbucket.ci_build(git_repo="https://fuchsia.googlesource.com/topaz",)
        + api.properties(fint_params_path="specs/firebase-docs.textproto")
        # dartdoc: "fuchsia" package parses with a matching pubspec name.
        + api.step_data(
            "dartdoc.load fuchsia pubspec.yaml",
            stdout=api.json.output({"name": "fuchsia"}),
        )
        + api.step_data(
            "dartdoc.list fuchsia packages", api.file.listdir(["fuchsia.dart"])
        )
        # Non-zero diff retcode => the commit/push branch is exercised.
        + api.step_data("dartdoc.git diff", retcode=1)
        # rustdoc: GN project.json with Rust and non-Rust targets, including
        # one whose generated Cargo.toml is missing (covers the skip path).
        + api.step_data(
            "rustdoc.read project.json",
            api.file.read_json(
                json_content={
                    "targets": {
                        "//topaz/target:lib": {
                            "type": "rust_library",
                            "crate_root": "src/lib.rs",
                        },
                        "//topaz/target:bin": {
                            "type": "executable",
                            "crate_root": "src/main.rs",
                        },
                        "//topaz/not_target:not_rust": {"type": "staticlib"},
                        "//topaz/target:missing_cargo": {
                            "type": "rust_library",
                            "crate_root": "src/lib.rs",
                        },
                        "//topaz/not_target:no_type": {},
                    }
                }
            ),
        )
        # fidldoc: two FIDL IR files listed in all_fidl_json.txt.
        + api.step_data(
            "fidldoc.read all_fidl_json.txt", api.raw_io.output("foo.fidl\nbar.fidl\n")
        )
        + api.step_data("fidldoc.git diff", retcode=1)
    )
    yield (api.status_check.test("firebase_docs") + base)
    # A failing rustdoc step must not fail the overall build (best-effort).
    yield (
        api.status_check.test("firebase_docs_failing_rustdoc")
        + base
        + api.step_data("rustdoc.rustdoc //topaz/target:lib", retcode=1)
    )