# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for generating docs for upload to Firebase."""
from recipe_engine.config import Enum, List
from recipe_engine.recipe_api import Property
DEPS = [
'fuchsia/auto_roller',
'fuchsia/build',
'fuchsia/checkout',
'fuchsia/fuchsia',
'fuchsia/git',
'fuchsia/jiri',
'recipe_engine/buildbucket',
'recipe_engine/cipd',
'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
]
TARGETS = ['arm64', 'x64']
BUILD_TYPES = ['debug', 'release', 'thinlto', 'lto']
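# Documentation generators supported by this recipe: dartdoc (Dart API docs),
# rustdoc (Rust crate docs), fidldoc (FIDL reference docs), and clangdoc
# (C/C++ docs via clang-doc).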
GENERATORS = ['dartdoc', 'rustdoc', 'fidldoc', 'clangdoc']
FIDLDOC_COMMIT_MESSAGE = '[fidldoc] Updating fidldocs'
REFERENCE_DOCS_REPOSITORY = 'https://fuchsia.googlesource.com/reference-docs'
API_DOCS_RESOURCES_REPOSITORY = 'https://fuchsia.googlesource.com/api-docs-resources'
PROPERTIES = {
'dry_run':
Property(
kind=bool,
            help='Whether to skip uploading docs to firebase.',
default=False),
# TODO(juliehockett): Replace `basestring` with Enum(*GENERATORS) once crbug/903469 is resolved.
'generators':
Property(
            kind=List(basestring), help='Doc generators to run',
default=GENERATORS),
'project':
Property(
kind=str,
help='Jiri remote manifest project',
default='integration'),
'manifest':
Property(kind=str, help='Jiri manifest to use', default='topaz/topaz'),
'remote':
Property(
kind=str,
help='Remote manifest repository',
default='https://fuchsia.googlesource.com/integration'),
'target':
Property(kind=Enum(*TARGETS), help='Target to build', default='x64'),
'build_type':
Property(
kind=Enum(*BUILD_TYPES), help='The build type', default='release'),
'packages':
Property(
kind=List(basestring),
help='Packages to build',
default=['//topaz/bundles:buildbot', '//bundles:kitchen_sink']),
'output_name':
Property(
kind=str,
help='Name of top-level output reference-docs directory',
default='all'),
}
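# Template for the synthetic package's pubspec.yaml; gen_dartdoc appends one
# `<package>: path: <path>/` entry under `dependencies:` for each documentable
# dart package it finds.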
DARTDOC_PUBSPEC = """name: Fuchsia
homepage: https://fuchsia-docs.firebaseapp.com/dart
description: API documentation for fuchsia
dependencies:
"""
def gen_dartdoc(api, out_dir, docs_dir, cipd_dir):
"""Generate dartdoc output.
Dartdoc runs on a single package, but has the capability to generate docs for all
dependencies. Thus, to generate Dart documentation for Fuchsia, we first generate
a 'fake' package that lists the libraries we want documented. We then run `pub`
over that new package to fetch the dependencies, and finally `dartdoc` to generate
documentation for it all.
Args:
out_dir (Path) - The output directory for generated files.
docs_dir (Path) - The output directory for documentation.
cipd_dir (Path) - The cipd directory.
"""
dart_packages_path = api.path['start_dir'].join('topaz', 'public', 'dart')
api.path.mock_add_paths(dart_packages_path)
  # If the dart packages path doesn't exist, we didn't check out topaz, so we
  # won't generate dart docs on this run.
if not api.path.exists(dart_packages_path):
return # pragma: no cover
  # Create the lib dir inside the temporary package whose docs will be pushed to firebase.
api.file.ensure_directory('create lib dir', out_dir.join('lib'))
  # Build pubspec.yaml and lib.dart importing all desired packages.
dart_imports_content = 'library Fuchsia;\n'
dart_pubspec_content = DARTDOC_PUBSPEC
# Gather documentable dart packages.
dart_packages = [
api.path.basename(p) for p in api.file.listdir(
'list dart packages',
dart_packages_path,
test_data=('fuchsia', 'topaz', 'other'))
]
api.path.mock_add_paths(dart_packages_path.join('fuchsia', 'lib'))
api.path.mock_add_paths(dart_packages_path.join('fuchsia', 'pubspec.yaml'))
api.path.mock_add_paths(dart_packages_path.join('topaz', 'lib'))
for package in dart_packages:
if not api.path.exists(dart_packages_path.join(package, 'lib')):
continue
pubspec_path = dart_packages_path.join(package, 'pubspec.yaml')
if not api.path.exists(pubspec_path):
continue
pubspec = api.python(
'load %s pubspec.yaml' % package,
api.resource('parse_yaml.py'),
args=[pubspec_path],
stdout=api.json.output()).stdout
if not pubspec or pubspec['name'] != package:
continue # pragma: no cover
dart_pubspec_content += ' %s:\n path: %s/\n' % (
package, dart_packages_path.join(package))
package_imports = [
api.path.basename(i)
for i in api.file.listdir('list %s packages' % package,
dart_packages_path.join(package, 'lib'))
if api.path.basename(i).endswith('.dart')
]
for i in package_imports:
dart_imports_content += 'import \'package:%s/%s\';\n' % (package, i)
# Build package pubspec.yaml depending on all desired source packages.
api.file.write_text('write pubspec.yaml', out_dir.join('pubspec.yaml'),
dart_pubspec_content)
api.file.write_text('write lib.dart', out_dir.join('lib', 'lib.dart'),
dart_imports_content)
# Run pub over this package to fetch deps.
with api.context(cwd=out_dir):
api.step('pub', [cipd_dir.join('dart-sdk', 'bin', 'pub'), 'get'])
# Run dartdoc over this package.
with api.context(cwd=out_dir):
api.step('dartdoc', [
cipd_dir.join('dart-sdk', 'bin', 'dartdoc'),
'--auto-include-dependencies',
'--exclude-packages',
'Dart',
'--output',
docs_dir.join('public', 'dart'),
])
def gen_rustdoc(api, docs_dir, build_dir, gn_results):
"""Generate rust output.
  The rustdoc script runs on GN targets. We identify the Rust GN targets as
  those whose outputs include a Cargo.toml file.
Args:
docs_dir (Path) - The output directory for documentation.
build_dir (Path) - The build directory.
gn_results (Object) - Result of a `gn gen` invocation in the fuchsia build.
"""
step_result = api.step(
'gn desc', [
gn_results.tool('gn'),
'desc',
build_dir,
'//*',
'outputs',
'--type=action',
'--format=json',
],
stdout=api.json.output())
skipped = []
test_cargo_path = build_dir.join('target', 'Cargo.toml')
api.path.mock_add_paths(test_cargo_path)
for target in step_result.stdout:
if not 'outputs' in step_result.stdout[target]:
continue
outputs = step_result.stdout[target]['outputs']
# If the target doesn't output a Cargo.toml file, it's not a rust target.
if not outputs or not outputs[0].endswith('Cargo.toml'):
continue
output = api.path['start_dir'].join(*outputs[0].split('/'))
if not api.path.exists(output):
skipped.append(target)
continue
with api.context(env={
'FUCHSIA_DIR': api.path['start_dir'],
'FUCHSIA_BUILD_DIR': build_dir,
}):
api.step('rustdoc %s' % target, [
api.path['start_dir'].join('tools', 'devshell', 'contrib', 'lib',
'rust', 'rustdoc.py'),
output,
'--no-deps',
'--out-dir',
build_dir,
])
with api.context(
env={
'RUSTC': gn_results.tool('rustc'),
'RUSTDOC': gn_results.tool('rustdoc')
},
cwd=api.path['start_dir'].join('third_party', 'rust_crates')):
api.step('cargo doc third_party/rust_crates',
[gn_results.tool('cargo'), 'doc', '--target=x86_64-fuchsia'])
# Move the output to the docs directory.
step_result = api.step('move output to docs', [
'mv',
api.path['start_dir'].join('out', 'cargo_target', 'x86_64-fuchsia',
'doc'),
docs_dir.join('public', 'rust'),
])
step_result.presentation.logs['skipped'] = skipped
def gen_fidldoc(api, build_dir, output_name, dry_run):
"""Generate fidl output.
  The fidldoc tool runs over the FIDL JSON files listed in all_fidl_json.txt
  and pushes the resulting docs to the reference-docs repository.
  Args:
    build_dir (Path) - The build directory.
    output_name (str) - Name of the top-level output directory in the
      reference-docs repository.
    dry_run (bool) - Generate and commit docs, but do not push.
"""
out_dir = api.path['start_dir'].join('fidldoc_out')
all_fidl_json_txt = build_dir.join('all_fidl_json.txt')
all_fidl_json = api.file.read_text('read all_fidl_json.txt',
all_fidl_json_txt).splitlines()
with api.context(cwd=build_dir):
# Cannot use tool_paths.json here, since it maps to host_x64, which
# doesn't work for fidldoc because of the accompanying fidldoc.config.json
# that is only in host-tools.
fidldoc_path = build_dir.join('host-tools/fidldoc')
api.step('run fidldoc', [
fidldoc_path, '--verbose', '--path', '/reference/fidl/', '--out',
out_dir
] + all_fidl_json)
# Push resulting docs to the reference-docs repository.
# The fidldocs get checked into their own repository. Checking it out here reduces the likelihood
# of a race with another bot trying to commit in between this bot's checkout and commit steps.
fidldoc_docs_dir = api.path['start_dir'].join('reference-docs')
with api.step.nest('checkout fidldoc'):
api.git.checkout(REFERENCE_DOCS_REPOSITORY, path=fidldoc_docs_dir)
with api.context(cwd=fidldoc_docs_dir):
# Clear the repository and move results in.
api.file.rmtree(
name='Clear reference-docs/out',
source=fidldoc_docs_dir.join(output_name))
api.file.move(
name='Move docs to reference-docs/out',
source=out_dir,
dest=fidldoc_docs_dir.join(output_name, 'fidl'))
    # Add all modified files to the git index.
api.git('add', '-A')
    # Only commit and push if there's a diff; otherwise the commit would fail.
if api.git('diff', '--cached', '--exit-code', ok_ret='any').retcode:
api.git.commit(FIDLDOC_COMMIT_MESSAGE)
if not dry_run:
api.git.push(ref='HEAD:master')
def gen_clang_doc(api, docs_dir, build_dir, gn_results):
"""Generate clang-doc output.
  clang-doc runs on the translation units listed in the compilation database
  file, which is obtained from gn_results after filtering out unwanted
  directories and files. clang-doc writes documentation files directly into
  docs_dir.
Args:
docs_dir (Path) - The output directory for documentation.
build_dir (Path) - The build directory.
gn_results (Object) - Result of a `gn gen` invocation in the fuchsia build.
"""
with api.step.nest('filter compile commands'):
white_list_dirs = ['third_party']
compile_commands = gn_results.filtered_compdb(white_list_dirs)
with api.context(cwd=build_dir):
api.step('run clang-doc', [
gn_results.tool('clang-doc'), '--output',
docs_dir.join('public', 'cpp'), '--public', '--format=html',
compile_commands
])
def RunSteps(api, dry_run, project, manifest, remote, target, build_type,
packages, generators, output_name):
cipd_dir = api.path['start_dir'].join('cipd')
node_modules_dir = cipd_dir.join('node_modules')
with api.step.nest('ensure_packages'):
with api.context(infra_steps=True):
pkgs = api.cipd.EnsureFile()
pkgs.add_package('infra/nodejs/nodejs/${platform}', 'latest')
if 'dartdoc' in generators:
pkgs.add_package('dart/dart-sdk/${platform}', 'dev')
api.cipd.ensure(cipd_dir, pkgs)
# firebase-tools expects to live in the node_modules subdir of where nodejs is installed.
pkgs = api.cipd.EnsureFile()
pkgs.add_package('infra/npm/firebase-tools', 'latest')
api.cipd.ensure(node_modules_dir, pkgs)
resources_dir = api.path['start_dir'].join('api-docs-resources')
with api.step.nest('checkout api docs'):
api.git.checkout(API_DOCS_RESOURCES_REPOSITORY, path=resources_dir)
checkout_root = api.path['start_dir']
with api.step.nest('checkout fuchsia'):
checkout = api.checkout.fuchsia_with_options(
path=checkout_root,
manifest=manifest,
remote=remote,
project=project,
build=api.buildbucket.build,
)
gn_results = api.build.gen(
checkout_root=checkout.root_dir,
fuchsia_build_dir=api.path['start_dir'].join('out', 'default'),
target=target,
build_type=build_type,
board='boards/%s.gni' % target,
product='products/core.gni',
packages=packages + ['//tools/fidl/fidldoc'],
export_compdb=True,
)
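  # export_compdb=True makes `gn gen` produce a compilation database, which
  # gen_clang_doc consumes below via gn_results.filtered_compdb().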
api.build.ninja(
checkout_root=checkout.root_dir,
gn_results=gn_results,
)
out_dir = api.path['start_dir'].join('docs_out')
docs_dir = api.path['start_dir'].join('firebase')
api.file.rmtree('remove old docs', docs_dir)
api.file.copytree('copy resources', resources_dir, docs_dir)
if 'dartdoc' in generators:
with api.step.nest('dartdoc'):
gen_dartdoc(api, out_dir, docs_dir, cipd_dir)
if 'rustdoc' in generators:
with api.step.nest('rustdoc'):
gen_rustdoc(api, docs_dir, gn_results.fuchsia_build_dir, gn_results)
if 'fidldoc' in generators:
with api.step.nest('fidldoc'):
gen_fidldoc(api, gn_results.fuchsia_build_dir, output_name, dry_run)
if 'clangdoc' in generators:
with api.step.nest('clangdoc'):
gen_clang_doc(api, docs_dir, api.path['start_dir'], gn_results)
  # Only deploy when generating the default 'all' output, to avoid deploying partial docs to firebase.
if not dry_run and output_name == 'all':
with api.context(cwd=docs_dir, env={'PATH': cipd_dir.join('bin')}):
api.step('firebase deploy', [
node_modules_dir.join('.bin', 'firebase'),
'deploy',
'--only',
'hosting',
'--debug',
])
def GenTests(api):
yield (
api.test('firebase_docs') +
api.buildbucket.ci_build(
git_repo='https://fuchsia.googlesource.com/topaz',) +
api.step_data(
'dartdoc.load fuchsia pubspec.yaml',
stdout=api.json.output({'name': 'fuchsia'})) +
api.step_data(
'dartdoc.list fuchsia packages',
api.file.listdir(['fuchsia.dart'])) +
api.step_data(
'rustdoc.gn desc',
stdout=api.json.output({
'//topaz/target:target_cargo': {
'outputs': ['//out/default/target/Cargo.toml']
},
'//topaz/not_target:not_target': {
'outputs':
['//out/default/not_target.h', '//out/x64/not_target.cc']
},
'//topaz/target:missing_outputs': {
'outputs': ['//out/default/missing/Cargo.toml']
},
'//topaz/not_target:no_outputs': {}
})) +
api.step_data('fidldoc.read all_fidl_json.txt',
api.raw_io.output('foo.fidl\nbar.fidl\n')) +
api.step_data('fidldoc.git diff', retcode=1)) # yapf: disable