blob: 84c5a3b012bbc5f74947a38f99111d3e01dbb97f [file] [log] [blame]
# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for generating docs for upload to Firebase."""
from recipe_engine.config import Enum, List
from recipe_engine.recipe_api import Property
# Recipe module dependencies, kept in alphabetical order within each prefix
# group (infra/ first, then recipe_engine/).
DEPS = [
    'infra/auto_roller',
    'infra/build',
    'infra/checkout',
    'infra/fuchsia',
    'infra/git',
    'infra/jiri',
    'recipe_engine/buildbucket',
    'recipe_engine/cipd',
    'recipe_engine/context',
    'recipe_engine/file',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/platform',
    'recipe_engine/properties',
    'recipe_engine/python',
    'recipe_engine/raw_io',
    'recipe_engine/step',
]
# CPU architectures the Fuchsia build can target.
TARGETS = ['arm64', 'x64']
# Accepted build optimization/profile variants.
BUILD_TYPES = ['debug', 'release', 'thinlto', 'lto']
# Documentation generators this recipe knows how to run.
GENERATORS = ['dartdoc', 'rustdoc']
# Input properties accepted by this recipe.
PROPERTIES = {
    'dry_run':
        Property(
            kind=bool,
            # Fixed: dry_run=True means the firebase upload is SKIPPED;
            # the old help text said the opposite.
            help='Whether to skip uploading docs to firebase.',
            default=False),
    # TODO(juliehockett): Replace `basestring` with Enum(*GENERATORS) once crbug/903469 is resolved.
    'generators':
        Property(
            kind=List(basestring),
            # Fixed: this property selects doc generators, not build packages
            # (the old help text was copy-pasted from 'packages').
            help='Doc generators to run',
            default=GENERATORS),
    'project':
        Property(
            kind=str,
            help='Jiri remote manifest project',
            default='integration'),
    'manifest':
        Property(kind=str, help='Jiri manifest to use', default='topaz/topaz'),
    'remote':
        Property(
            kind=str,
            help='Remote manifest repository',
            default='https://fuchsia.googlesource.com/integration'),
    'target':
        Property(kind=Enum(*TARGETS), help='Target to build', default='x64'),
    'build_type':
        Property(
            kind=Enum(*BUILD_TYPES), help='The build type', default='release'),
    'packages':
        Property(
            kind=List(basestring),
            help='Packages to build',
            default=['//topaz/bundles:buildbot', '//bundles:kitchen_sink']),
}
# pubspec.yaml template for the synthetic package that aggregates every
# documentable Dart package in the checkout; gen_dartdoc appends one
# path-dependency entry per package under the `dependencies:` key before
# writing it out.
DARTDOC_PUBSPEC = """name: Fuchsia
homepage: https://fuchsia-docs.firebaseapp.com/dart
description: API documentation for fuchsia
dependencies:
"""
def gen_dartdoc(api, out_dir, docs_dir, cipd_dir):
  '''Generate dartdoc output.

  Dartdoc documents a single package (optionally with all of its
  dependencies), so to cover all of Fuchsia's Dart code we assemble a
  synthetic package that depends on every documentable package in the
  checkout, resolve it with `pub get`, and run `dartdoc` over the result.

  Args:
    api - The recipe API object.
    out_dir (Path) - The output directory for generated files.
    docs_dir (Path) - The output directory for documentation.
    cipd_dir (Path) - The cipd directory.
  '''
  packages_root = api.path['start_dir'].join('topaz', 'public', 'dart')
  api.path.mock_add_paths(packages_root)
  # No topaz checkout means there are no Dart packages to document.
  if not api.path.exists(packages_root):
    return  # pragma: no cover
  # Staging directory for the synthetic package's lib/ sources.
  api.file.ensure_directory('create lib dir', out_dir.join('lib'))
  # Text of the synthetic package's lib.dart and pubspec.yaml, built up below.
  imports_text = 'library Fuchsia;\n'
  pubspec_text = DARTDOC_PUBSPEC
  # Candidate packages are the directories under topaz/public/dart.
  entries = api.file.listdir(
      'list dart packages',
      packages_root,
      test_data=('fuchsia', 'topaz', 'other'))
  package_names = [api.path.basename(entry) for entry in entries]
  api.path.mock_add_paths(packages_root.join('fuchsia', 'lib'))
  api.path.mock_add_paths(packages_root.join('fuchsia', 'pubspec.yaml'))
  api.path.mock_add_paths(packages_root.join('topaz', 'lib'))
  for name in package_names:
    # Only real pub packages (with sources and a pubspec) are documentable.
    if not api.path.exists(packages_root.join(name, 'lib')):
      continue
    pubspec_path = packages_root.join(name, 'pubspec.yaml')
    if not api.path.exists(pubspec_path):
      continue
    pubspec = api.python(
        'load %s pubspec.yaml' % name,
        api.resource('parse_yaml.py'),
        args=[pubspec_path],
        stdout=api.json.output()).stdout
    # Skip packages whose pubspec name disagrees with the directory name.
    if not pubspec or pubspec['name'] != name:
      continue  # pragma: no cover
    pubspec_text += ' %s:\n path: %s/\n' % (name, packages_root.join(name))
    lib_entries = api.file.listdir('list %s packages' % name,
                                   packages_root.join(name, 'lib'))
    for entry in lib_entries:
      lib_file = api.path.basename(entry)
      if lib_file.endswith('.dart'):
        imports_text += "import 'package:%s/%s';\n" % (name, lib_file)
  # Materialize the synthetic package depending on all source packages.
  api.file.write_text('write pubspec.yaml', out_dir.join('pubspec.yaml'),
                      pubspec_text)
  api.file.write_text('write lib.dart', out_dir.join('lib', 'lib.dart'),
                      imports_text)
  # Fetch the dependency closure.
  with api.context(cwd=out_dir):
    api.step('pub', [cipd_dir.join('dart-sdk', 'bin', 'pub'), 'get'])
  # Document the synthetic package and everything it depends on.
  with api.context(cwd=out_dir):
    api.step('dartdoc', [
        cipd_dir.join('dart-sdk', 'bin', 'dartdoc'),
        '--auto-include-dependencies',
        '--exclude-packages',
        'Dart',
        '--output',
        docs_dir.join('public', 'dart'),
    ])
def gen_rustdoc(api, docs_dir, build_dir):
  '''Generate rustdoc output.

  The rustdoc script runs on GN targets. Rust GN targets are identified as
  the action targets whose first output is a Cargo.toml file.

  Args:
    api - The recipe API object.
    docs_dir (Path) - The output directory for documentation.
    build_dir (Path) - The Fuchsia build output directory.
  '''
  # Ask GN for the outputs of every action target in the build.
  step_result = api.step(
      'gn desc', [
          api.path['start_dir'].join('buildtools', 'gn'),
          'desc',
          build_dir,
          '//*',
          'outputs',
          '--type=action',
          '--format=json',
      ],
      stdout=api.json.output())
  # Targets whose Cargo.toml was never generated (e.g. not built).
  skipped = []
  test_cargo_path = build_dir.join('target', 'Cargo.toml')
  api.path.mock_add_paths(test_cargo_path)
  for target in step_result.stdout:
    desc = step_result.stdout[target]
    if 'outputs' not in desc:
      continue
    outputs = desc['outputs']
    # If the target doesn't output a Cargo.toml file, it's not a rust target.
    if not outputs or not outputs[0].endswith('Cargo.toml'):
      continue
    # GN outputs are source-absolute ('//...'); rebuild a real path.
    output = api.path['start_dir'].join(*outputs[0].split('/'))
    if not api.path.exists(output):
      skipped.append(target)
      continue
    with api.context(env={'FUCHSIA_DIR': api.path['start_dir']}):
      api.step('rustdoc %s' % target, [
          api.path['start_dir'].join('tools', 'devshell', 'contrib', 'lib',
                                     'rust', 'rustdoc.py'),
          output,
          '--no-deps',
          '--out-dir',
          build_dir,
      ])
  # Map host platform onto the buildtools directory naming scheme.
  platform = '%s-%s' % (api.platform.name.replace('win', 'windows'), {
      'intel': {
          32: '386',
          64: 'x64',
      },
      'arm': {
          32: 'armv6',
          64: 'arm64',
      },
  }[api.platform.arch][api.platform.bits])
  rust_bin_dir = api.path['start_dir'].join('buildtools', platform, 'rust',
                                            'bin')
  # Document the vendored third-party crates with cargo itself.
  with api.context(
      env={
          'RUSTC': rust_bin_dir.join('rustc'),
          'RUSTDOC': rust_bin_dir.join('rustdoc')
      },
      cwd=api.path['start_dir'].join('third_party', 'rust_crates')):
    api.step('cargo doc third_party/rust_crates',
             [rust_bin_dir.join('cargo'), 'doc', '--target=x86_64-fuchsia'])
  # Move the output to the docs directory.
  step_result = api.step('move output to docs', [
      'mv',
      api.path['start_dir'].join('out', 'cargo_target', 'x86_64-fuchsia',
                                 'doc'),
      docs_dir.join('public', 'rust'),
  ])
  step_result.presentation.logs['skipped'] = skipped
def RunSteps(api, dry_run, project, manifest, remote, target, build_type,
             packages, generators):
  '''Check out and build Fuchsia, generate docs, and deploy to firebase.'''
  api.jiri.ensure_jiri()
  cipd_dir = api.path['start_dir'].join('cipd')
  node_modules_dir = cipd_dir.join('node_modules')
  # Install the tooling (nodejs, dart-sdk, firebase-tools) from CIPD.
  with api.step.nest('ensure_packages'):
    with api.context(infra_steps=True):
      tool_pkgs = api.cipd.EnsureFile()
      tool_pkgs.add_package('infra/nodejs/nodejs/${platform}', 'latest')
      if 'dartdoc' in generators:
        tool_pkgs.add_package('dart/dart-sdk/${platform}', 'dev')
      api.cipd.ensure(cipd_dir, tool_pkgs)
      # firebase-tools expects to live in the node_modules subdir of where
      # nodejs is installed.
      npm_pkgs = api.cipd.EnsureFile()
      npm_pkgs.add_package('infra/npm/firebase-tools', 'latest')
      api.cipd.ensure(node_modules_dir, npm_pkgs)
  # Static site resources that wrap the generated docs.
  resources_dir = api.path['start_dir'].join('api-docs-resources')
  api.git.checkout(
      'https://fuchsia.googlesource.com/api-docs-resources',
      path=resources_dir)
  # Check out and build Fuchsia so the generators have sources and outputs.
  checkout = api.checkout.fuchsia_with_options(
      path=api.path['start_dir'],
      manifest=manifest,
      remote=remote,
      project=project,
      build=api.buildbucket.build,
  )
  build = api.build.with_options(
      build_dir=api.path['start_dir'].join('out'),
      checkout=checkout,
      target=target,
      build_type=build_type,
      packages=packages,
      product='products/core.gni')
  out_dir = api.path['start_dir'].join('docs_out')
  docs_dir = api.path['start_dir'].join('firebase')
  # Start the deploy tree from a clean copy of the static resources.
  api.file.rmtree('remove old docs', docs_dir)
  api.file.copytree('copy resources', resources_dir, docs_dir)
  # Run each requested generator under its own step nest.
  if 'dartdoc' in generators:
    with api.step.nest('dartdoc'):
      gen_dartdoc(api, out_dir, docs_dir, cipd_dir)
  if 'rustdoc' in generators:
    with api.step.nest('rustdoc'):
      gen_rustdoc(api, docs_dir, build.fuchsia_build_dir)
  # Upload unless this is a dry run.
  if not dry_run:
    with api.context(cwd=docs_dir, env={'PATH': cipd_dir.join('bin')}):
      api.step('firebase deploy', [
          node_modules_dir.join('.bin', 'firebase'),
          'deploy',
          '--only',
          'hosting',
          '--debug',
      ])
def GenTests(api):
  '''Recipe simulation tests.'''
  # Canned `gn desc` JSON covering each branch of gen_rustdoc: a real rust
  # target, a non-rust action, a rust target whose outputs are missing, and
  # an action with no outputs at all.
  gn_desc_json = {
      "//topaz/target:target_cargo": {
          "outputs": ["//out/x64/target/Cargo.toml"]
      },
      "//topaz/not_target:not_target": {
          "outputs": ["//out/x64/not_target.h", "//out/x64/not_target.cc"]
      },
      "//topaz/target:missing_outputs": {
          "outputs": ["//out/x64/missing/Cargo.toml"]
      },
      "//topaz/not_target:no_outputs": {}
  }
  test = api.test('firebase_docs')
  test += api.buildbucket.ci_build(
      git_repo='https://fuchsia.googlesource.com/topaz',)
  test += api.step_data(
      'dartdoc.load fuchsia pubspec.yaml',
      stdout=api.json.output({'name': 'fuchsia'}))
  test += api.step_data('dartdoc.list fuchsia packages',
                        api.file.listdir(['fuchsia.dart']))
  test += api.step_data('rustdoc.gn desc', stdout=api.json.output(gn_desc_json))
  yield test