# blob: 343d49cb380020b8257a8587403f6bfec64d47a2 (code-viewer residue, not part of the script)
#!/usr/bin/env python
# Copyright 2018 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import StringIO
import argparse
import functools
import json
import operator
import os
import sys
import tarfile
import time
import zipfile
def generate_script(images, type):
    """Return the text of a shell script that runs the bootserver.

    Args:
      images: list of image dicts, each with 'name' and 'path' keys; an image
        participates in the command line when it has a `type` key whose value
        is a list of bootserver switches for that image.
      type: which switch list to read from each image, e.g. 'bootserver_pave'
        or 'bootserver_netboot'.  (Shadows the builtin; name kept so the
        call interface is unchanged.)

    Raises:
      ValueError: if there is not exactly one image named 'bootserver'.
    """
    # The bootserver must be in there or we lose.
    # TODO(mcgrathr): Multiple bootservers for different platforms
    # and switch in the script.
    [bootserver] = [image['path'] for image in images
                    if image['name'] == 'bootserver']
    # NOTE(review): the heredoc was truncated in the source; the '#!/bin/sh'
    # line is inferred -- the visible lines below it are verbatim.
    script = '''\
#!/bin/sh
dir="$(dirname "$0")"
set -x
'''
    # Map each switch to the (quoted, $dir-relative) path of its image.
    switches = dict((switch, '"$dir/%s"' % image['path'])
                    for image in images if type in image
                    for switch in image[type])
    cmd = ['exec', '"$dir/%s"' % bootserver]
    # sorted() keeps the generated script deterministic; items() replaces the
    # original iteritems(), which Python 3 removed.
    for switch, path in sorted(switches.items()):
        cmd += [switch, path]
    script += ' '.join(cmd) + '\n'
    return script
class TGZArchiver(object):
    """Writes a gzipped tarball.  Public interface needs to match ZipArchiver."""

    def __init__(self, outfile):
        # dereference=True stores a symlink's target file, not the link.
        # NOTE(review): the call was garbled in the source; tarfile.open is
        # the only stdlib call matching the 'w:gz' / dereference arguments.
        self._archive = tarfile.open(outfile, 'w:gz', dereference=True)

    def __enter__(self):
        return self

    def __exit__(self, unused_type, unused_value, unused_traceback):
        self._archive.close()

    @staticmethod
    def _sanitize_tarinfo(executable, info):
        """Normalize a TarInfo for reproducible output: fixed mode, no owner."""
        assert info.isfile()
        info.mode = 0o555 if executable else 0o444
        info.uid = 0
        info.gid = 0
        info.uname = ''
        info.gname = ''
        return info

    def add_path(self, path, name, executable):
        """Add the file at `path` to the archive under `name`."""
        self._archive.add(
            path, name,
            filter=functools.partial(self._sanitize_tarinfo, executable))

    def add_contents(self, contents, name, executable):
        """Add a member named `name` holding the literal `contents`."""
        # Local import: the file-level `import StringIO` is Python 2 only.
        import io
        # tarfile.addfile needs a binary stream and a byte count on Python 3.
        data = contents if isinstance(contents, bytes) else contents.encode('utf-8')
        info = self._sanitize_tarinfo(executable, tarfile.TarInfo(name))
        info.size = len(data)
        info.mtime = time.time()
        self._archive.addfile(info, io.BytesIO(data))
class ZipArchiver(object):
    """Writes a .zip archive.  Public interface needs to match TGZArchiver."""

    def __init__(self, outfile):
        self._archive = zipfile.ZipFile(outfile, 'w', zipfile.ZIP_DEFLATED)
        # ZipFile.comment must be bytes on Python 3 (a str worked on Python 2).
        self._archive.comment = b'Fuchsia build archive'

    def __enter__(self):
        return self

    def __exit__(self, unused_type, unused_value, unused_traceback):
        # NOTE(review): dropped in the source; without close() the archive's
        # central directory is never written.
        self._archive.close()

    def add_path(self, path, name, unused_executable):
        """Add the file at `path` under `name`; zip members carry no exec bit."""
        self._archive.write(path, name)

    def add_contents(self, contents, name, unused_executable):
        """Add a member named `name` holding the literal `contents`."""
        self._archive.writestr(name, contents)
def format_archiver(outfile, format):
    """Construct the archiver writing to `outfile` for `format`.

    `format` must be 'tgz' or 'zip'; any other value raises KeyError,
    exactly as the table lookup always has.
    """
    archiver_by_format = {
        'tgz': TGZArchiver,
        'zip': ZipArchiver,
    }
    archiver_class = archiver_by_format[format]
    return archiver_class(outfile)
def write_archive(outfile, format, images):
    """Write an archive of `images` to `outfile` in `format` ('tgz'/'zip').

    Each image file is archived under a sanitized name ('<name>.<type>');
    generated pave/netboot scripts and a self-describing images.json manifest
    are added alongside.  NOTE: mutates the image dicts in place ('archive'
    key removed, 'path' rewritten) -- callers rely on seeing original paths
    before calling this, so that behavior is preserved.
    """
    # Synthesize a sanitized form of the input: (real path, rewritten dict).
    path_images = []
    for image in images:
        path = image['path']
        if 'archive' in image:
            del image['archive']
        image['path'] = image['name'] + '.' + image['type']
        path_images.append((path, image))

    # Generate scripts that use the sanitized file names.
    # NOTE(review): the 'path' values were truncated in the source;
    # '<name>.sh' is inferred from the name/type fields.
    content_images = [
        (generate_script([image for path, image in path_images],
                         'bootserver_pave'),
         {
             'name': 'pave',
             'type': 'sh',
             'path': 'pave.sh',
         }),
        (generate_script([image for path, image in path_images],
                         'bootserver_netboot'),
         {
             'name': 'netboot',
             'type': 'sh',
             'path': 'netboot.sh',
         }),
    ]

    # Self-reference: a JSON manifest listing every other member.  It must be
    # appended after the list above exists, since the dump reads it.
    content_images.append(
        (json.dumps([image for _, image in path_images + content_images],
                    indent=2, sort_keys=True),
         {
             'name': 'images',
             'type': 'json',
             'path': 'images.json',
         }))

    # Canonicalize the order of the files in the archive.
    path_images = sorted(path_images, key=lambda pair: pair[1]['path'])
    content_images = sorted(content_images, key=lambda pair: pair[1]['path'])

    def is_executable(image):
        # Shell scripts and anything typed 'exe*' get the exec bit.
        return image['type'] == 'sh' or image['type'].startswith('exe')

    with format_archiver(outfile, format) as archiver:
        for path, image in path_images:
            archiver.add_path(path, image['path'], is_executable(image))
        for contents, image in content_images:
            archiver.add_contents(contents, image['path'], is_executable(image))
def write_symbol_archive(outfile, format, ids_file, files_read):
    """Archive the debug-symbol files listed in `ids_file`.

    `ids_file` has lines of '<build-id> <path>'.  Each named file is stored
    under its path relative to '../..' (the source root, when run from the
    build directory), and a rewritten ids.txt mapping build ids to the
    archived names is appended last.

    files_read: set updated with each file pulled into the archive so the
      caller can emit a correct depfile.  NOTE(review): the source accepted
      this parameter but never touched it -- almost certainly a dropped line,
      restored here; confirm against upstream.
    """
    with open(ids_file, 'r') as f:
        ids = [line.split() for line in f]
    out_ids = ''
    with format_archiver(outfile, format) as archiver:
        # Locals renamed from id/file to avoid shadowing builtins.
        for build_id, file_path in ids:
            file_path = os.path.relpath(file_path)
            name = os.path.relpath(file_path, '../..')
            archiver.add_path(file_path, name, False)
            files_read.add(file_path)
            out_ids += '%s %s\n' % (build_id, name)
        archiver.add_contents(out_ids, 'ids.txt', False)
def archive_format(args, outfile):
    """Return the archive format: explicit --format, else guessed from suffix.

    Args:
      args: parsed arguments; only args.format is read.
      outfile: output file name whose suffix picks 'zip' or 'tgz'.

    Exits with a fatal message when no format is given and none can be
    guessed from the name.
    """
    if args.format:
        return args.format
    if outfile.endswith('.zip'):
        return 'zip'
    if outfile.endswith('.tgz') or outfile.endswith('.tar.gz'):
        return 'tgz'
    # NOTE(review): the failing call was garbled in the source; sys.exit with
    # the (verbatim) message matches the script's fatal-error style.
    sys.exit('''\
Cannot guess archive format from file name %r; use --format.
''' % outfile)
def main():
    """Command-line entry point: write scripts and/or archives per flags."""
    parser = argparse.ArgumentParser(description='Pack Fuchsia build images.')
    # NOTE(review): the option strings were dropped in the source; they are
    # reconstructed from the args.* attributes used below -- confirm upstream.
    parser.add_argument('--depfile', metavar='FILE',
                        help='Write Ninja dependencies file')
    parser.add_argument('json', nargs='+', metavar='FILE',
                        help='Read JSON image list from FILE')
    parser.add_argument('--pave', metavar='FILE',
                        help='Write paving bootserver script to FILE')
    parser.add_argument('--netboot', metavar='FILE',
                        help='Write netboot bootserver script to FILE')
    parser.add_argument('--archive', metavar='FILE',
                        help='Write archive to FILE')
    parser.add_argument('--symbol-archive', metavar='FILE',
                        help='Write symbol archive to FILE')
    parser.add_argument('--format', choices=['tgz', 'zip'],
                        help='Archive format (default: from FILE suffix)')
    args = parser.parse_args()

    # Keep track of every input file for the depfile.
    files_read = set()

    def read_json_file(filename):
        # Record the read for the depfile before parsing.
        files_read.add(filename)
        with open(filename, 'r') as f:
            return json.load(f)

    # Concatenate the image lists from every input file.  functools.reduce
    # works on Python 2 and 3 (the bare reduce() builtin is Python 2 only).
    images = functools.reduce(operator.add,
                              (read_json_file(file) for file in args.json),
                              [])

    outfile = None

    # Write an executable script into outfile for the given bootserver mode.
    def write_script_for(outfile, mode):
        # os.open with an explicit 0o777 mode (filtered by umask) so the
        # script is created executable, which plain open() cannot do.
        fd = os.open(outfile, os.O_CREAT | os.O_TRUNC | os.O_WRONLY, 0o777)
        with os.fdopen(fd, 'w') as script_file:
            script_file.write(generate_script(images, mode))

    # First write the local scripts that work relative to the build directory.
    if args.pave:
        outfile = args.pave
        write_script_for(args.pave, 'bootserver_pave')
    if args.netboot:
        outfile = args.netboot
        write_script_for(args.netboot, 'bootserver_netboot')

    if args.archive:
        outfile = args.archive
        # Archive every image explicitly marked, plus anything a bootserver
        # script would reference.
        archive_images = [image for image in images
                          if (image.get('archive', False) or
                              'bootserver_pave' in image or
                              'bootserver_netboot' in image)]
        # Capture real paths before write_archive rewrites image['path'].
        files_read |= set(image['path'] for image in archive_images)
        write_archive(outfile, archive_format(args, outfile), archive_images)

    if args.symbol_archive:
        outfile = args.symbol_archive
        # Exactly one build-id txt image must exist (ValueError otherwise).
        [ids_file] = [image['path'] for image in images
                      if (image['name'] == 'build-id' and
                          image['type'] == 'txt')]
        write_symbol_archive(outfile, archive_format(args, outfile),
                             ids_file, files_read)

    # The depfile names only the last output written; inputs are sorted for
    # deterministic output.
    if outfile and args.depfile:
        with open(args.depfile, 'w') as depfile:
            depfile.write('%s: %s\n' % (outfile, ' '.join(sorted(files_read))))

    return 0
if __name__ == "__main__":
    # NOTE(review): the source was truncated here; invoking main() and
    # propagating its return value is the conventional completion.
    sys.exit(main())