#!/usr/bin/env python3
#
# Copyright (c) 2022 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script reads a temporary directory of ffmpeg 'make' build artifacts
# and generates ffmpeg_generated.gni, the config/ directory, and CREDITS.fuchsia.
"""Creates a GN include file for building FFmpeg from source.
The way this works is a bit silly, but it's easier than reverse engineering
FFmpeg's configure scripts and Makefiles and manually maintaining Fuchsia
build files. It scans the build directories for object files, then does a
reverse lookup against the FFmpeg source tree to find the corresponding C or
assembly file.
Running build_ffmpeg.py for all (both) architectures is required prior to
running this script.
"""
import argparse
import collections
import copy
import credits_updater
import datetime
import functools
import hashlib
import itertools
import os
import re
import shutil
COPYRIGHT = """# Copyright %d The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# NOTE: this file is autogenerated by ffmpeg/fuchsia/scripts/generate_gn.py
""" % (
datetime.datetime.now().year
)
GN_HEADER = """import("//build/config/arm.gni")
import("ffmpeg_options.gni")
# Declare empty versions of each variable for easier +=ing later.
ffmpeg_c_sources = []
ffmpeg_gas_sources = []
ffmpeg_yasm_sources = []
"""
GN_CONDITION_BEGIN = """if (%s) {
"""
GN_CONDITION_END = """}
"""
GN_C_SOURCES_BEGIN = """ffmpeg_c_sources += [
"""
GN_GAS_SOURCES_BEGIN = """ffmpeg_gas_sources += [
"""
GN_YASM_SOURCES_BEGIN = """ffmpeg_yasm_sources += [
"""
GN_SOURCE_ITEM = """  "%s",
"""
GN_SOURCE_END = """]
"""
# Controls conditional stanza generation.
_Attrs = ("ARCHITECTURE", "PROFILE")
Attr = collections.namedtuple("Attr", _Attrs)(*_Attrs)
SUPPORT_MATRIX = {
Attr.ARCHITECTURE: set(["x64", "arm64"]),
Attr.PROFILE: set([]),
}
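# Illustrative note (not executed): Attr is a namedtuple *instance* whose
# field values equal the field names, giving enum-like string constants:
#
#   >>> Attr.ARCHITECTURE
#   'ARCHITECTURE'
#   >>> list(Attr)
#   ['ARCHITECTURE', 'PROFILE']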
def NormalizeFilename(name):
"""Removes leading path separators in an attempt to normalize paths."""
return name.lstrip(os.sep)
def CleanObjectFiles(object_files):
"""Removes unneeded object files due to linker errors, binary size, etc...
Args:
object_files: List of object files that needs cleaning.
"""
cleaning_list = [
"libavcodec/file_open.o", # Includes libavutil/file_open.c
"libavcodec/log2_tab.o", # Includes libavutil/log2_tab.c
"libavcodec/reverse.o", # Includes libavutil/reverse.c
"libavcodec/to_upper4.o",
"libavformat/log2_tab.o", # Includes libavutil/log2_tab.c
"libavformat/file_open.o", # Includes libavutil/file_open.c
"libavformat/mpeg4audio_sample_rates.o", # Includes libavcodec/mpeg4audio_sample_rates.h
"libavformat/mpegaudiotabs.o", # Includes libavcodec/mpegaudiotabs.h
# The following files are removed to trim down on binary size.
# TODO(ihf): Warning, it is *easy* right now to remove more files
# than is healthy and end up with a library that the linker does
# not complain about but that can't be loaded. Add some verification!
"libavcodec/audioconvert.o",
"libavcodec/resample.o",
"libavcodec/resample2.o",
"libavcodec/x86/dnxhd_mmx.o",
"libavformat/sdp.o",
"libavutil/adler32.o",
"libavutil/avsscanf.o",
"libavutil/audio_fifo.o",
"libavutil/blowfish.o",
"libavutil/cast5.o",
"libavutil/des.o",
"libavutil/file.o",
"libavutil/hash.o",
"libavutil/hmac.o",
"libavutil/lls.o",
"libavutil/murmur3.o",
"libavutil/rc4.o",
"libavutil/ripemd.o",
"libavutil/sha512.o",
"libavutil/tree.o",
"libavutil/xtea.o",
"libavutil/xga_font_data.o",
]
for name in cleaning_list:
name = name.replace("/", os.sep)
if name in object_files:
object_files.remove(name)
return object_files
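# Illustrative example (hypothetical input, not executed; assumes a POSIX
# path separator): entries on the cleaning list are dropped, everything else
# passes through:
#
#   >>> CleanObjectFiles(["libavutil/adler32.o", "libavcodec/fft.o"])
#   ['libavcodec/fft.o']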
def IsAssemblyFile(f):
_, ext = os.path.splitext(f)
return ext in [".S", ".asm"]
def IsGasFile(f):
_, ext = os.path.splitext(f)
return ext in [".S"]
def IsYasmFile(f):
_, ext = os.path.splitext(f)
return ext in [".asm"]
def IsCFile(f):
_, ext = os.path.splitext(f)
return ext in [".c"]
def IsSourceFile(f):
return IsAssemblyFile(f) or IsCFile(f)
def GetSourceFiles(source_dir):
"""Returns a list of source files for the given source directory.
Args:
source_dir: Path to build a source mapping for.
Returns:
A python list of source file paths.
"""
def IsSourceDir(d):
return d != ".git"
source_files = []
for root, dirs, files in os.walk(source_dir):
# Prune ignored directories in place so os.walk() actually skips them;
# rebinding `dirs` would have no effect on the walk.
dirs[:] = filter(IsSourceDir, dirs)
files = filter(IsSourceFile, files)
# Strip leading source_dir from root.
root = root[len(source_dir) :]
source_files.extend(
[NormalizeFilename(os.path.join(root, name)) for name in files]
)
return source_files
def GetObjectFiles(build_dir):
"""Returns a list of object files for the given build directory.
Args:
build_dir: Path to build an object file list for.
Returns:
A python list of object file paths.
"""
object_files = []
for root, _, files in os.walk(build_dir):
# Strip leading build_dir from root.
root = root[len(build_dir) :]
for name in files:
_, ext = os.path.splitext(name)
if ext == ".o":
name = NormalizeFilename(os.path.join(root, name))
object_files.append(name)
CleanObjectFiles(object_files)
return object_files
def GetObjectToSourceMapping(source_files):
"""Returns a map of object file paths to source file paths.
Args:
source_files: List of source file paths.
Returns:
Map with object file paths as keys and source file paths as values.
"""
object_to_sources = {}
for name in source_files:
basename, _ = os.path.splitext(name)
key = basename + ".o"
object_to_sources[key] = name
return object_to_sources
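# Illustrative example (hypothetical paths, not executed): each object file
# maps back to the source file that produced it by swapping the extension:
#
#   >>> GetObjectToSourceMapping(["libavcodec/fft.c", "libavcodec/x86/fft.asm"])
#   {'libavcodec/fft.o': 'libavcodec/fft.c', 'libavcodec/x86/fft.o': 'libavcodec/x86/fft.asm'}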
def GetSourceFileSet(object_to_sources, object_files):
"""Determines set of source files given object files.
Args:
object_to_sources: A dictionary of object to source file paths.
object_files: A list of object file paths.
Returns:
A python set of source files required to build said objects.
"""
source_set = set()
for name in object_files:
# Intentionally raise a KeyError if the lookup fails, since something is
# messed up with our source and object lists.
source_set.add(object_to_sources[name])
return source_set
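# Illustrative example (hypothetical inputs, not executed):
#
#   >>> GetSourceFileSet({"libavutil/fifo.o": "libavutil/fifo.c"},
#   ...                  ["libavutil/fifo.o"])
#   {'libavutil/fifo.c'}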
SourceListCondition = collections.namedtuple(
"SourceListCondition", [Attr.ARCHITECTURE, Attr.PROFILE]
)
class SourceSet(object):
"""A SourceSet represents a set of source files that are built on each of the
given set of SourceListConditions.
"""
def __init__(self, sources, conditions):
"""Creates a SourceSet.
Args:
sources: a python set of source files
conditions: a python set of SourceListConditions where the given sources
are to be used.
"""
self.sources = sources
self.conditions = conditions
def __repr__(self):
return "{%s, %s}" % (self.sources, self.conditions)
def __eq__(self, other):
return self.sources == other.sources and self.conditions == other.conditions
def __hash__(self):
return hash((frozenset(self.sources), frozenset(self.conditions)))
def Intersect(self, other):
"""Return a new SourceSet containing the set of source files common to both
this and the other SourceSet.
The resulting SourceSet represents the union of the architectures and
profiles of this and the other SourceSet.
"""
return SourceSet(
self.sources & other.sources, self.conditions | other.conditions
)
def Difference(self, other):
"""Return a new SourceSet containing the set of source files not present in
the other SourceSet.
The resulting SourceSet represents the intersection of the
SourceListConditions from this and the other SourceSet.
"""
return SourceSet(
self.sources - other.sources, self.conditions & other.conditions
)
def IsEmpty(self):
"""An empty SourceSet is defined as containing no source files or no
conditions (i.e., a set of files that aren't built on anywhere).
"""
return len(self.sources) == 0 or len(self.conditions) == 0
def GenerateGnStanza(self):
"""Generates a gn conditional stanza representing this source set."""
conjunctions = []
for condition in self.conditions:
if condition.ARCHITECTURE == "*":
arch_condition = None
else:
arch_condition = 'current_cpu == "%s"' % condition.ARCHITECTURE
# Profile conditions look like:
#   ffmpeg_profile == "default"
if condition.PROFILE == "*":
profile_condition = None
else:
profile_condition = 'ffmpeg_profile == "%s"' % condition.PROFILE
conjunction_parts = filter(None, [arch_condition, profile_condition])
conjunctions.append(" && ".join(conjunction_parts))
# If there is more than one clause, wrap each condition in parens
# before joining.
if len(conjunctions) > 1:
conjunctions = ["(%s)" % x for x in conjunctions]
# Sort conjunctions to make order deterministic.
joined_conjunctions = " || ".join(sorted(conjunctions))
stanza = ""
# Output a conditional wrapper around stanzas if necessary.
if joined_conjunctions:
stanza += GN_CONDITION_BEGIN % joined_conjunctions
def indent(s):
return "  %s" % s
else:
def indent(s):
return s
sources = sorted(n.replace("\\", "/") for n in self.sources)
# Write out all C sources.
c_sources = list(filter(IsCFile, sources))
if c_sources:
stanza += indent(GN_C_SOURCES_BEGIN)
for name in c_sources:
stanza += indent(GN_SOURCE_ITEM % (name))
stanza += indent(GN_SOURCE_END)
# Write out all GAS sources.
gas_sources = list(filter(IsGasFile, sources))
if gas_sources:
stanza += indent(GN_GAS_SOURCES_BEGIN)
for name in gas_sources:
stanza += indent(GN_SOURCE_ITEM % (name))
stanza += indent(GN_SOURCE_END)
# Write out all YASM sources. Wrap the filter iterator in list() so the
# emptiness check below works; a bare filter object is always truthy.
yasm_sources = list(filter(IsYasmFile, sources))
if yasm_sources:
stanza += indent(GN_YASM_SOURCES_BEGIN)
for name in yasm_sources:
stanza += indent(GN_SOURCE_ITEM % (name))
stanza += indent(GN_SOURCE_END)
# Close the conditional if necessary.
if joined_conjunctions:
stanza += GN_CONDITION_END
else:
stanza += "\n" # Make up the spacing for the removed conditional.
return stanza
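# Illustrative example of the SourceSet algebra (hypothetical inputs, not
# executed): Intersect() narrows the sources while widening the conditions;
# Difference() keeps the sources unique to one side:
#
#   >>> x64 = SourceListCondition("x64", "default")
#   >>> arm64 = SourceListCondition("arm64", "default")
#   >>> a = SourceSet({"fft.c", "mdct.c"}, {x64})
#   >>> b = SourceSet({"fft.c", "vp9.c"}, {arm64})
#   >>> common = a.Intersect(b)
#   >>> common.sources, len(common.conditions)
#   ({'fft.c'}, 2)
#   >>> a.Difference(common).sources
#   {'mdct.c'}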
def CreatePairwiseDisjointSets(sets):
"""Given a list of SourceSet objects, returns the pairwise disjoint sets.
NOTE: This isn't the most efficient algorithm, but given how infrequently we
need to run this and how small the input is, we'll leave it as is.
"""
disjoint_sets = list(sets)
new_sets = True
while new_sets:
new_sets = False
for pair in itertools.combinations(disjoint_sets, 2):
intersection = pair[0].Intersect(pair[1])
# The two sets are already disjoint, nothing to do.
if intersection.IsEmpty():
continue
# Add the resulting intersection set.
new_sets = True
disjoint_sets.append(intersection)
# Calculate the resulting differences for this pair of sets.
#
# If a difference is empty, remove that set from the list; otherwise
# replace the set with its difference.
for p in pair:
i = disjoint_sets.index(p)
difference = p.Difference(intersection)
if difference.IsEmpty():
del disjoint_sets[i]
else:
disjoint_sets[i] = difference
# Restart the calculation since the list of disjoint sets has changed.
break
return disjoint_sets
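# Illustrative continuation of the SourceSet example above (not executed):
# the two overlapping sets decompose into three pairwise disjoint ones:
#
#   >>> [s.sources for s in CreatePairwiseDisjointSets([a, b])]
#   [{'mdct.c'}, {'vp9.c'}, {'fft.c'}]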
def GetAllMatchingConditions(conditions, condition_to_match):
"""Given a set of conditions, find those that match the condition_to_match.
Matches are found when every attribute of a condition either has the same
value as in condition_to_match, or when that attribute is a wildcard ("*")
in condition_to_match and therefore accepts any value.
"""
found_matches = set()
# Check all attributes of condition for matching values.
def accepts_all_values(attribute):
return getattr(condition_to_match, attribute) == "*"
attributes_to_check = [a for a in Attr if not accepts_all_values(a)]
# If every attribute is a wildcard, all conditions are considered matching.
if not attributes_to_check:
return conditions
# Check all conditions and accumulate matches.
for condition in conditions:
condition_matches = True
for attribute in attributes_to_check:
if getattr(condition, attribute) != getattr(condition_to_match, attribute):
condition_matches = False
break
if condition_matches:
found_matches.add(condition)
return found_matches
def GetAttributeValuesRange(attribute, condition):
"""Get the range of values for the given attribute considering the values
of all attributes in the given condition."""
if getattr(condition, attribute) == "*":
values = copy.copy(SUPPORT_MATRIX[attribute])
else:
values = set([getattr(condition, attribute)])
return values
def GenerateConditionExpansion(condition):
"""Expand wildcard in condition into all possible matching conditions."""
architectures = GetAttributeValuesRange(Attr.ARCHITECTURE, condition)
profiles = GetAttributeValuesRange(Attr.PROFILE, condition)
return set(
SourceListCondition(arch, profile)
for (arch, profile) in itertools.product(architectures, profiles)
)
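# Illustrative example (not executed; assumes SUPPORT_MATRIX lists the two
# architectures above plus a hypothetical "default" profile):
#
#   >>> GenerateConditionExpansion(SourceListCondition("*", "default"))
#   {SourceListCondition(ARCHITECTURE='x64', PROFILE='default'),
#    SourceListCondition(ARCHITECTURE='arm64', PROFILE='default')}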
def ReduceConditionalLogic(source_set):
"""Reduces the conditions for the given SourceSet.
The reduction leverages what we know about the space of possible combinations,
finding cases where the conditions span all possible values of a given attribute.
In such cases, these conditions can be flattened into a single condition with
the spanned attribute removed.
There is room for further reduction (e.g. Quine-McCluskey), not implemented
at this time."""
ConditionReduction = collections.namedtuple(
"ConditionReduction", "condition, matches"
)
reduced_conditions = set()
for condition in source_set.conditions:
condition_dict = condition._asdict()
for attribute in Attr:
# Set attribute value to wildcard and find matching attributes.
original_attribute_value = condition_dict[attribute]
condition_dict[attribute] = "*"
new_condition = SourceListCondition(**condition_dict)
# Conditions with wildcards can replace existing conditions iff the
# source set contains conditions covering all possible expansions
# of the wildcarded values.
matches = GetAllMatchingConditions(source_set.conditions, new_condition)
if matches == GenerateConditionExpansion(new_condition):
reduced_conditions.add(
ConditionReduction(new_condition, frozenset(matches))
)
else:
# This wildcard won't work, restore the original value.
condition_dict[attribute] = original_attribute_value
# Finally, find the most efficient reductions. Do a pairwise comparison of all
# reductions to de-dup and remove those that are covered by more inclusive
# conditions.
did_work = True
while did_work:
did_work = False
for reduction_pair in itertools.combinations(reduced_conditions, 2):
if reduction_pair[0].matches.issubset(reduction_pair[1].matches):
reduced_conditions.remove(reduction_pair[0])
did_work = True
break
elif reduction_pair[1].matches.issubset(reduction_pair[0].matches):
reduced_conditions.remove(reduction_pair[1])
did_work = True
break
# Apply the reductions to the source_set.
for reduction in reduced_conditions:
source_set.conditions.difference_update(reduction.matches)
source_set.conditions.add(reduction.condition)
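# Illustrative example (hypothetical, not executed; assumes SUPPORT_MATRIX
# contains both architectures and at least two profiles): conditions spanning
# every architecture for one profile collapse into a single wildcard:
#
#   >>> s = SourceSet({"fft.c"}, {SourceListCondition("x64", "default"),
#   ...                           SourceListCondition("arm64", "default")})
#   >>> ReduceConditionalLogic(s)
#   >>> s.conditions
#   {SourceListCondition(ARCHITECTURE='*', PROFILE='default')}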
def ParseArgs():
"""Parses the arguments and terminates program if they are not sane.
Returns:
The output of a successful call to parser.parse_args().
"""
parser = argparse.ArgumentParser()
parser.add_argument(
"roll_dir",
metavar="<roll dir>",
help="directory for build output",
)
parser.add_argument(
"--ffmpeg-dir",
default=".",
metavar="<path>",
help="directory containing config/ and CREDITS.fuchsia",
)
parser.add_argument(
"--ffmpeg-src-dir",
default="../../../../third_party/ffmpeg/src",
metavar="<ffmpeg root directory>",
help="directory containing ffmpeg source code",
)
parser.add_argument(
"--print_licenses",
dest="print_licenses",
default=False,
action="store_true",
help="print all licenses to console",
)
args = parser.parse_args()
if not os.path.exists(args.roll_dir):
parser.error(f"Roll build output directory {args.roll_dir} does not exist")
if not os.path.exists(args.ffmpeg_dir):
parser.error(f"Fuchsia ffmpeg directory {args.ffmpeg_dir} does not exist")
if not os.path.exists(args.ffmpeg_src_dir):
parser.error(f"FFmpeg source directory {args.ffmpeg_src_dir} does not exist")
return args
def SourceSetCompare(x, y):
"""Old-style cmp function (negative, zero, positive) that gives SourceSets a
deterministic order; the md5 digests below are only a final tiebreaker."""
if len(x.sources) != len(y.sources):
return len(x.sources) - len(y.sources)
if len(x.conditions) != len(y.conditions):
return len(x.conditions) - len(y.conditions)
if len(str(x.conditions)) != len(str(y.conditions)):
return len(str(x.conditions)) - len(str(y.conditions))
return int(hashlib.md5(str(x).encode("utf-8")).hexdigest(), 16) - int(
hashlib.md5(str(y).encode("utf-8")).hexdigest(), 16
)
def WriteGn(fd, disjoint_sets):
fd.write(COPYRIGHT)
fd.write(GN_HEADER)
# Generate conditional stanza for each disjoint source set.
for s in reversed(disjoint_sets):
fd.write(s.GenerateGnStanza())
# Lists of files that are exempt from searching in GetIncludedSources.
IGNORED_INCLUDE_FILES = [
# Fuchsia generated files
"config.h",
"config_components.h",
os.path.join("libavcodec", "bsf_list.c"),
os.path.join("libavcodec", "codec_list.c"),
os.path.join("libavcodec", "parser_list.c"),
os.path.join("libavformat", "demuxer_list.c"),
os.path.join("libavformat", "muxer_list.c"),
os.path.join("libavformat", "protocol_list.c"),
os.path.join("libavutil", "avconfig.h"),
os.path.join("libavutil", "ffversion.h"),
# Current configure values are set such that we don't include these (because
# of various defines), and we also don't generate them at all, so lookups
# will fail because they don't exist in our repository.
os.path.join("libavcodec", "aacps_tables.h"),
os.path.join("libavcodec", "aacps_fixed_tables.h"),
os.path.join("libavcodec", "aacsbr_tables.h"),
os.path.join("libavcodec", "aac_tables.h"),
os.path.join("libavcodec", "cabac_tables.h"),
os.path.join("libavcodec", "cbrt_tables.h"),
os.path.join("libavcodec", "cbrt_fixed_tables.h"),
os.path.join("libavcodec", "mpegaudio_tables.h"),
os.path.join("libavcodec", "mpegaudiodec_common_tables.h"),
os.path.join("libavcodec", "pcm_tables.h"),
os.path.join("libavcodec", "sinewin_tables.h"),
os.path.join("libavcodec", "sinewin_fixed_tables.h"),
]
# These files must never be included, and to enforce it, they must also not
# be present in the checkout.
MUST_BE_MISSING_INCLUDE_FILES = [
# This is referenced both ways.
os.path.join("macos_kperf.h"),
os.path.join("libavcodec", "macos_kperf.h"),
]
# Regex to find lines matching #include "some_dir/some_file.h".
# Also works for assembly files that use %include.
INCLUDE_REGEX = re.compile(r'[#%]\s*include\s+"([^"]+)"')
# Regex to find whacky includes that we might be overlooking (e.g. ones built
# with macros or defines).
EXOTIC_INCLUDE_REGEX = re.compile(r'[#%]\s*include\s+[^"<\s].+')
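# Illustrative matches (not executed):
#   INCLUDE_REGEX matches        #include "libavutil/x86/asm.h"
#                         and    %include "libavutil/x86/x86util.asm"
#   EXOTIC_INCLUDE_REGEX matches #include HEADER(something.h)
#   but neither matches          #include <stdio.h>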
def GetIncludedSources(file_path, source_dir, include_set, scan_only=False):
"""Recurse over include tree, accumulating absolute paths to all included
files (including the seed file) in include_set.
Pass in the set returned from previous calls to avoid re-walking parts of the
tree. Given file_path may be relative (to args.ffmpeg_src_dir) or absolute.
NOTE: This algorithm is greedy. It does not know which includes may be
excluded due to compile-time defines, so it considers any mentioned include.
NOTE: This algorithm makes hard assumptions about the include search paths.
Paths are checked in the order:
1. Directory of the file containing the #include directive
2. Directory specified by source_dir
NOTE: Files listed in IGNORED_INCLUDE_FILES will be ignored if not found. See
the reasons at the definition of IGNORED_INCLUDE_FILES.
"""
# Use source_dir to correctly resolve relative file paths. Use only absolute
# paths in the set to avoid same-name errors.
if not os.path.isabs(file_path):
file_path = os.path.abspath(os.path.join(source_dir, file_path))
file_path = os.path.normpath(file_path)
current_dir = os.path.dirname(file_path)
# Already processed this file, bail out.
if file_path in include_set:
return
if not scan_only:
include_set.add(file_path)
else:
print(f"WARNING: Not checking license for: {file_path}")
for line in open(file_path):
include_match = INCLUDE_REGEX.search(line)
if not include_match:
if EXOTIC_INCLUDE_REGEX.search(line):
print(f"WARNING: Investigate whacky include line: {line}")
continue
include_file_path = include_match.group(1)
# These may or may not be where the file lives. We store the candidates here
# and check their validity below.
include_path_in_current_dir = os.path.join(current_dir, include_file_path)
include_path_in_source_dir = os.path.join(source_dir, include_file_path)
resolved_include_path = ""
# Check if file is in current directory.
if os.path.isfile(include_path_in_current_dir):
resolved_include_path = include_path_in_current_dir
# Else, check source_dir (should be FFmpeg root).
elif os.path.isfile(include_path_in_source_dir):
resolved_include_path = os.path.relpath(
include_path_in_source_dir, source_dir
)
# Else, we couldn't find it :(.
elif include_file_path in IGNORED_INCLUDE_FILES:
continue
elif include_file_path in MUST_BE_MISSING_INCLUDE_FILES:
continue
else:
exit("Failed to find file " + include_file_path)
# At this point we've found the file. Check if it's in our ignore list, which
# would mean the list should be updated to no longer mention this file.
ignored = False
if include_file_path in IGNORED_INCLUDE_FILES:
ignored = True
print(
f"Found {include_file_path} in IGNORED_INCLUDE_FILES. "
"Consider updating the list to remove this file."
)
# Also make sure that it's not in our MUST_BE_MISSING list, since it's not
# missing anymore.
if include_file_path in MUST_BE_MISSING_INCLUDE_FILES:
exit("Found file " + include_file_path + " that should be missing!")
GetIncludedSources(
resolved_include_path, source_dir, include_set, scan_only=ignored
)
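# Illustrative call (hypothetical layout, not executed): seeds the walk with
# one source file; include_set accumulates absolute paths across calls:
#
#   >>> include_set = set()
#   >>> GetIncludedSources("libavcodec/fft.c", "/path/to/ffmpeg/src", include_set)
#   >>> any(p.endswith("fft.c") for p in include_set)
#   True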
def FixBasenameCollision(old_path, new_path, content):
with open(os.path.join("src", new_path), "w") as new_file:
new_file.write(content)
def UpdateCredits(sources_to_check, source_dir, build_dir):
print("Updating ffmpeg credits")
updater = credits_updater.CreditsUpdater(source_dir)
for source_name in sources_to_check:
updater.ProcessFile(source_name)
updater.PrintStats()
updater.WriteCredits(build_dir)
def CopyConfig(source_dir, dest_dir):
files = [
"config.h",
"config_components.h",
"libavutil/avconfig.h",
"libavutil/ffversion.h",
"libavcodec/bsf_list.c",
"libavcodec/codec_list.c",
"libavcodec/parser_list.c",
"libavformat/demuxer_list.c",
"libavformat/muxer_list.c",
"libavformat/protocol_list.c",
]
if source_dir.endswith("x64"):
files.append("config.asm")
for file in files:
from_path = os.path.join(source_dir, file)
if not os.path.exists(from_path):
exit(f"Error copying {from_path}, file does not exist")
else:
to_path = os.path.join(dest_dir, file)
if not os.path.exists(os.path.dirname(to_path)):
os.makedirs(os.path.dirname(to_path))
shutil.copy2(from_path, to_path)
def main():
args = ParseArgs()
# Generate map of FFmpeg source files.
roll_dir = args.roll_dir
ffmpeg_dir = args.ffmpeg_dir
ffmpeg_src_dir = args.ffmpeg_src_dir
source_files = GetSourceFiles(ffmpeg_src_dir)
object_to_sources = GetObjectToSourceMapping(source_files)
SUPPORT_MATRIX[Attr.PROFILE] = set(os.listdir(roll_dir))
if not SUPPORT_MATRIX[Attr.PROFILE]:
exit(
"ERROR: found no build output in roll_dir. "
+ "Is the roll_dir (%s) argument correct?" % roll_dir
)
print(f"Profiles found: {SUPPORT_MATRIX[Attr.PROFILE]}")
sets = []
for arch in SUPPORT_MATRIX[Attr.ARCHITECTURE]:
for profile in SUPPORT_MATRIX[Attr.PROFILE]:
# Build directories are of the form <roll_dir>/<profile>/<arch>.
build_dir = os.path.join(args.roll_dir, profile, arch)
if not os.path.exists(build_dir):
continue
print(f"Processing build directory {build_dir}")
object_files = GetObjectFiles(build_dir)
# Generate the set of source files to build said profile.
s = GetSourceFileSet(object_to_sources, object_files)
sets.append(SourceSet(s, set([SourceListCondition(arch, profile)])))
CopyConfig(build_dir, os.path.join(ffmpeg_dir, "config", profile, arch))
sets = CreatePairwiseDisjointSets(sets)
for source_set in sets:
ReduceConditionalLogic(source_set)
if not sets:
exit(
"ERROR: failed to find any source sets. "
+ "Are output directory (%s) and/or ffmpeg-src-dir (%s) arguments correct?"
% (roll_dir, args.ffmpeg_src_dir)
)
# Sort sets prior to further processing and printing to make order
# deterministic between runs.
sets = sorted(sets, key=functools.cmp_to_key(SourceSetCompare))
# Build up set of all sources and includes.
sources_to_check = set()
for source_set in sets:
for source in source_set.sources:
GetIncludedSources(source, ffmpeg_src_dir, sources_to_check)
UpdateCredits(sources_to_check, ffmpeg_src_dir, ffmpeg_dir)
gn_file_name = os.path.join(".", "ffmpeg_generated.gni")
print(f"Writing: {gn_file_name}")
with open(gn_file_name, "w") as fd:
WriteGn(fd, sets)
if __name__ == "__main__":
main()