#!/usr/bin/env python3
"Script to generate a build order respecting package dependencies."

import os
import re
import sys

from itertools import filterfalse

def unique_everseen(iterable, key=None):
    """List unique elements, preserving order. Remember all elements ever seen.

    See https://docs.python.org/3/library/itertools.html#itertools-recipes

    Examples:
    unique_everseen('AAAABBBCCDAABBB') --> A B C D
    unique_everseen('ABBCcAD', str.lower) --> A B C D"""
    seen = set()
    seen_add = seen.add
    if key is None:
        for element in filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element

def die(msg):
    "Exit the process with an error message."
    sys.exit('ERROR: ' + msg)

def parse_build_file_dependencies(path):
    "Extract the dependencies of a build.sh or *.subpackage.sh file."
    pkg_dep_prefix = 'TERMUX_PKG_DEPENDS='
    pkg_build_dep_prefix = 'TERMUX_PKG_BUILD_DEPENDS='
    subpkg_dep_prefix = 'TERMUX_SUBPKG_DEPENDS='
    dependencies = []

    with open(path, encoding="utf-8") as build_script:
        prefix = None
        for line in build_script:
            if line.startswith(pkg_dep_prefix):
                prefix = pkg_dep_prefix
            elif line.startswith(pkg_build_dep_prefix):
                prefix = pkg_build_dep_prefix
            elif line.startswith(subpkg_dep_prefix):
                prefix = subpkg_dep_prefix
            else:
                continue

            dependencies_string = line[len(prefix):]
            for char in "\"'\n":
                dependencies_string = dependencies_string.replace(char, '')

            for dependency_value in dependencies_string.split(','):
                # Remove parenthesized version qualifiers as in "gcc (>= 5.0)":
                dependency_value = re.sub(r'\(.*?\)', '', dependency_value).strip()
                dependency_value = re.sub('-dev$', '', dependency_value)
                dependencies.append(dependency_value)

    return set(dependencies)
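
# Illustrative example (hypothetical values, not taken from a real package): a
# line such as
#
#   TERMUX_PKG_DEPENDS="libandroid-support, ncurses (>= 6.0), readline-dev"
#
# parses to {'libandroid-support', 'ncurses', 'readline'}: quotes are dropped,
# version qualifiers in parentheses are removed and a trailing "-dev" is stripped.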

class TermuxPackage(object):
    "A main package definition represented by a directory with a build.sh file."
    def __init__(self, dir_path):
        self.dir = dir_path
        self.name = os.path.basename(self.dir)

        # search package build.sh
        build_sh_path = os.path.join(self.dir, 'build.sh')
        if not os.path.isfile(build_sh_path):
            raise Exception("build.sh not found for package '" + self.name + "'")

        self.deps = parse_build_file_dependencies(build_sh_path)
        if 'libandroid-support' not in self.deps and self.name != 'libandroid-support':
            # Every package may depend on libandroid-support without declaring it:
            self.deps.add('libandroid-support')

        # search subpackages
        self.subpkgs = []

        for filename in os.listdir(self.dir):
            if not filename.endswith('.subpackage.sh'):
                continue
            subpkg = TermuxSubPackage(self.dir + '/' + filename, self)

            self.subpkgs.append(subpkg)
            self.deps |= subpkg.deps

        # Do not depend on itself
        self.deps.discard(self.name)
        # Do not depend on any sub package
        self.deps.difference_update([subpkg.name for subpkg in self.subpkgs])

        self.needed_by = set()  # Populated outside constructor, reverse of deps.

    def __repr__(self):
        return "<{} '{}'>".format(self.__class__.__name__, self.name)

    def recursive_dependencies(self, pkgs_map):
        "All the dependencies of the package, both direct and indirect."
        result = []
        for dependency_name in sorted(self.deps):
            dependency_package = pkgs_map[dependency_name]
            result += dependency_package.recursive_dependencies(pkgs_map)
            result += [dependency_package]
        return unique_everseen(result)
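
# Example sketch (hypothetical dependency chain): if "vim" depends on "ncurses"
# and "ncurses" depends on "libandroid-support", then
# pkgs_map['vim'].recursive_dependencies(pkgs_map) yields the libandroid-support
# and ncurses packages, in that order, with duplicates removed by unique_everseen().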

class TermuxSubPackage:
    "A sub-package represented by a ${PACKAGE_NAME}.subpackage.sh file."
    def __init__(self, subpackage_file_path, parent):
        if parent is None:
            raise Exception("SubPackages should have a parent")

        self.name = os.path.basename(subpackage_file_path).split('.subpackage.sh')[0]
        self.parent = parent
        self.deps = parse_build_file_dependencies(subpackage_file_path)

    def __repr__(self):
        return "<{} '{}' parent='{}'>".format(self.__class__.__name__, self.name, self.parent)

def read_packages_from_directories(directories):
    """Construct a map from package name to TermuxPackage.

    For subpackages this maps from the subpackage name to the parent package."""
    pkgs_map = {}
    all_packages = []

    for package_dir in directories:
        for pkgdir_name in sorted(os.listdir(package_dir)):
            dir_path = package_dir + '/' + pkgdir_name
            if os.path.isfile(dir_path + '/build.sh'):
                new_package = TermuxPackage(package_dir + '/' + pkgdir_name)

                if new_package.name in pkgs_map:
                    die('Duplicated package: ' + new_package.name)
                else:
                    pkgs_map[new_package.name] = new_package
                all_packages.append(new_package)

                for subpkg in new_package.subpkgs:
                    if subpkg.name in pkgs_map:
                        die('Duplicated package: ' + subpkg.name)
                    else:
                        pkgs_map[subpkg.name] = new_package
                    all_packages.append(subpkg)

    for pkg in all_packages:
        for dependency_name in pkg.deps:
            if dependency_name not in pkgs_map:
                die('Package %s depends on non-existing package "%s"' % (pkg.name, dependency_name))
            dep_pkg = pkgs_map[dependency_name]
            if not isinstance(pkg, TermuxSubPackage):
                dep_pkg.needed_by.add(pkg)
    return pkgs_map
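
# Note on the resulting map (hypothetical names): if "libcurl" were a subpackage
# declared by the "curl" package, then pkgs_map['libcurl'] would be the
# TermuxPackage for "curl", so a dependency on a subpackage resolves to building
# its parent package.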

def generate_full_buildorder(pkgs_map):
    "Generate a build order for building all packages."
    build_order = []

    # List of all TermuxPackages without dependencies
    leaf_pkgs = [pkg for name, pkg in pkgs_map.items() if not pkg.deps]

    if not leaf_pkgs:
        die('No package without dependencies - where to start?')

    # Sort alphabetically:
    pkg_queue = sorted(leaf_pkgs, key=lambda p: p.name)

    # Topological sorting
    visited = set()

    # Tracks non-visited deps for each package
    remaining_deps = {}
    for name, pkg in pkgs_map.items():
        remaining_deps[name] = set(pkg.deps)
        for subpkg in pkg.subpkgs:
            remaining_deps[subpkg.name] = set(subpkg.deps)
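
    # Process the queue in Kahn-algorithm style: each emitted package is removed
    # from the remaining_deps of the packages that need it, and any package left
    # with no remaining deps becomes ready to join the queue.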
    while pkg_queue:
        pkg = pkg_queue.pop(0)
        if pkg.name in visited:
            continue

        # print("Processing {}:".format(pkg.name), pkg.needed_by)
        visited.add(pkg.name)
        build_order.append(pkg)

        for other_pkg in sorted(pkg.needed_by, key=lambda p: p.name):
            # Remove this pkg from deps
            remaining_deps[other_pkg.name].discard(pkg.name)
            # ... and all its subpackages
            remaining_deps[other_pkg.name].difference_update(
                [subpkg.name for subpkg in pkg.subpkgs]
            )

            if not remaining_deps[other_pkg.name]:  # all deps were already appended?
                pkg_queue.append(other_pkg)  # should be processed

    if set(pkgs_map.values()) != set(build_order):
        print("ERROR: Cycle exists. Remaining: ")
        for name, pkg in pkgs_map.items():
            if pkg not in build_order:
                print(name, remaining_deps[name])

        sys.exit(1)

    return build_order

def generate_target_buildorder(target_path, pkgs_map):
    "Generate a build order for building the dependencies of the specified package."
    if target_path.endswith('/'):
        target_path = target_path[:-1]

    package_name = os.path.basename(target_path)
    package = pkgs_map[package_name]
    return package.recursive_dependencies(pkgs_map)
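
# Note: the order returned above contains only the dependencies of the target
# package, not the target package itself.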

def main():
    "Generate the build order either for all packages or a specific one."
    packages_directories = ['packages']
    full_buildorder = len(sys.argv) == 1
    if not full_buildorder:
        packages_real_path = os.path.realpath('packages')
        for path in sys.argv[1:]:
            if not os.path.isdir(path):
                die('Not a directory: ' + path)
            if path.endswith('/'):
                path = path[:-1]
            parent_path = os.path.dirname(path)
            if packages_real_path != os.path.realpath(parent_path):
                packages_directories.append(parent_path)

    pkgs_map = read_packages_from_directories(packages_directories)

    if full_buildorder:
        build_order = generate_full_buildorder(pkgs_map)
    else:
        build_order = generate_target_buildorder(sys.argv[1], pkgs_map)

    for pkg in build_order:
        print(pkg.dir)

if __name__ == '__main__':
    main()
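
# Usage sketch (the script and package names here are illustrative; adjust to
# where this file lives in the repository):
#
#   ./buildorder.py                  # build order covering every package
#   ./buildorder.py packages/somepkg # dependencies to build before "somepkg"
#
# Each output line is a package directory, printed so that every package comes
# after the packages it depends on.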