Bump buildroot to 2019.02

@@ -1,5 +1,6 @@
# Copyright (C) 2010-2013 Thomas Petazzoni <thomas.petazzoni@free-electrons.com>

import logging
import sys
import subprocess

@@ -7,18 +8,18 @@ import subprocess
# Execute the "make <pkg>-show-version" command to get the version of a given
# list of packages, and return the version formatted as a Python dictionary.
def get_version(pkgs):
    sys.stderr.write("Getting version for %s\n" % pkgs)
    logging.info("Getting version for %s" % pkgs)
    cmd = ["make", "-s", "--no-print-directory"]
    for pkg in pkgs:
        cmd.append("%s-show-version" % pkg)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    output = p.communicate()[0]
    if p.returncode != 0:
        sys.stderr.write("Error getting version %s\n" % pkgs)
        logging.error("Error getting version %s" % pkgs)
        sys.exit(1)
    output = output.split("\n")
    if len(output) != len(pkgs) + 1:
        sys.stderr.write("Error getting version\n")
        logging.error("Error getting version")
        sys.exit(1)
    version = {}
    for i in range(0, len(pkgs)):

@@ -28,18 +29,18 @@ def get_version(pkgs):


def _get_depends(pkgs, rule):
    sys.stderr.write("Getting dependencies for %s\n" % pkgs)
    logging.info("Getting dependencies for %s" % pkgs)
    cmd = ["make", "-s", "--no-print-directory"]
    for pkg in pkgs:
        cmd.append("%s-%s" % (pkg, rule))
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    output = p.communicate()[0]
    if p.returncode != 0:
        sys.stderr.write("Error getting dependencies %s\n" % pkgs)
        logging.error("Error getting dependencies %s\n" % pkgs)
        sys.exit(1)
    output = output.split("\n")
    if len(output) != len(pkgs) + 1:
        sys.stderr.write("Error getting dependencies\n")
        logging.error("Error getting dependencies")
        sys.exit(1)
    deps = {}
    for i in range(0, len(pkgs)):
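
Both helpers follow the same pattern: batch several "<pkg>-show-version" (or "<pkg>-<rule>") make targets into one invocation, then zip the one-line-per-package output back onto the package names. A minimal standalone sketch of that parsing step, with hypothetical sample output and an assumed loop body (the real body is truncated by the hunk above):

# Hypothetical output of "make -s --no-print-directory busybox-show-version
# zlib-show-version": one version per line, plus one empty element after
# splitting on the final newline (hence the len(pkgs) + 1 check above).
pkgs = ["busybox", "zlib"]
output = "1.29.3\n1.2.11\n".split("\n")

version = {}
for i in range(0, len(pkgs)):
    version[pkgs[i]] = output[i]  # assumed body of the loop truncated above

print(version)  # {'busybox': '1.29.3', 'zlib': '1.2.11'}
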

@@ -20,6 +20,10 @@ declare -a IGNORES=(
    # pru-software-support) legitimately install ELF binaries that
    # are not for the target architecture
    "/usr/share"

    # Skip files in /lib/grub, since it is possible to have it
    # for a different architecture (e.g. i386 grub on x86_64).
    "/lib/grub"
)

while getopts p:l:r:a:i: OPT ; do

@@ -39,6 +39,11 @@ is_elf() {
        |grep -E 'Requesting program interpreter:' >/dev/null 2>&1
}

# This function tells whether a given ELF executable (first argument)
# needs a RPATH pointing to the host library directory or not. It
# needs such an RPATH if at least of the libraries used by the ELF
# executable is available in the host library directory. This function
# returns 0 when a RPATH is needed, 1 otherwise.
elf_needs_rpath() {
    local file="${1}"
    local hostdir="${2}"

@@ -54,6 +59,13 @@ elf_needs_rpath() {
    return 1
}

# This function checks whether at least one of the RPATH of the given
# ELF executable (first argument) properly points to the host library
# directory (second argument), either through an absolute RPATH or a
# relative RPATH. Having such a RPATH will make sure the ELF
# executable will find at runtime the shared libraries it depends
# on. This function returns 0 when a proper RPATH was found, or 1
# otherwise.
check_elf_has_rpath() {
    local file="${1}"
    local hostdir="${2}"

@@ -63,7 +75,8 @@ check_elf_has_rpath() {
        for dir in ${rpath//:/ }; do
            # Remove duplicate and trailing '/' for proper match
            dir="$( sed -r -e 's:/+:/:g; s:/$::;' <<<"${dir}" )"
            [ "${dir}" = "${hostdir}/lib" -o "${dir}" = "\$ORIGIN/../lib" ] && return 0
            [ "${dir}" = "${hostdir}/lib" ] && return 0
            [ "${dir}" = "\$ORIGIN/../lib" ] && return 0
        done
    done < <( readelf -d "${file}" \
        |sed -r -e '/.* \(R(UN)?PATH\) +Library r(un)?path: \[(.+)\]$/!d' \
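
The hunk above splits the old compound test into two separate tests but keeps the rule: an RPATH entry is acceptable if it is either the absolute host library directory or the relative $ORIGIN/../lib form, after collapsing duplicate and trailing slashes. A small Python sketch of the same matching rule, on made-up rpath strings rather than real readelf output:

import re

def rpath_ok(rpath, hostdir):
    # Same normalization as the sed call above: collapse duplicate '/' and
    # drop a trailing '/', then compare against the two accepted forms.
    for d in rpath.split(":"):
        d = re.sub(r"/+", "/", d).rstrip("/")
        if d == hostdir + "/lib" or d == "$ORIGIN/../lib":
            return True
    return False

# Made-up rpath strings, just to exercise the rule.
print(rpath_ok("/tmp/junk:$ORIGIN/../lib", "/home/br/output/host"))  # True
print(rpath_ok("/usr/lib/", "/home/br/output/host"))                 # False
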

bsp/buildroot/support/scripts/check-merged-usr.sh (new executable file, 39 lines)
@@ -0,0 +1,39 @@
#!/bin/sh
#
# Check if a given custom skeleton or overlay complies to the merged /usr
# requirements:
#   /
#   /bin -> usr/bin
#   /lib -> usr/lib
#   /sbin -> usr/sbin
#   /usr/bin/
#   /usr/lib/
#   /usr/sbin/
#
# Output: the list of non-compliant paths (empty if compliant).
#

# Extract the inode numbers for all of those directories. In case any is
# a symlink, we want to get the inode of the pointed-to directory, so we
# append '/.' to be sure we get the target directory. Since the symlinks
# can be anyway (/bin -> /usr/bin or /usr/bin -> /bin), we do that for
# all of them.
#
lib_inode=$(stat -c '%i' "${1}/lib/." 2>/dev/null)
bin_inode=$(stat -c '%i' "${1}/bin/." 2>/dev/null)
sbin_inode=$(stat -c '%i' "${1}/sbin/." 2>/dev/null)
usr_lib_inode=$(stat -c '%i' "${1}/usr/lib/." 2>/dev/null)
usr_bin_inode=$(stat -c '%i' "${1}/usr/bin/." 2>/dev/null)
usr_sbin_inode=$(stat -c '%i' "${1}/usr/sbin/." 2>/dev/null)

not_merged_dirs=""
test -z "$lib_inode" || \
    test "$lib_inode" = "$usr_lib_inode" || \
    not_merged_dirs="/lib"
test -z "$bin_inode" || \
    test "$bin_inode" = "$usr_bin_inode" || \
    not_merged_dirs="$not_merged_dirs /bin"
test -z "$sbin_inode" || \
    test "$sbin_inode" = "$usr_sbin_inode" || \
    not_merged_dirs="$not_merged_dirs /sbin"
echo "${not_merged_dirs# }"
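
The check relies on the fact that in a merged-/usr layout /bin, /lib and /sbin are symlinks into /usr, so each top-level directory resolves to the same inode as its /usr counterpart. A rough Python equivalent of the same inode comparison, with the overlay path being a placeholder:

import os

def inode(path):
    # os.stat() follows symlinks, which is what the trailing '/.' achieves
    # in the shell version; a missing path simply yields None.
    try:
        return os.stat(path).st_ino
    except OSError:
        return None

def not_merged_dirs(root):
    bad = []
    for d in ("lib", "bin", "sbin"):
        top = inode(os.path.join(root, d))
        usr = inode(os.path.join(root, "usr", d))
        if top is not None and top != usr:
            bad.append("/" + d)
    return " ".join(bad)

# "/path/to/overlay" is a placeholder; prints the offending directories, if any.
print(not_merged_dirs("/path/to/overlay"))
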

@@ -1,7 +1,6 @@
#!/usr/bin/env python

import sys
import csv
import argparse
from collections import defaultdict

bsp/buildroot/support/scripts/generate-gitlab-ci-yml (new executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/usr/bin/env bash
set -e
set -o pipefail

input="${1}"

cat "${input}"

(
    cd configs
    LC_ALL=C ls -1 *_defconfig
) \
    | sed 's/$/: { extends: .defconfig }/'

./support/testing/run-tests -l 2>&1 \
    | sed -r -e '/^test_run \((.*)\).*/!d; s//\1: { extends: .runtime_test }/' \
    | LC_ALL=C sort
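
The script simply emits one GitLab CI job per defconfig and per runtime test, each extending a template job defined in the input file. The same transformation sketched in Python, on hypothetical defconfig and test names:

defconfigs = ["qemu_x86_64_defconfig", "raspberrypi3_defconfig"]  # hypothetical names
tests = ["tests.package.test_python.TestPython"]                  # hypothetical name

for cfg in sorted(defconfigs):
    print("%s: { extends: .defconfig }" % cfg)
for test in sorted(tests):
    print("%s: { extends: .runtime_test }" % test)
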

@@ -69,14 +69,14 @@ import matplotlib.font_manager as fm  # noqa: E402
import csv  # noqa: E402
import argparse  # noqa: E402

steps = ['extract', 'patch', 'configure', 'build',
steps = ['download', 'extract', 'patch', 'configure', 'build',
         'install-target', 'install-staging', 'install-images',
         'install-host']

default_colors = ['#e60004', '#009836', '#2e1d86', '#ffed00',
default_colors = ['#8d02ff', '#e60004', '#009836', '#2e1d86', '#ffed00',
                  '#0068b5', '#f28e00', '#940084', '#97c000']

alternate_colors = ['#00e0e0', '#3f7f7f', '#ff0000', '#00c000',
alternate_colors = ['#ffbe0a', '#96bdff', '#3f7f7f', '#ff0000', '#00c000',
                    '#0080ff', '#c000ff', '#00eeee', '#e0e000']


@@ -260,7 +260,7 @@ def read_data(input_file):
        return None

    for row in reader:
        time = int(row[0].strip())
        time = float(row[0].strip())
        state = row[1].strip()
        step = row[2].strip()
        pkg = row[3].strip()

@@ -21,6 +21,7 @@
#
# Copyright (C) 2010-2013 Thomas Petazzoni <thomas.petazzoni@free-electrons.com>

import logging
import sys
import subprocess
import argparse

@@ -31,96 +32,6 @@ import brpkgutil
# Modes of operation:
MODE_FULL = 1  # draw full dependency graph for all selected packages
MODE_PKG = 2  # draw dependency graph for a given package
mode = 0

# Limit drawing the dependency graph to this depth. 0 means 'no limit'.
max_depth = 0

# Whether to draw the transitive dependencies
transitive = True

parser = argparse.ArgumentParser(description="Graph packages dependencies")
parser.add_argument("--check-only", "-C", dest="check_only", action="store_true", default=False,
                    help="Only do the dependency checks (circular deps...)")
parser.add_argument("--outfile", "-o", metavar="OUT_FILE", dest="outfile",
                    help="File in which to generate the dot representation")
parser.add_argument("--package", '-p', metavar="PACKAGE",
                    help="Graph the dependencies of PACKAGE")
parser.add_argument("--depth", '-d', metavar="DEPTH", dest="depth", type=int, default=0,
                    help="Limit the dependency graph to DEPTH levels; 0 means no limit.")
parser.add_argument("--stop-on", "-s", metavar="PACKAGE", dest="stop_list", action="append",
                    help="Do not graph past this package (can be given multiple times)." +
                         " Can be a package name or a glob, " +
                         " 'virtual' to stop on virtual packages, or " +
                         "'host' to stop on host packages.")
parser.add_argument("--exclude", "-x", metavar="PACKAGE", dest="exclude_list", action="append",
                    help="Like --stop-on, but do not add PACKAGE to the graph.")
parser.add_argument("--colours", "-c", metavar="COLOR_LIST", dest="colours",
                    default="lightblue,grey,gainsboro",
                    help="Comma-separated list of the three colours to use" +
                         " to draw the top-level package, the target" +
                         " packages, and the host packages, in this order." +
                         " Defaults to: 'lightblue,grey,gainsboro'")
parser.add_argument("--transitive", dest="transitive", action='store_true',
                    default=False)
parser.add_argument("--no-transitive", dest="transitive", action='store_false',
                    help="Draw (do not draw) transitive dependencies")
parser.add_argument("--direct", dest="direct", action='store_true', default=True,
                    help="Draw direct dependencies (the default)")
parser.add_argument("--reverse", dest="direct", action='store_false',
                    help="Draw reverse dependencies")
args = parser.parse_args()

check_only = args.check_only

if args.outfile is None:
    outfile = sys.stdout
else:
    if check_only:
        sys.stderr.write("don't specify outfile and check-only at the same time\n")
        sys.exit(1)
    outfile = open(args.outfile, "w")

if args.package is None:
    mode = MODE_FULL
else:
    mode = MODE_PKG
    rootpkg = args.package

max_depth = args.depth

if args.stop_list is None:
    stop_list = []
else:
    stop_list = args.stop_list

if args.exclude_list is None:
    exclude_list = []
else:
    exclude_list = args.exclude_list

transitive = args.transitive

if args.direct:
    get_depends_func = brpkgutil.get_depends
    arrow_dir = "forward"
else:
    if mode == MODE_FULL:
        sys.stderr.write("--reverse needs a package\n")
        sys.exit(1)
    get_depends_func = brpkgutil.get_rdepends
    arrow_dir = "back"

# Get the colours: we need exactly three colours,
# so no need not split more than 4
# We'll let 'dot' validate the colours...
colours = args.colours.split(',', 4)
if len(colours) != 3:
    sys.stderr.write("Error: incorrect colour list '%s'\n" % args.colours)
    sys.exit(1)
root_colour = colours[0]
target_colour = colours[1]
host_colour = colours[2]

allpkgs = []

@@ -129,7 +40,7 @@ allpkgs = []
# Buildroot PACKAGES and return it formatted as a Python list. This
# list is used as the starting point for full dependency graphs
def get_targets():
    sys.stderr.write("Getting targets\n")
    logging.info("Getting targets")
    cmd = ["make", "-s", "--no-print-directory", "show-targets"]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    output = p.communicate()[0].strip()

@@ -145,7 +56,7 @@ def get_targets():
# 'dependencies', which contains tuples of the form (pkg1 ->
# pkg2_on_which_pkg1_depends, pkg3 -> pkg4_on_which_pkg3_depends) and
# the function finally returns this list.
def get_all_depends(pkgs):
def get_all_depends(pkgs, get_depends_func):
    dependencies = []

    # Filter the packages for which we already have the dependencies

@@ -175,7 +86,7 @@ def get_all_depends(pkgs):
            deps.add(dep)

    if len(deps) != 0:
        newdeps = get_all_depends(deps)
        newdeps = get_all_depends(deps, get_depends_func)
        if newdeps is not None:
            dependencies += newdeps

@@ -189,41 +100,6 @@ def pkg_node_name(pkg):
    return "_" + pkg.replace("-", "")


TARGET_EXCEPTIONS = [
    "target-finalize",
    "target-post-image",
]

# In full mode, start with the result of get_targets() to get the main
# targets and then use get_all_depends() for all targets
if mode == MODE_FULL:
    targets = get_targets()
    dependencies = []
    allpkgs.append('all')
    filtered_targets = []
    for tg in targets:
        # Skip uninteresting targets
        if tg in TARGET_EXCEPTIONS:
            continue
        dependencies.append(('all', tg))
        filtered_targets.append(tg)
    deps = get_all_depends(filtered_targets)
    if deps is not None:
        dependencies += deps
    rootpkg = 'all'

# In pkg mode, start directly with get_all_depends() on the requested
# package
elif mode == MODE_PKG:
    dependencies = get_all_depends([rootpkg])

# Make the dependencies a dictionnary { 'pkg':[dep1, dep2, ...] }
dict_deps = {}
for dep in dependencies:
    if dep[0] not in dict_deps:
        dict_deps[dep[0]] = []
    dict_deps[dep[0]].append(dep[1])

# Basic cache for the results of the is_dep() function, in order to
# optimize the execution time. The cache is a dict of dict of boolean
# values. The key to the primary dict is "pkg", and the key of the

@@ -294,10 +170,15 @@ def remove_transitive_deps(pkg, deps):
    return new_d


# List of dependencies that all/many packages have, and that we want
# to trim when generating the dependency graph.
MANDATORY_DEPS = ['toolchain', 'skeleton']


# This function removes the dependency on some 'mandatory' package, like the
# 'toolchain' package, or the 'skeleton' package
def remove_mandatory_deps(pkg, deps):
    return [p for p in deps[pkg] if p not in ['toolchain', 'skeleton']]
    return [p for p in deps[pkg] if p not in MANDATORY_DEPS]


# This function will check that there is no loop in the dependency chain

@@ -312,10 +193,10 @@ def check_circular_deps(deps):
        chain.append(pkg)
        for p in deps[pkg]:
            if p in chain:
                sys.stderr.write("\nRecursion detected for : %s\n" % (p))
                logging.warning("\nRecursion detected for : %s" % (p))
                while True:
                    _p = chain.pop()
                    sys.stderr.write("which is a dependency of: %s\n" % (_p))
                    logging.warning("which is a dependency of: %s" % (_p))
                    if p == _p:
                        sys.exit(1)
            recurse(p)

@@ -329,42 +210,32 @@ def check_circular_deps(deps):

# This functions trims down the dependency list of all packages.
# It applies in sequence all the dependency-elimination methods.
def remove_extra_deps(deps):
def remove_extra_deps(deps, rootpkg, transitive):
    for pkg in list(deps.keys()):
        if not pkg == 'all':
        if not pkg == rootpkg:
            deps[pkg] = remove_mandatory_deps(pkg, deps)
    for pkg in list(deps.keys()):
        if not transitive or pkg == 'all':
        if not transitive or pkg == rootpkg:
            deps[pkg] = remove_transitive_deps(pkg, deps)
    return deps


check_circular_deps(dict_deps)
if check_only:
    sys.exit(0)

dict_deps = remove_extra_deps(dict_deps)
dict_version = brpkgutil.get_version([pkg for pkg in allpkgs
                                      if pkg != "all" and not pkg.startswith("root")])


# Print the attributes of a node: label and fill-color
def print_attrs(pkg):
def print_attrs(outfile, pkg, version, depth, colors):
    name = pkg_node_name(pkg)
    if pkg == 'all':
        label = 'ALL'
    else:
        label = pkg
    if pkg == 'all' or (mode == MODE_PKG and pkg == rootpkg):
        color = root_colour
    if depth == 0:
        color = colors[0]
    else:
        if pkg.startswith('host') \
           or pkg.startswith('toolchain') \
           or pkg.startswith('rootfs'):
            color = host_colour
            color = colors[2]
        else:
            color = target_colour
    version = dict_version.get(pkg)
            color = colors[1]
    if version == "virtual":
        outfile.write("%s [label = <<I>%s</I>>]\n" % (name, label))
    else:

@@ -372,12 +243,19 @@ def print_attrs(pkg):
        outfile.write("%s [color=%s,style=filled]\n" % (name, color))


done_deps = []


# Print the dependency graph of a package
def print_pkg_deps(depth, pkg):
def print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
                   arrow_dir, draw_graph, depth, max_depth, pkg, colors):
    if pkg in done_deps:
        return
    done_deps.append(pkg)
    print_attrs(pkg)
    if draw_graph:
        print_attrs(outfile, pkg, dict_version.get(pkg), depth, colors)
    elif depth != 0:
        outfile.write("%s " % pkg)
    if pkg not in dict_deps:
        return
    for p in stop_list:

@@ -401,14 +279,149 @@ def print_pkg_deps(depth, pkg):
                add = False
                break
        if add:
            outfile.write("%s -> %s [dir=%s]\n" % (pkg_node_name(pkg), pkg_node_name(d), arrow_dir))
            print_pkg_deps(depth + 1, d)
            if draw_graph:
                outfile.write("%s -> %s [dir=%s]\n" % (pkg_node_name(pkg), pkg_node_name(d), arrow_dir))
            print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
                           arrow_dir, draw_graph, depth + 1, max_depth, d, colors)


# Start printing the graph data
outfile.write("digraph G {\n")
def parse_args():
    parser = argparse.ArgumentParser(description="Graph packages dependencies")
    parser.add_argument("--check-only", "-C", dest="check_only", action="store_true", default=False,
                        help="Only do the dependency checks (circular deps...)")
    parser.add_argument("--outfile", "-o", metavar="OUT_FILE", dest="outfile",
                        help="File in which to generate the dot representation")
    parser.add_argument("--package", '-p', metavar="PACKAGE",
                        help="Graph the dependencies of PACKAGE")
    parser.add_argument("--depth", '-d', metavar="DEPTH", dest="depth", type=int, default=0,
                        help="Limit the dependency graph to DEPTH levels; 0 means no limit.")
    parser.add_argument("--stop-on", "-s", metavar="PACKAGE", dest="stop_list", action="append",
                        help="Do not graph past this package (can be given multiple times)." +
                             " Can be a package name or a glob, " +
                             " 'virtual' to stop on virtual packages, or " +
                             "'host' to stop on host packages.")
    parser.add_argument("--exclude", "-x", metavar="PACKAGE", dest="exclude_list", action="append",
                        help="Like --stop-on, but do not add PACKAGE to the graph.")
    parser.add_argument("--colors", "-c", metavar="COLOR_LIST", dest="colors",
                        default="lightblue,grey,gainsboro",
                        help="Comma-separated list of the three colors to use" +
                             " to draw the top-level package, the target" +
                             " packages, and the host packages, in this order." +
                             " Defaults to: 'lightblue,grey,gainsboro'")
    parser.add_argument("--transitive", dest="transitive", action='store_true',
                        default=False)
    parser.add_argument("--no-transitive", dest="transitive", action='store_false',
                        help="Draw (do not draw) transitive dependencies")
    parser.add_argument("--direct", dest="direct", action='store_true', default=True,
                        help="Draw direct dependencies (the default)")
    parser.add_argument("--reverse", dest="direct", action='store_false',
                        help="Draw reverse dependencies")
    parser.add_argument("--quiet", '-q', dest="quiet", action='store_true',
                        help="Quiet")
    parser.add_argument("--flat-list", '-f', dest="flat_list", action='store_true', default=False,
                        help="Do not draw graph, just print a flat list")
    return parser.parse_args()

done_deps = []
print_pkg_deps(0, rootpkg)

outfile.write("}\n")
def main():
    args = parse_args()

    check_only = args.check_only

    logging.basicConfig(stream=sys.stderr, format='%(message)s',
                        level=logging.WARNING if args.quiet else logging.INFO)

    if args.outfile is None:
        outfile = sys.stdout
    else:
        if check_only:
            logging.error("don't specify outfile and check-only at the same time")
            sys.exit(1)
        outfile = open(args.outfile, "w")

    if args.package is None:
        mode = MODE_FULL
    else:
        mode = MODE_PKG
        rootpkg = args.package

    if args.stop_list is None:
        stop_list = []
    else:
        stop_list = args.stop_list

    if args.exclude_list is None:
        exclude_list = []
    else:
        exclude_list = args.exclude_list

    if args.direct:
        get_depends_func = brpkgutil.get_depends
        arrow_dir = "forward"
    else:
        if mode == MODE_FULL:
            logging.error("--reverse needs a package")
            sys.exit(1)
        get_depends_func = brpkgutil.get_rdepends
        arrow_dir = "back"

    draw_graph = not args.flat_list

    # Get the colors: we need exactly three colors,
    # so no need not split more than 4
    # We'll let 'dot' validate the colors...
    colors = args.colors.split(',', 4)
    if len(colors) != 3:
        logging.error("Error: incorrect color list '%s'" % args.colors)
        sys.exit(1)

    # In full mode, start with the result of get_targets() to get the main
    # targets and then use get_all_depends() for all targets
    if mode == MODE_FULL:
        targets = get_targets()
        dependencies = []
        allpkgs.append('all')
        filtered_targets = []
        for tg in targets:
            dependencies.append(('all', tg))
            filtered_targets.append(tg)
        deps = get_all_depends(filtered_targets, get_depends_func)
        if deps is not None:
            dependencies += deps
        rootpkg = 'all'

    # In pkg mode, start directly with get_all_depends() on the requested
    # package
    elif mode == MODE_PKG:
        dependencies = get_all_depends([rootpkg], get_depends_func)

    # Make the dependencies a dictionnary { 'pkg':[dep1, dep2, ...] }
    dict_deps = {}
    for dep in dependencies:
        if dep[0] not in dict_deps:
            dict_deps[dep[0]] = []
        dict_deps[dep[0]].append(dep[1])

    check_circular_deps(dict_deps)
    if check_only:
        sys.exit(0)

    dict_deps = remove_extra_deps(dict_deps, rootpkg, args.transitive)
    dict_version = brpkgutil.get_version([pkg for pkg in allpkgs
                                          if pkg != "all" and not pkg.startswith("root")])

    # Start printing the graph data
    if draw_graph:
        outfile.write("digraph G {\n")

    print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
                   arrow_dir, draw_graph, 0, args.depth, rootpkg, colors)

    if draw_graph:
        outfile.write("}\n")
    else:
        outfile.write("\n")


if __name__ == "__main__":
    sys.exit(main())
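
To keep the refactoring readable: throughout the script, dependencies live in a dict mapping a package to the list of packages it depends on, and the trimming helpers only filter those lists. A tiny self-contained illustration of the mandatory-dependency trimming, using made-up package names and the same filter as remove_mandatory_deps above:

MANDATORY_DEPS = ['toolchain', 'skeleton']

def remove_mandatory_deps(pkg, deps):
    # Same filter as the hunk above, applied to a toy dependency dict.
    return [p for p in deps[pkg] if p not in MANDATORY_DEPS]

deps = {
    'all': ['busybox', 'zlib'],                   # hypothetical targets
    'busybox': ['toolchain', 'skeleton'],
    'zlib': ['toolchain', 'skeleton', 'busybox'],  # made-up dependency
}

trimmed = {pkg: remove_mandatory_deps(pkg, deps) for pkg in deps if pkg != 'all'}
print(trimmed)  # {'busybox': [], 'zlib': ['busybox']}
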

File diff suppressed because it is too large

@@ -53,7 +53,7 @@ if head=`git rev-parse --verify --short HEAD 2>/dev/null`; then
fi

# Check for mercurial and a mercurial repo.
if hgid=`hg id 2>/dev/null`; then
if hgid=`HGRCPATH= hg id --id --tags 2>/dev/null`; then
        tag=`printf '%s' "$hgid" | cut -d' ' -f2 --only-delimited`

        # Do we have an untagged version?

@@ -1,180 +0,0 @@
#!/usr/bin/env python

# This script generates a report on the packaging status of X.org
# releases in Buildroot. It does so by downloading the list of
# tarballs that are part of a given X.org release, and compare that
# with the packages that are available in Buildroot.

import BeautifulSoup
import re
import os
import urllib
from distutils.version import LooseVersion

# This can be customized
XORG_VERSION = "X11R7.7"

# Key names in dictionaries
XORG_VERSION_KEY = "xorg-version"
BR_VERSION_KEY = "br-version"
BR_NAME_KEY = "br-name"

# Packages part of X.org releases that we do not want to package in
# Buildroot (old drivers for hardware unlikely to be used in embedded
# contexts).
XORG_EXCEPTIONS = [
    'xf86-video-suncg6',
    'xf86-video-sunffb',
]

# Get the list of tarballs of a X.org release, parse it, and return a
# dictionary of dictionaries, of the form:
#
# { <name_of_package> : { XORG_VERSION_KEY: <version_of_package> },
#   <name_of_package2> : { XORG_VERSION_KEY: <version_of_package2> }}
#
def get_xorg_release_pkgs():
    u = urllib.URLopener().open("http://www.x.org/releases/%s/src/everything/" % XORG_VERSION)
    b = BeautifulSoup.BeautifulSoup()
    b.feed(u.read())
    links = b.findAll("a")
    packages = {}
    r = re.compile("(.*)-([0-9\.]*).tar.bz2")
    # We now have a list of all links.
    for link in links:
        href = link.get("href")
        # Skip everything but tarballs
        if not href.endswith(".tar.bz2"):
            continue
        # Separate the name and the version
        groups = r.match(href)
        if not groups:
            continue
        name = groups.group(1)
        version = groups.group(2)
        # Skip packages we don't want to hear about
        if name in XORG_EXCEPTIONS:
            continue
        packages[name] = { XORG_VERSION_KEY : version }
    return packages

# Files and directories in package/x11r7/ that should be ignored in
# our processing.
BUILDROOT_EXCEPTIONS = [
    "mcookie", # Code is directly in package directory
    "x11r7.mk",
    "Config.in",
    "xdriver_xf86-input-tslib", # From Pengutronix, not part of X.org releases
]

# Prefixes of directories in package/x11r7/ that must be stripped
# before trying to match Buildroot package names with X.org tarball
# names.
BUILDROOT_PREFIXES = [
    "xapp",
    "xdriver",
    "xfont",
    "xlib",
    "xserver",
    "xutil",
    "xproto",
]

# From a Buildroot package name, try to see if a prefix should be
# stripped from it. For example, passing "xapp_xlsfonts" as argument
# to this function will return "xlsfonts".
def buildroot_strip_prefix(dirname):
    for prefix in BUILDROOT_PREFIXES:
        if dirname.startswith(prefix + "_"):
            return dirname[len(prefix) + 1:]
    return dirname

# From a Buildroot package name, parse its .mk file to find the
# Buildroot version of the package by looking at the <foo>_VERSION
# line.
def buildroot_get_version(dirname):
    f = open(os.path.join("package", "x11r7", dirname, dirname + ".mk"))
    r = re.compile("^([A-Z0-9_]*)_VERSION = ([0-9\.]*)$")
    for l in f.readlines():
        m = r.match(l)
        if m:
            return m.group(2)
    return None

# Augment the information of the X.org list of packages (given as
# argument) by details about their packaging in Buildroot. Those
# details are found by looking at the contents of package/x11r7/.
def get_buildroot_pkgs(packages):
    dirs = os.listdir(os.path.join(os.getcwd(), "package", "x11r7"))
    for d in dirs:
        # Skip exceptions
        if d in BUILDROOT_EXCEPTIONS:
            continue
        pkgname = buildroot_strip_prefix(d)
        version = buildroot_get_version(d)
        if packages.has_key(pkgname):
            # There is a X.org package of the same name, so we just
            # add information to the existing dict entry.
            packages[pkgname]['br-version'] = version
            packages[pkgname]['br-name'] = d
        else:
            # There is no X.org package with this name, so we add a
            # new dict entry.
            packages[pkgname] = { BR_VERSION_KEY: version,
                                  BR_NAME_KEY : d }
    return packages

def show_summary(packages):
    FORMAT_STRING = "%40s | %15s | %15s | %-30s"
    print FORMAT_STRING % ("Package name", "Vers in BR", "Vers in X.org", "Action")
    print FORMAT_STRING % ("-" * 40, "-" * 15, "-" * 15, "-" * 30)
    pkgs = packages.keys()
    pkgs.sort()
    total_pkgs = 0
    upgrade_pkgs = 0
    add_pkgs = 0
    remove_pkgs = 0
    nothing_todo_pkgs = 0
    for pkgname in pkgs:
        pkg = packages[pkgname]
        total_pkgs += 1
        if pkg.has_key(XORG_VERSION_KEY) and not pkg.has_key(BR_VERSION_KEY):
            xorg_version = pkg[XORG_VERSION_KEY]
            br_version = "N/A"
            action = "Add to Buildroot"
            add_pkgs += 1
        elif not pkg.has_key(XORG_VERSION_KEY) and pkg.has_key(BR_VERSION_KEY):
            br_version = pkg[BR_VERSION_KEY]
            xorg_version = "N/A"
            action = "Remove from Buildroot"
            remove_pkgs += 1
        elif LooseVersion(pkg[XORG_VERSION_KEY]) > LooseVersion(pkg[BR_VERSION_KEY]):
            br_version = pkg[BR_VERSION_KEY]
            xorg_version = pkg[XORG_VERSION_KEY]
            action = "Upgrade"
            upgrade_pkgs += 1
        elif LooseVersion(pkg[XORG_VERSION_KEY]) < LooseVersion(pkg[BR_VERSION_KEY]):
            br_version = pkg[BR_VERSION_KEY]
            xorg_version = pkg[XORG_VERSION_KEY]
            action = "More recent"
            nothing_todo_pkgs += 1
        else:
            br_version = pkg[BR_VERSION_KEY]
            xorg_version = pkg[XORG_VERSION_KEY]
            action = ""
            nothing_todo_pkgs += 1

        print FORMAT_STRING % (pkgname, br_version.center(15), xorg_version.center(15), action)
    print FORMAT_STRING % ("-" * 40, "-" * 15, "-" * 15, "-" * 30)
    STAT_FORMAT_STRING = "%40s : %3d"
    print STAT_FORMAT_STRING % ("Total number of packages", total_pkgs)
    print STAT_FORMAT_STRING % ("Packages to upgrade", upgrade_pkgs)
    print STAT_FORMAT_STRING % ("Packages to add", add_pkgs)
    print STAT_FORMAT_STRING % ("Packages to remove", remove_pkgs)
    print STAT_FORMAT_STRING % ("Packages with nothing to do", nothing_todo_pkgs)

packages = get_xorg_release_pkgs()
packages = get_buildroot_pkgs(packages)
# print packages
show_summary(packages)
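
The removed report script ordered versions with distutils' LooseVersion, which compares version strings component by component rather than lexicographically. A minimal illustration with made-up version strings (distutils is only available on Python versions that still ship it):

from distutils.version import LooseVersion

# LooseVersion splits on dots and compares numeric components as numbers.
print(LooseVersion("1.14.4") > LooseVersion("1.9.0"))  # True: 14 > 9, not "1" vs "9"
print(LooseVersion("7.7") < LooseVersion("7.10"))      # True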