Bump buildroot to version 2017-02
TG-3 #closed
@@ -118,8 +118,6 @@ function apply_patch {
|
||||
if [ $? != 0 ] ; then
|
||||
echo "Patch failed! Please fix ${patch}!"
|
||||
exit 1
|
||||
else
|
||||
echo "Patch OK"
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
bsp/buildroot/support/scripts/br2-external (new executable file, 221 lines)
@@ -0,0 +1,221 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# This script must be able to run with bash-3.1, so it can't use
|
||||
# associative arrays. Instead, it emulates them using 'eval'. It
|
||||
# can however use indexed arrays, supported since at least bash-3.0.
|
||||
|
||||
# The names of the br2-external trees, once validated.
|
||||
declare -a BR2_EXT_NAMES
|
||||
|
||||
# URL to manual for help in converting old br2-external trees.
|
||||
# Escape '#' so that make does not consider it a comment.
|
||||
MANUAL_URL='https://buildroot.org/manual.html\#br2-external-converting'
|
||||
|
||||
main() {
|
||||
local OPT OPTARG
|
||||
local br2_ext ofile ofmt
|
||||
|
||||
while getopts :hkmo: OPT; do
|
||||
case "${OPT}" in
|
||||
h) help; exit 0;;
|
||||
o) ofile="${OPTARG}";;
|
||||
k) ofmt="kconfig";;
|
||||
m) ofmt="mk";;
|
||||
:) error "option '%s' expects a mandatory argument\n" "${OPTARG}";;
|
||||
\?) error "unknown option '%s'\n" "${OPTARG}";;
|
||||
esac
|
||||
done
|
||||
# Forget options; keep only positional args
|
||||
shift $((OPTIND-1))
|
||||
|
||||
case "${ofmt}" in
|
||||
mk|kconfig)
|
||||
;;
|
||||
*) error "no output format specified (-m/-k)\n";;
|
||||
esac
|
||||
if [ -z "${ofile}" ]; then
|
||||
error "no output file specified (-o)\n"
|
||||
fi
|
||||
|
||||
exec >"${ofile}"
|
||||
|
||||
do_validate ${@//:/ }
|
||||
|
||||
do_${ofmt}
|
||||
}
|
||||
|
||||
# Validates the br2-external trees passed as arguments. Makes each of
|
||||
# them canonical and store them in the global arrays BR2_EXT_NAMES
|
||||
# and BR2_EXT_PATHS.
|
||||
#
|
||||
# Note: since this script is always first called from Makefile context
|
||||
# to generate the Makefile fragment before it is called to generate the
|
||||
# Kconfig snippet, we're sure that any error in do_validate will be
|
||||
# interpreted in Makefile context. Going up to generating the Kconfig
|
||||
# snippet means that there were no errors.
|
||||
#
|
||||
do_validate() {
|
||||
local br2_ext
|
||||
|
||||
if [ ${#} -eq 0 ]; then
|
||||
# No br2-external tree is valid
|
||||
return
|
||||
fi
|
||||
|
||||
for br2_ext in "${@}"; do
|
||||
do_validate_one "${br2_ext}"
|
||||
done
|
||||
}
|
||||
|
||||
do_validate_one() {
|
||||
local br2_ext="${1}"
|
||||
local br2_name br2_desc n d
|
||||
|
||||
if [ ! -d "${br2_ext}" ]; then
|
||||
error "'%s': no such file or directory\n" "${br2_ext}"
|
||||
fi
|
||||
if [ ! -r "${br2_ext}" -o ! -x "${br2_ext}" ]; then
|
||||
error "'%s': permission denied\n" "${br2_ext}"
|
||||
fi
|
||||
if [ ! -f "${br2_ext}/external.desc" ]; then
|
||||
error "'%s': does not have a name (in 'external.desc'). See %s\n" \
|
||||
"${br2_ext}" "${MANUAL_URL}"
|
||||
fi
|
||||
br2_name="$(sed -r -e '/^name: +(.*)$/!d; s//\1/' "${br2_ext}/external.desc")"
|
||||
if [ -z "${br2_name}" ]; then
|
||||
error "'%s/external.desc': does not define the name\n" "${br2_ext}"
|
||||
fi
|
||||
# Only ASCII chars in [A-Za-z0-9_] are permitted
|
||||
n="$(sed -r -e 's/[A-Za-z0-9_]//g' <<<"${br2_name}" )"
|
||||
if [ -n "${n}" ]; then
|
||||
# Escape '$' so that it gets printed
|
||||
error "'%s': name '%s' contains invalid chars: '%s'\n" \
|
||||
"${br2_ext}" "${br2_name//\$/\$\$}" "${n//\$/\$\$}"
|
||||
fi
|
||||
eval d="\"\${BR2_EXT_PATHS_${br2_name}}\""
|
||||
if [ -n "${d}" ]; then
|
||||
error "'%s': name '%s' is already used in '%s'\n" \
|
||||
"${br2_ext}" "${br2_name}" "${d}"
|
||||
fi
|
||||
br2_desc="$(sed -r -e '/^desc: +(.*)$/!d; s//\1/' "${br2_ext}/external.desc")"
|
||||
if [ ! -f "${br2_ext}/external.mk" ]; then
|
||||
error "'%s/external.mk': no such file or directory\n" "${br2_ext}"
|
||||
fi
|
||||
if [ ! -f "${br2_ext}/Config.in" ]; then
|
||||
error "'%s/Config.in': no such file or directory\n" "${br2_ext}"
|
||||
fi
|
||||
|
||||
# Register this br2-external tree, use an absolute canonical path
|
||||
br2_ext="$( cd "${br2_ext}"; pwd )"
|
||||
BR2_EXT_NAMES+=( "${br2_name}" )
|
||||
eval BR2_EXT_PATHS_${br2_name}="\"\${br2_ext}\""
|
||||
eval BR2_EXT_DESCS_${br2_name}="\"\${br2_desc:-\${br2_name}}\""
|
||||
}
|
||||
|
||||
# Generate the .mk snippet that defines makefile variables
|
||||
# for the br2-external tree
|
||||
do_mk() {
|
||||
local br2_name br2_ext
|
||||
|
||||
printf '#\n# Automatically generated file; DO NOT EDIT.\n#\n'
|
||||
printf '\n'
|
||||
|
||||
printf 'BR2_EXTERNAL ?='
|
||||
for br2_name in "${BR2_EXT_NAMES[@]}"; do
|
||||
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
|
||||
printf ' %s' "${br2_ext}"
|
||||
done
|
||||
printf '\n'
|
||||
|
||||
printf 'BR2_EXTERNAL_NAMES = \n'
|
||||
printf 'BR2_EXTERNAL_DIRS = \n'
|
||||
printf 'BR2_EXTERNAL_MKS = \n'
|
||||
|
||||
if [ ${#BR2_EXT_NAMES[@]} -eq 0 ]; then
|
||||
printf '\n'
|
||||
printf '# No br2-external tree defined.\n'
|
||||
return
|
||||
fi
|
||||
|
||||
for br2_name in "${BR2_EXT_NAMES[@]}"; do
|
||||
eval br2_desc="\"\${BR2_EXT_DESCS_${br2_name}}\""
|
||||
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
|
||||
printf '\n'
|
||||
printf 'BR2_EXTERNAL_NAMES += %s\n' "${br2_name}"
|
||||
printf 'BR2_EXTERNAL_DIRS += %s\n' "${br2_ext}"
|
||||
printf 'BR2_EXTERNAL_MKS += %s/external.mk\n' "${br2_ext}"
|
||||
printf 'export BR2_EXTERNAL_%s_PATH = %s\n' "${br2_name}" "${br2_ext}"
|
||||
printf 'export BR2_EXTERNAL_%s_DESC = %s\n' "${br2_name}" "${br2_desc}"
|
||||
done
|
||||
}
|
||||
|
||||
# Generate the kconfig snippet for the br2-external tree.
|
||||
do_kconfig() {
|
||||
local br2_name br2_ext
|
||||
|
||||
printf '#\n# Automatically generated file; DO NOT EDIT.\n#\n'
|
||||
printf '\n'
|
||||
|
||||
if [ ${#BR2_EXT_NAMES[@]} -eq 0 ]; then
|
||||
printf '# No br2-external tree defined.\n'
|
||||
return
|
||||
fi
|
||||
|
||||
printf 'menu "External options"\n'
|
||||
printf '\n'
|
||||
|
||||
for br2_name in "${BR2_EXT_NAMES[@]}"; do
|
||||
eval br2_desc="\"\${BR2_EXT_DESCS_${br2_name}}\""
|
||||
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
|
||||
if [ ${#BR2_EXT_NAMES[@]} -gt 1 ]; then
|
||||
printf 'menu "%s"\n' "${br2_desc}"
|
||||
fi
|
||||
printf 'comment "%s (in %s)"\n' "${br2_desc}" "${br2_ext}"
|
||||
printf 'config BR2_EXTERNAL_%s_PATH\n' "${br2_name}"
|
||||
printf '\tstring\n'
|
||||
printf '\tdefault "%s"\n' "${br2_ext}"
|
||||
printf 'source "%s/Config.in"\n' "${br2_ext}"
|
||||
if [ ${#BR2_EXT_NAMES[@]} -gt 1 ]; then
|
||||
printf 'endmenu # %s\n' "${br2_name}"
|
||||
fi
|
||||
printf '\n'
|
||||
done
|
||||
|
||||
printf "endmenu # User-provided options\n"
|
||||
}
|
||||
|
||||
help() {
|
||||
cat <<-_EOF_
|
||||
Usage:
|
||||
${my_name} <-m|-k> -o FILE PATH
|
||||
|
||||
With -m, ${my_name} generates the makefile fragment that defines
|
||||
variables related to the br2-external trees passed as positional
|
||||
arguments.
|
||||
|
||||
With -k, ${my_name} generates the kconfig snippet to include the
|
||||
configuration options specified in the br2-external trees passed
|
||||
as positional arguments.
|
||||
|
||||
Using -k and -m together is not possible. The last one wins.
|
||||
|
||||
Options:
|
||||
-m Generate the makefile fragment.
|
||||
|
||||
-k Generate the kconfig snippet.
|
||||
|
||||
-o FILE
|
||||
FILE in which to generate the kconfig snippet or makefile
|
||||
fragment.
|
||||
|
||||
Returns:
|
||||
0 If no error
|
||||
!0 If any error
|
||||
_EOF_
|
||||
}
|
||||
|
||||
error() { local fmt="${1}"; shift; printf "BR2_EXTERNAL_ERROR = ${fmt}" "${@}"; exit 1; }
|
||||
|
||||
my_name="${0##*/}"
|
||||
main "${@}"
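For illustration only: a minimal br2-external tree that would pass the validation above could contain an external.desc such as the following (the name and description are hypothetical), plus possibly empty external.mk and Config.in files:

name: ACME
desc: Acme example packages

The script is normally driven from the Makefile, but an equivalent manual invocation would be:

support/scripts/br2-external -m -o br2-external.mk /path/to/acme-tree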
|
||||
bsp/buildroot/support/scripts/fix-configure-powerpc64.sh (new executable file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
#!/bin/bash
|
||||
|
||||
# This is a script to find, and correct, a problem with old versions of
|
||||
# configure that affect powerpc64 and powerpc64le.
|
||||
|
||||
# The issue causes configure to incorrectly determine that shared library
|
||||
# support is not present in the linker. This causes the package to build a
|
||||
# static library rather than a dynamic one and although the build will succeed,
|
||||
# it may cause packages that link with the static library to fail due to
|
||||
# undefined symbols.
|
||||
|
||||
# This script searches for files named 'configure' that appear to have this
|
||||
# issue (by searching for a known bad pattern) and patches them.
|
||||
|
||||
set -e
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
echo "Usage: $0 <package build directory>"
|
||||
exit 2
|
||||
fi
|
||||
|
||||
srcdir="$1"
|
||||
files=$(cd "$srcdir" && find . -name configure \
|
||||
-exec grep -qF 'Generated by GNU Autoconf' {} \; \
|
||||
-exec grep -qF 'ppc*-*linux*|powerpc*-*linux*)' {} \; -print)
|
||||
|
||||
# --ignore-whitespace is needed because some packages have included
|
||||
# copies of configure scripts where tabs have been replaced with spaces.
|
||||
for c in $files; do
|
||||
patch --ignore-whitespace "$srcdir"/"$c" <<'EOF'
|
||||
--- a/configure 2016-11-16 15:31:46.097447271 +1100
|
||||
+++ b/configure 2008-07-21 12:17:23.000000000 +1000
|
||||
@@ -4433,7 +4433,10 @@
|
||||
x86_64-*linux*)
|
||||
LD="${LD-ld} -m elf_x86_64"
|
||||
;;
|
||||
- ppc*-*linux*|powerpc*-*linux*)
|
||||
+ powerpcle-*linux*)
|
||||
+ LD="${LD-ld} -m elf64lppc"
|
||||
+ ;;
|
||||
+ powerpc-*linux*)
|
||||
LD="${LD-ld} -m elf64ppc"
|
||||
;;
|
||||
s390*-*linux*)
|
||||
EOF
|
||||
done
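A usage sketch, with a hypothetical package build directory:

support/scripts/fix-configure-powerpc64.sh output/build/foo-1.0

The script exits with status 2 unless exactly one argument is given; otherwise it patches, in place, every generated configure script under that directory that still contains the bad 'ppc*-*linux*|powerpc*-*linux*)' pattern.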
|
||||
|
||||
@@ -1,513 +0,0 @@
|
||||
## gen-manual-lists.py
|
||||
##
|
||||
## This script generates the following Buildroot manual appendices:
|
||||
## - the package tables (one for the target, the other for host tools);
|
||||
## - the deprecated items.
|
||||
##
|
||||
## Author(s):
|
||||
## - Samuel Martin <s.martin49@gmail.com>
|
||||
##
|
||||
## Copyright (C) 2013 Samuel Martin
|
||||
##
|
||||
## This program is free software; you can redistribute it and/or modify
|
||||
## it under the terms of the GNU General Public License as published by
|
||||
## the Free Software Foundation; either version 2 of the License, or
|
||||
## (at your option) any later version.
|
||||
##
|
||||
## This program is distributed in the hope that it will be useful,
|
||||
## but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
## GNU General Public License for more details.
|
||||
##
|
||||
## You should have received a copy of the GNU General Public License
|
||||
## along with this program; if not, write to the Free Software
|
||||
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import datetime
|
||||
from argparse import ArgumentParser
|
||||
|
||||
try:
|
||||
import kconfiglib
|
||||
except ImportError:
|
||||
message = """
|
||||
Could not find the module 'kconfiglib' in the PYTHONPATH:
|
||||
"""
|
||||
message += "\n".join([" {0}".format(path) for path in sys.path])
|
||||
message += """
|
||||
|
||||
Make sure the Kconfiglib directory is in the PYTHONPATH, then relaunch the
|
||||
script.
|
||||
|
||||
You can get kconfiglib from:
|
||||
https://github.com/ulfalizer/Kconfiglib
|
||||
|
||||
|
||||
"""
|
||||
sys.stderr.write(message)
|
||||
raise
|
||||
|
||||
|
||||
def get_symbol_subset(root, filter_func):
|
||||
""" Return a generator of kconfig items.
|
||||
|
||||
:param root_item: Root item of the generated subset of items
|
||||
:param filter_func: Filter function
|
||||
|
||||
"""
|
||||
if hasattr(root, "get_items"):
|
||||
get_items = root.get_items
|
||||
elif hasattr(root, "get_top_level_items"):
|
||||
get_items = root.get_top_level_items
|
||||
else:
|
||||
message = "The symbol does not contain any subset of symbols"
|
||||
raise Exception(message)
|
||||
for item in get_items():
|
||||
if item.is_symbol():
|
||||
if not filter_func(item):
|
||||
continue
|
||||
yield item
|
||||
elif item.is_menu() or item.is_choice():
|
||||
for i in get_symbol_subset(item, filter_func):
|
||||
yield i
|
||||
|
||||
|
||||
def get_symbol_parents(item, root=None, enable_choice=False):
|
||||
""" Return the list of the item's parents. The last item of the list is
|
||||
the closest parent, the first the furthest.
|
||||
|
||||
:param item: Item from which the parent list is generated
|
||||
:param root: Root item stopping the search (not included in the
|
||||
parent list)
|
||||
:param enable_choice: Flag enabling choices to appear in the parent list
|
||||
|
||||
"""
|
||||
parent = item.get_parent()
|
||||
parents = []
|
||||
while parent and parent != root:
|
||||
if parent.is_menu():
|
||||
parents.append(parent.get_title())
|
||||
elif enable_choice and parent.is_choice():
|
||||
parents.append(parent.get_prompts()[0])
|
||||
parent = parent.get_parent()
|
||||
if isinstance(root, kconfiglib.Menu) or \
|
||||
(enable_choice and isinstance(root, kconfiglib.Choice)):
|
||||
parents.append("") # Dummy empty parent to get a leading arrow ->
|
||||
parents.reverse()
|
||||
return parents
|
||||
|
||||
|
||||
def format_asciidoc_table(root, get_label_func, filter_func=lambda x: True,
|
||||
format_func=lambda x: x,
|
||||
enable_choice=False, sorted=True,
|
||||
item_label=None):
|
||||
""" Return the asciidoc formatted table of the items and their location.
|
||||
|
||||
:param root: Root item of the item subset
|
||||
:param get_label_func: Item's label getter function
|
||||
:param filter_func: Filter function to apply on the item subset
|
||||
:param format_func: Function to format a symbol and the table header
|
||||
:param enable_choice: Enable choices to appear as part of the item's
|
||||
location
|
||||
:param sorted: Flag to alphabetically sort the table
|
||||
|
||||
"""
|
||||
|
||||
lines = []
|
||||
for item in get_symbol_subset(root, filter_func):
|
||||
lines.append(format_func(what="symbol", symbol=item, root=root,
|
||||
get_label_func=get_label_func,
|
||||
enable_choice=enable_choice))
|
||||
if sorted:
|
||||
lines.sort(key=lambda x: x.lower())
|
||||
table = ":halign: center\n\n"
|
||||
width, columns = format_func(what="layout")
|
||||
table = "[width=\"{0}\",cols=\"{1}\",options=\"header\"]\n".format(width, columns)
|
||||
table += "|===================================================\n"
|
||||
table += format_func(what="header", header=item_label, root=root)
|
||||
table += "\n" + "".join(lines) + "\n"
|
||||
table += "|===================================================\n"
|
||||
return table
|
||||
|
||||
|
||||
class Buildroot:
|
||||
""" Buildroot configuration object.
|
||||
|
||||
"""
|
||||
root_config = "Config.in"
|
||||
package_dirname = "package"
|
||||
package_prefixes = ["BR2_PACKAGE_", "BR2_PACKAGE_HOST_"]
|
||||
re_pkg_prefix = re.compile(r"^(" + "|".join(package_prefixes) + ").*")
|
||||
deprecated_symbol = "BR2_DEPRECATED"
|
||||
list_in = """\
|
||||
//
|
||||
// Automatically generated list for Buildroot manual.
|
||||
//
|
||||
|
||||
{table}
|
||||
"""
|
||||
|
||||
list_info = {
|
||||
'target-packages': {
|
||||
'filename': "package-list",
|
||||
'root_menu': "Target packages",
|
||||
'filter': "_is_real_package",
|
||||
'format': "_format_symbol_prompt_location",
|
||||
'sorted': True,
|
||||
},
|
||||
'host-packages': {
|
||||
'filename': "host-package-list",
|
||||
'root_menu': "Host utilities",
|
||||
'filter': "_is_real_package",
|
||||
'format': "_format_symbol_prompt",
|
||||
'sorted': True,
|
||||
},
|
||||
'virtual-packages': {
|
||||
'filename': "virtual-package-list",
|
||||
'root_menu': "Target packages",
|
||||
'filter': "_is_virtual_package",
|
||||
'format': "_format_symbol_virtual",
|
||||
'sorted': True,
|
||||
},
|
||||
'deprecated': {
|
||||
'filename': "deprecated-list",
|
||||
'root_menu': None,
|
||||
'filter': "_is_deprecated_feature",
|
||||
'format': "_format_symbol_prompt_location",
|
||||
'sorted': False,
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
self.base_dir = os.environ.get("TOPDIR")
|
||||
self.output_dir = os.environ.get("O")
|
||||
self.package_dir = os.path.join(self.base_dir, self.package_dirname)
|
||||
self.config = kconfiglib.Config(os.path.join(self.base_dir,
|
||||
self.root_config),
|
||||
self.base_dir)
|
||||
self._deprecated = self.config.get_symbol(self.deprecated_symbol)
|
||||
|
||||
self.gen_date = datetime.datetime.utcnow()
|
||||
self.br_version_full = os.environ.get("BR2_VERSION_FULL")
|
||||
if self.br_version_full and self.br_version_full.endswith("-git"):
|
||||
self.br_version_full = self.br_version_full[:-4]
|
||||
if not self.br_version_full:
|
||||
self.br_version_full = "undefined"
|
||||
|
||||
def _get_package_symbols(self, package_name):
|
||||
""" Return a tuple containing the target and host package symbol.
|
||||
|
||||
"""
|
||||
symbols = re.sub("[-+.]", "_", package_name)
|
||||
symbols = symbols.upper()
|
||||
symbols = tuple([prefix + symbols for prefix in self.package_prefixes])
|
||||
return symbols
|
||||
|
||||
def _is_deprecated(self, symbol):
|
||||
""" Return True if the symbol is marked as deprecated, otherwise False.
|
||||
|
||||
"""
|
||||
# This also catches BR2_DEPRECATED_SINCE_xxxx_xx
|
||||
return bool([ symbol for x in symbol.get_referenced_symbols()
|
||||
if x.get_name().startswith(self._deprecated.get_name()) ])
|
||||
|
||||
def _is_package(self, symbol, type='real'):
|
||||
""" Return True if the symbol is a package or a host package, otherwise
|
||||
False.
|
||||
|
||||
:param symbol: The symbol to check
|
||||
:param type: Limit to 'real' or 'virtual' types of packages,
|
||||
with 'real' being the default.
|
||||
Note: only 'real' is (implicitly) handled for now
|
||||
|
||||
"""
|
||||
if not symbol.is_symbol():
|
||||
return False
|
||||
if type == 'real' and not symbol.get_prompts():
|
||||
return False
|
||||
if type == 'virtual' and symbol.get_prompts():
|
||||
return False
|
||||
if not self.re_pkg_prefix.match(symbol.get_name()):
|
||||
return False
|
||||
pkg_name = self._get_pkg_name(symbol)
|
||||
|
||||
pattern = "^(HOST_)?" + pkg_name + "$"
|
||||
pattern = re.sub("_", ".", pattern)
|
||||
pattern = re.compile(pattern, re.IGNORECASE)
|
||||
# Here, we cannot just check for the location of the Config.in because
|
||||
# of the "virtual" package.
|
||||
#
|
||||
# So, to check that a symbol is a package (not a package option or
|
||||
# anything else), we check for the existence of the package *.mk file.
|
||||
#
|
||||
# By the way, to actually check for a package, we should grep all *.mk
|
||||
# files for the following regex:
|
||||
# "\$\(eval \$\((host-)?(generic|autotools|cmake)-package\)\)"
|
||||
#
|
||||
# Implementation details:
|
||||
#
|
||||
# * The package list is generated from the *.mk file existence, the
|
||||
# first time this function is called. Despite the memory consumption,
|
||||
# this list is stored because the execution time of this script is
|
||||
# noticeably shorter than rescanning the package sub-tree for each
|
||||
# symbol.
|
||||
if not hasattr(self, "_package_list"):
|
||||
pkg_list = []
|
||||
for _, _, files in os.walk(self.package_dir):
|
||||
for file_ in (f for f in files if f.endswith(".mk")):
|
||||
pkg_list.append(re.sub(r"(.*?)\.mk", r"\1", file_))
|
||||
setattr(self, "_package_list", pkg_list)
|
||||
for pkg in getattr(self, "_package_list"):
|
||||
if type == 'real':
|
||||
if pattern.match(pkg) and not self._exists_virt_symbol(pkg):
|
||||
return True
|
||||
if type == 'virtual':
|
||||
if pattern.match('has_' + pkg):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_real_package(self, symbol):
|
||||
return self._is_package(symbol, 'real')
|
||||
|
||||
def _is_virtual_package(self, symbol):
|
||||
return self._is_package(symbol, 'virtual')
|
||||
|
||||
def _is_deprecated_feature(self, symbol):
|
||||
return symbol.get_prompts() and self._is_deprecated(symbol)
|
||||
|
||||
def _exists_virt_symbol(self, pkg_name):
|
||||
""" Return True if a symbol exists that defines the package as
|
||||
a virtual package, False otherwise
|
||||
|
||||
:param pkg_name: The name of the package, for which to check if
|
||||
a symbol exists defining it as a virtual package
|
||||
|
||||
"""
|
||||
virt_pattern = "BR2_PACKAGE_HAS_" + pkg_name + "$"
|
||||
virt_pattern = re.sub("_", ".", virt_pattern)
|
||||
virt_pattern = re.compile(virt_pattern, re.IGNORECASE)
|
||||
for sym in self.config:
|
||||
if virt_pattern.match(sym.get_name()):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _get_pkg_name(self, symbol):
|
||||
""" Return the package name of the specified symbol.
|
||||
|
||||
:param symbol: The symbol to get the package name of
|
||||
|
||||
"""
|
||||
|
||||
return re.sub("BR2_PACKAGE_(HOST_)?(.*)", r"\2", symbol.get_name())
|
||||
|
||||
def _get_symbol_label(self, symbol, mark_deprecated=True):
|
||||
""" Return the label (a.k.a. prompt text) of the symbol.
|
||||
|
||||
:param symbol: The symbol
|
||||
:param mark_deprecated: Append a 'deprecated' to the label
|
||||
|
||||
"""
|
||||
label = symbol.get_prompts()[0]
|
||||
if self._is_deprecated(symbol) and mark_deprecated:
|
||||
label += " *(deprecated)*"
|
||||
return label
|
||||
|
||||
def _format_symbol_prompt(self, what=None, symbol=None, root=None,
|
||||
enable_choice=False, header=None,
|
||||
get_label_func=lambda x: x):
|
||||
if what == "layout":
|
||||
return ( "30%", "^1" )
|
||||
|
||||
if what == "header":
|
||||
return "| {0:<40}\n".format(header)
|
||||
|
||||
if what == "symbol":
|
||||
return "| {0:<40}\n".format(get_label_func(symbol))
|
||||
|
||||
message = "Invalid argument 'what': '%s'\n" % str(what)
|
||||
message += "Allowed values are: 'layout', 'header' and 'symbol'"
|
||||
raise Exception(message)
|
||||
|
||||
def _format_symbol_prompt_location(self, what=None, symbol=None, root=None,
|
||||
enable_choice=False, header=None,
|
||||
get_label_func=lambda x: x):
|
||||
if what == "layout":
|
||||
return ( "100%", "^1,4" )
|
||||
|
||||
if what == "header":
|
||||
if hasattr(root, "get_title"):
|
||||
loc_label = get_symbol_parents(root, None, enable_choice=enable_choice)
|
||||
loc_label += [root.get_title(), "..."]
|
||||
else:
|
||||
loc_label = ["Location"]
|
||||
return "| {0:<40} <| {1}\n".format(header, " -> ".join(loc_label))
|
||||
|
||||
if what == "symbol":
|
||||
parents = get_symbol_parents(symbol, root, enable_choice)
|
||||
return "| {0:<40} <| {1}\n".format(get_label_func(symbol),
|
||||
" -> ".join(parents))
|
||||
|
||||
message = "Invalid argument 'what': '%s'\n" % str(what)
|
||||
message += "Allowed values are: 'layout', 'header' and 'symbol'"
|
||||
raise Exception(message)
|
||||
|
||||
def _format_symbol_virtual(self, what=None, symbol=None, root=None,
|
||||
enable_choice=False, header=None,
|
||||
get_label_func=lambda x: "?"):
|
||||
def _symbol_is_legacy(symbol):
|
||||
selects = [ s.get_name() for s in symbol.get_selected_symbols() ]
|
||||
return ("BR2_LEGACY" in selects)
|
||||
|
||||
def _get_parent_package(sym):
|
||||
if self._is_real_package(sym):
|
||||
return None
|
||||
# Trim the symbol name from its last component (separated with
|
||||
# underscores), until we either find a symbol which is a real
|
||||
# package, or until we have no component (i.e. just 'BR2')
|
||||
name = sym.get_name()
|
||||
while name != "BR2":
|
||||
name = name.rsplit("_", 1)[0]
|
||||
s = self.config.get_symbol(name)
|
||||
if s is None:
|
||||
continue
|
||||
if self._is_real_package(s):
|
||||
return s
|
||||
return None
|
||||
|
||||
def _get_providers(symbol):
|
||||
providers = list()
|
||||
for sym in self.config:
|
||||
if not sym.is_symbol():
|
||||
continue
|
||||
if _symbol_is_legacy(sym):
|
||||
continue
|
||||
selects = sym.get_selected_symbols()
|
||||
if not selects:
|
||||
continue
|
||||
for s in selects:
|
||||
if s == symbol:
|
||||
if sym.get_prompts():
|
||||
l = self._get_symbol_label(sym,False)
|
||||
parent_pkg = _get_parent_package(sym)
|
||||
if parent_pkg is not None:
|
||||
l = self._get_symbol_label(parent_pkg, False) \
|
||||
+ " (w/ " + l + ")"
|
||||
providers.append(l)
|
||||
else:
|
||||
providers.extend(_get_providers(sym))
|
||||
return providers
|
||||
|
||||
if what == "layout":
|
||||
return ( "100%", "^1,4,4" )
|
||||
|
||||
if what == "header":
|
||||
return "| {0:<20} <| {1:<32} <| Providers\n".format("Virtual packages", "Symbols")
|
||||
|
||||
if what == "symbol":
|
||||
pkg = re.sub(r"^BR2_PACKAGE_HAS_(.+)$", r"\1", symbol.get_name())
|
||||
providers = _get_providers(symbol)
|
||||
|
||||
return "| {0:<20} <| {1:<32} <| {2}\n".format(pkg.lower(),
|
||||
'+' + symbol.get_name() + '+',
|
||||
", ".join(providers))
|
||||
|
||||
message = "Invalid argument 'what': '%s'\n" % str(what)
|
||||
message += "Allowed values are: 'layout', 'header' and 'symbol'"
|
||||
raise Exception(message)
|
||||
|
||||
|
||||
def print_list(self, list_type, enable_choice=True, enable_deprecated=True,
|
||||
dry_run=False, output=None):
|
||||
""" Print the requested list. If not dry run, then the list is
|
||||
automatically written in its own file.
|
||||
|
||||
:param list_type: The list type to be generated
|
||||
:param enable_choice: Flag enabling choices to appear in the list
|
||||
:param enable_deprecated: Flag enabling deprecated items to appear in
|
||||
the package lists
|
||||
:param dry_run: Dry run (print the list in stdout instead of
|
||||
writing the list file
|
||||
|
||||
"""
|
||||
def _get_menu(title):
|
||||
""" Return the first symbol menu matching the given title.
|
||||
|
||||
"""
|
||||
menus = self.config.get_menus()
|
||||
menu = [m for m in menus if m.get_title().lower() == title.lower()]
|
||||
if not menu:
|
||||
message = "No such menu: '{0}'".format(title)
|
||||
raise Exception(message)
|
||||
return menu[0]
|
||||
|
||||
list_config = self.list_info[list_type]
|
||||
root_title = list_config.get('root_menu')
|
||||
if root_title:
|
||||
root_item = _get_menu(root_title)
|
||||
else:
|
||||
root_item = self.config
|
||||
filter_ = getattr(self, list_config.get('filter'))
|
||||
filter_func = lambda x: filter_(x)
|
||||
format_func = getattr(self, list_config.get('format'))
|
||||
if not enable_deprecated and list_type != "deprecated":
|
||||
filter_func = lambda x: filter_(x) and not self._is_deprecated(x)
|
||||
mark_depr = list_type != "deprecated"
|
||||
get_label = lambda x: self._get_symbol_label(x, mark_depr)
|
||||
item_label = "Features" if list_type == "deprecated" else "Packages"
|
||||
|
||||
table = format_asciidoc_table(root_item, get_label,
|
||||
filter_func=filter_func,
|
||||
format_func=format_func,
|
||||
enable_choice=enable_choice,
|
||||
sorted=list_config.get('sorted'),
|
||||
item_label=item_label)
|
||||
|
||||
content = self.list_in.format(table=table)
|
||||
|
||||
if dry_run:
|
||||
print(content)
|
||||
return
|
||||
|
||||
if not output:
|
||||
output_dir = self.output_dir
|
||||
if not output_dir:
|
||||
print("Warning: Undefined output directory.")
|
||||
print("\tUse source directory as output location.")
|
||||
output_dir = self.base_dir
|
||||
output = os.path.join(output_dir,
|
||||
list_config.get('filename') + ".txt")
|
||||
if not os.path.exists(os.path.dirname(output)):
|
||||
os.makedirs(os.path.dirname(output))
|
||||
print("Writing the {0} list in:\n\t{1}".format(list_type, output))
|
||||
with open(output, 'w') as fout:
|
||||
fout.write(content)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
list_types = ['target-packages', 'host-packages', 'virtual-packages', 'deprecated']
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument("list_type", nargs="?", choices=list_types,
|
||||
help="""\
|
||||
Generate the given list (generate all lists if unspecified)""")
|
||||
parser.add_argument("-n", "--dry-run", dest="dry_run", action='store_true',
|
||||
help="Output the generated list to stdout")
|
||||
parser.add_argument("--output-target", dest="output_target",
|
||||
help="Output target package file")
|
||||
parser.add_argument("--output-host", dest="output_host",
|
||||
help="Output host package file")
|
||||
parser.add_argument("--output-virtual", dest="output_virtual",
|
||||
help="Output virtual package file")
|
||||
parser.add_argument("--output-deprecated", dest="output_deprecated",
|
||||
help="Output deprecated file")
|
||||
args = parser.parse_args()
|
||||
lists = [args.list_type] if args.list_type else list_types
|
||||
buildroot = Buildroot()
|
||||
for list_name in lists:
|
||||
output = getattr(args, "output_" + list_name.split("-", 1)[0])
|
||||
buildroot.print_list(list_name, dry_run=args.dry_run, output=output)
|
||||
bsp/buildroot/support/scripts/get-developers (new executable file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import argparse
import sys
|
||||
import getdeveloperlib
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('patches', metavar='P', type=argparse.FileType('r'), nargs='*',
|
||||
help='list of patches (use - to read patches from stdin)')
|
||||
parser.add_argument('-a', dest='architecture', action='store',
|
||||
help='find developers in charge of this architecture')
|
||||
parser.add_argument('-p', dest='package', action='store',
|
||||
help='find developers in charge of this package')
|
||||
parser.add_argument('-c', dest='check', action='store_const',
|
||||
const=True, help='list files not handled by any developer')
|
||||
return parser.parse_args()
|
||||
|
||||
def __main__():
|
||||
devs = getdeveloperlib.parse_developers()
|
||||
if devs is None:
|
||||
sys.exit(1)
|
||||
args = parse_args()
|
||||
|
||||
# Check that only one action is given
|
||||
action = 0
|
||||
if args.architecture is not None:
|
||||
action += 1
|
||||
if args.package is not None:
|
||||
action += 1
|
||||
if args.check:
|
||||
action += 1
|
||||
if len(args.patches) != 0:
|
||||
action += 1
|
||||
if action > 1:
|
||||
print("Cannot do more than one action")
|
||||
return
|
||||
if action == 0:
|
||||
print("No action specified")
|
||||
return
|
||||
|
||||
# Handle the check action
|
||||
if args.check:
|
||||
files = getdeveloperlib.check_developers(devs)
|
||||
for f in files:
|
||||
print(f)
|
||||
|
||||
# Handle the architecture action
|
||||
if args.architecture is not None:
|
||||
for dev in devs:
|
||||
if args.architecture in dev.architectures:
|
||||
print(dev.name)
|
||||
return
|
||||
|
||||
# Handle the package action
|
||||
if args.package is not None:
|
||||
for dev in devs:
|
||||
if args.package in dev.packages:
|
||||
print(dev.name)
|
||||
return
|
||||
|
||||
# Handle the patches action
|
||||
if len(args.patches) != 0:
|
||||
(files, infras) = getdeveloperlib.analyze_patches(args.patches)
|
||||
matching_devs = set()
|
||||
for dev in devs:
|
||||
# See if we have developers matching by package name
|
||||
for f in files:
|
||||
if dev.hasfile(f):
|
||||
matching_devs.add(dev.name)
|
||||
# See if we have developers matching by package infra
|
||||
for i in infras:
|
||||
if i in dev.infras:
|
||||
matching_devs.add(dev.name)
|
||||
|
||||
result = "--to buildroot@buildroot.org"
|
||||
for dev in matching_devs:
|
||||
result += " --cc \"%s\"" % dev
|
||||
|
||||
if result != "":
|
||||
print("git send-email %s" % result)
|
||||
|
||||
__main__()
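Illustrative invocations, assuming the script is run from the Buildroot top-level directory (the package, architecture and patch names are placeholders):

support/scripts/get-developers -p libcurl        # developers in charge of a package
support/scripts/get-developers -a arm            # developers in charge of an architecture
support/scripts/get-developers -c                # files not handled by any developer
support/scripts/get-developers 0001-foo.patch    # prints a ready-to-use 'git send-email ... --cc ...' command line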
|
||||
|
||||
bsp/buildroot/support/scripts/getdeveloperlib.py (new file, 200 lines)
@@ -0,0 +1,200 @@
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import argparse
|
||||
import glob
|
||||
import subprocess
|
||||
|
||||
#
|
||||
# Patch parsing functions
|
||||
#
|
||||
|
||||
FIND_INFRA_IN_PATCH = re.compile("^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")
|
||||
|
||||
def analyze_patch(patch):
|
||||
"""Parse one patch and return the list of files modified, added or
|
||||
removed by the patch."""
|
||||
files = set()
|
||||
infras = set()
|
||||
for line in patch:
|
||||
# If the patch is adding a package, find which infra it is
|
||||
m = FIND_INFRA_IN_PATCH.match(line)
|
||||
if m:
|
||||
infras.add(m.group(2))
|
||||
if not line.startswith("+++ "):
|
||||
continue
|
||||
line.strip()
|
||||
fname = line[line.find("/") + 1 : ].strip()
|
||||
if fname == "dev/null":
|
||||
continue
|
||||
files.add(fname)
|
||||
return (files, infras)
|
||||
|
||||
FIND_INFRA_IN_MK = re.compile("^\$\(eval \$\((host-)?([^-]*)-package\)\)$")
|
||||
|
||||
def fname_get_package_infra(fname):
|
||||
"""Checks whether the file name passed as argument is a Buildroot .mk
|
||||
file describing a package, and find the infrastructure it's using."""
|
||||
if not fname.endswith(".mk"):
|
||||
return None
|
||||
|
||||
if not os.path.exists(fname):
|
||||
return None
|
||||
|
||||
with open(fname, "r") as f:
|
||||
for l in f:
|
||||
l = l.strip()
|
||||
m = FIND_INFRA_IN_MK.match(l)
|
||||
if m:
|
||||
return m.group(2)
|
||||
return None
|
||||
|
||||
def get_infras(files):
|
||||
"""Search in the list of files for .mk files, and collect the package
|
||||
infrastructures used by those .mk files."""
|
||||
infras = set()
|
||||
for fname in files:
|
||||
infra = fname_get_package_infra(fname)
|
||||
if infra:
|
||||
infras.add(infra)
|
||||
return infras
|
||||
|
||||
def analyze_patches(patches):
|
||||
"""Parse a list of patches and returns the list of files modified,
|
||||
added or removed by the patches, as well as the list of package
|
||||
infrastructures used by those patches (if any)"""
|
||||
allfiles = set()
|
||||
allinfras = set()
|
||||
for patch in patches:
|
||||
(files, infras) = analyze_patch(patch)
|
||||
allfiles = allfiles | files
|
||||
allinfras = allinfras | infras
|
||||
allinfras = allinfras | get_infras(allfiles)
|
||||
return (allfiles, allinfras)
|
||||
|
||||
#
|
||||
# DEVELOPERS file parsing functions
|
||||
#
|
||||
|
||||
class Developer:
|
||||
def __init__(self, name, files):
|
||||
self.name = name
|
||||
self.files = files
|
||||
self.packages = parse_developer_packages(files)
|
||||
self.architectures = parse_developer_architectures(files)
|
||||
self.infras = parse_developer_infras(files)
|
||||
|
||||
def hasfile(self, f):
|
||||
f = os.path.abspath(f)
|
||||
for fs in self.files:
|
||||
if f.startswith(fs):
|
||||
return True
|
||||
return False
|
||||
|
||||
def parse_developer_packages(fnames):
|
||||
"""Given a list of file patterns, travel through the Buildroot source
|
||||
tree to find which packages are implemented by those file
|
||||
patterns, and return a list of those packages."""
|
||||
packages = set()
|
||||
for fname in fnames:
|
||||
for root, dirs, files in os.walk(fname):
|
||||
for f in files:
|
||||
path = os.path.join(root, f)
|
||||
if fname_get_package_infra(path):
|
||||
pkg = os.path.splitext(f)[0]
|
||||
packages.add(pkg)
|
||||
return packages
|
||||
|
||||
def parse_arches_from_config_in(fname):
|
||||
"""Given a path to an arch/Config.in.* file, parse it to get the list
|
||||
of BR2_ARCH values for this architecture."""
|
||||
arches = set()
|
||||
with open(fname, "r") as f:
|
||||
parsing_arches = False
|
||||
for l in f:
|
||||
l = l.strip()
|
||||
if l == "config BR2_ARCH":
|
||||
parsing_arches = True
|
||||
continue
|
||||
if parsing_arches:
|
||||
m = re.match("^\s*default \"([^\"]*)\".*", l)
|
||||
if m:
|
||||
arches.add(m.group(1))
|
||||
else:
|
||||
parsing_arches = False
|
||||
return arches
|
||||
|
||||
def parse_developer_architectures(fnames):
|
||||
"""Given a list of file names, find the ones starting by
|
||||
'arch/Config.in.', and use that to determine the architecture a
|
||||
developer is working on."""
|
||||
arches = set()
|
||||
for fname in fnames:
|
||||
if not re.match("^.*/arch/Config\.in\..*$", fname):
|
||||
continue
|
||||
arches = arches | parse_arches_from_config_in(fname)
|
||||
return arches
|
||||
|
||||
def parse_developer_infras(fnames):
|
||||
infras = set()
|
||||
for fname in fnames:
|
||||
m = re.match("^package/pkg-([^.]*).mk$", fname)
|
||||
if m:
|
||||
infras.add(m.group(1))
|
||||
return infras
|
||||
|
||||
def parse_developers(basepath=None):
|
||||
"""Parse the DEVELOPERS file and return a list of Developer objects."""
|
||||
developers = []
|
||||
linen = 0
|
||||
if basepath == None:
|
||||
basepath = os.getcwd()
|
||||
with open(os.path.join(basepath, "DEVELOPERS"), "r") as f:
|
||||
files = []
|
||||
name = None
|
||||
for l in f:
|
||||
l = l.strip()
|
||||
if l.startswith("#"):
|
||||
continue
|
||||
elif l.startswith("N:"):
|
||||
if name is not None or len(files) != 0:
|
||||
print("Syntax error in DEVELOPERS file, line %d" % linen)
|
||||
name = l[2:].strip()
|
||||
elif l.startswith("F:"):
|
||||
fname = l[2:].strip()
|
||||
dev_files = glob.glob(os.path.join(basepath, fname))
|
||||
if len(dev_files) == 0:
|
||||
print("WARNING: '%s' doesn't match any file" % fname)
|
||||
files += dev_files
|
||||
elif l == "":
|
||||
if not name:
|
||||
continue
|
||||
developers.append(Developer(name, files))
|
||||
files = []
|
||||
name = None
|
||||
else:
|
||||
print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, l))
|
||||
return None
|
||||
linen += 1
|
||||
# handle last developer
|
||||
if name is not None:
|
||||
developers.append(Developer(name, files))
|
||||
return developers
|
||||
|
||||
def check_developers(developers, basepath=None):
|
||||
"""Look at the list of files versioned in Buildroot, and returns the
|
||||
list of files that are not handled by any developer"""
|
||||
if basepath == None:
|
||||
basepath = os.getcwd()
|
||||
cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
|
||||
files = subprocess.check_output(cmd).strip().split("\n")
|
||||
unhandled_files = []
|
||||
for f in files:
|
||||
handled = False
|
||||
for d in developers:
|
||||
if d.hasfile(os.path.join(basepath, f)):
|
||||
handled = True
|
||||
break
|
||||
if not handled:
|
||||
unhandled_files.append(f)
|
||||
return unhandled_files
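Based on parse_developers() above, a DEVELOPERS entry is a block of 'N:' and 'F:' lines, separated from the next entry by a blank line, with '#' starting a comment line; for example (the name and paths are placeholders):

N: Jane Developer <jane@example.com>
F: package/foo/
F: arch/Config.in.arm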
|
||||
@@ -26,6 +26,8 @@ import subprocess
|
||||
import argparse
|
||||
from fnmatch import fnmatch
|
||||
|
||||
import pkgutil
|
||||
|
||||
# Modes of operation:
|
||||
MODE_FULL = 1 # draw full dependency graph for all selected packages
|
||||
MODE_PKG = 2 # draw dependency graph for a given package
|
||||
@@ -63,6 +65,10 @@ parser.add_argument("--transitive", dest="transitive", action='store_true',
|
||||
default=False)
|
||||
parser.add_argument("--no-transitive", dest="transitive", action='store_false',
|
||||
help="Draw (do not draw) transitive dependencies")
|
||||
parser.add_argument("--direct", dest="direct", action='store_true', default=True,
|
||||
help="Draw direct dependencies (the default)")
|
||||
parser.add_argument("--reverse", dest="direct", action='store_false',
|
||||
help="Draw reverse dependencies")
|
||||
args = parser.parse_args()
|
||||
|
||||
check_only = args.check_only
|
||||
@@ -95,6 +101,16 @@ else:
|
||||
|
||||
transitive = args.transitive
|
||||
|
||||
if args.direct:
|
||||
get_depends_func = pkgutil.get_depends
|
||||
arrow_dir = "forward"
|
||||
else:
|
||||
if mode == MODE_FULL:
|
||||
sys.stderr.write("--reverse needs a package\n")
|
||||
sys.exit(1)
|
||||
get_depends_func = pkgutil.get_rdepends
|
||||
arrow_dir = "back"
|
||||
|
||||
# Get the colours: we need exactly three colours,
|
||||
# so no need to split more than 4
|
||||
# We'll let 'dot' validate the colours...
|
||||
@@ -108,28 +124,6 @@ host_colour = colours[2]
|
||||
|
||||
allpkgs = []
|
||||
|
||||
# Execute the "make <pkg>-show-version" command to get the version of a given
|
||||
# list of packages, and return the version formatted as a Python dictionary.
|
||||
def get_version(pkgs):
|
||||
sys.stderr.write("Getting version for %s\n" % pkgs)
|
||||
cmd = ["make", "-s", "--no-print-directory" ]
|
||||
for pkg in pkgs:
|
||||
cmd.append("%s-show-version" % pkg)
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
|
||||
output = p.communicate()[0]
|
||||
if p.returncode != 0:
|
||||
sys.stderr.write("Error getting version %s\n" % pkgs)
|
||||
sys.exit(1)
|
||||
output = output.split("\n")
|
||||
if len(output) != len(pkgs) + 1:
|
||||
sys.stderr.write("Error getting version\n")
|
||||
sys.exit(1)
|
||||
version = {}
|
||||
for i in range(0, len(pkgs)):
|
||||
pkg = pkgs[i]
|
||||
version[pkg] = output[i]
|
||||
return version
|
||||
|
||||
# Execute the "make show-targets" command to get the list of the main
|
||||
# Buildroot PACKAGES and return it formatted as a Python list. This
|
||||
# list is used as the starting point for full dependency graphs
|
||||
@@ -144,33 +138,6 @@ def get_targets():
|
||||
return []
|
||||
return output.split(' ')
|
||||
|
||||
# Execute the "make <pkg>-show-depends" command to get the list of
|
||||
# dependencies of a given list of packages, and return the list of
|
||||
# dependencies formatted as a Python dictionary.
|
||||
def get_depends(pkgs):
|
||||
sys.stderr.write("Getting dependencies for %s\n" % pkgs)
|
||||
cmd = ["make", "-s", "--no-print-directory" ]
|
||||
for pkg in pkgs:
|
||||
cmd.append("%s-show-depends" % pkg)
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
|
||||
output = p.communicate()[0]
|
||||
if p.returncode != 0:
|
||||
sys.stderr.write("Error getting dependencies %s\n" % pkgs)
|
||||
sys.exit(1)
|
||||
output = output.split("\n")
|
||||
if len(output) != len(pkgs) + 1:
|
||||
sys.stderr.write("Error getting dependencies\n")
|
||||
sys.exit(1)
|
||||
deps = {}
|
||||
for i in range(0, len(pkgs)):
|
||||
pkg = pkgs[i]
|
||||
pkg_deps = output[i].split(" ")
|
||||
if pkg_deps == ['']:
|
||||
deps[pkg] = []
|
||||
else:
|
||||
deps[pkg] = pkg_deps
|
||||
return deps
|
||||
|
||||
# Recursive function that builds the tree of dependencies for a given
|
||||
# list of packages. The dependencies are built in a list called
|
||||
# 'dependencies', which contains tuples of the form (pkg1 ->
|
||||
@@ -190,7 +157,7 @@ def get_all_depends(pkgs):
|
||||
if len(filtered_pkgs) == 0:
|
||||
return []
|
||||
|
||||
depends = get_depends(filtered_pkgs)
|
||||
depends = get_depends_func(filtered_pkgs)
|
||||
|
||||
deps = set()
|
||||
for pkg in filtered_pkgs:
|
||||
@@ -363,7 +330,7 @@ if check_only:
|
||||
sys.exit(0)
|
||||
|
||||
dict_deps = remove_extra_deps(dict_deps)
|
||||
dict_version = get_version([pkg for pkg in allpkgs
|
||||
dict_version = pkgutil.get_version([pkg for pkg in allpkgs
|
||||
if pkg != "all" and not pkg.startswith("root")])
|
||||
|
||||
# Print the attributes of a node: label and fill-color
|
||||
@@ -418,7 +385,7 @@ def print_pkg_deps(depth, pkg):
|
||||
add = False
|
||||
break
|
||||
if add:
|
||||
outfile.write("%s -> %s\n" % (pkg_node_name(pkg), pkg_node_name(d)))
|
||||
outfile.write("%s -> %s [dir=%s]\n" % (pkg_node_name(pkg), pkg_node_name(d), arrow_dir))
|
||||
print_pkg_deps(depth+1, d)
|
||||
|
||||
# Start printing the graph data
|
||||
|
||||
File diff suppressed because it is too large
@@ -35,7 +35,7 @@ PASSWD="${TARGET_DIR}/etc/passwd"
|
||||
SHADOW="${TARGET_DIR}/etc/shadow"
|
||||
GROUP="${TARGET_DIR}/etc/group"
|
||||
# /etc/gshadow is not part of the standard skeleton, so not everybody
|
||||
# will have it, but some may hav it, and its content must be in sync
|
||||
# will have it, but some may have it, and its content must be in sync
|
||||
# with /etc/group, so any use of gshadow must be conditional.
|
||||
GSHADOW="${TARGET_DIR}/etc/gshadow"
|
||||
|
||||
@@ -43,7 +43,7 @@ GSHADOW="${TARGET_DIR}/etc/gshadow"
|
||||
# such as:
|
||||
# BR2_DEFCONFIG="$(CONFIG_DIR)/defconfig"
|
||||
# which when sourced from a shell script will eventually try to execute
|
||||
# a command name 'CONFIG_DIR', which is plain wrong for virtually every
|
||||
# a command named 'CONFIG_DIR', which is plain wrong for virtually every
|
||||
# systems out there.
|
||||
# So, we have to scan that file instead. Sigh... :-(
|
||||
PASSWD_METHOD="$( sed -r -e '/^BR2_TARGET_GENERIC_PASSWD_METHOD="(.*)"$/!d;' \
|
||||
|
||||
@@ -94,6 +94,7 @@ for i in $(find boot/ linux/ package/ -name '*.mk' | sort) ; do
|
||||
|
||||
if test \
|
||||
$i = "boot/common.mk" -o \
|
||||
$i = "linux/linux-ext-ev3dev-linux-drivers.mk" -o \
|
||||
$i = "linux/linux-ext-fbtft.mk" -o \
|
||||
$i = "linux/linux-ext-xenomai.mk" -o \
|
||||
$i = "linux/linux-ext-rtai.mk" -o \
|
||||
|
||||
bsp/buildroot/support/scripts/pkgutil.py (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
# Copyright (C) 2010-2013 Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
# Execute the "make <pkg>-show-version" command to get the version of a given
|
||||
# list of packages, and return the version formatted as a Python dictionary.
|
||||
def get_version(pkgs):
|
||||
sys.stderr.write("Getting version for %s\n" % pkgs)
|
||||
cmd = ["make", "-s", "--no-print-directory" ]
|
||||
for pkg in pkgs:
|
||||
cmd.append("%s-show-version" % pkg)
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
|
||||
output = p.communicate()[0]
|
||||
if p.returncode != 0:
|
||||
sys.stderr.write("Error getting version %s\n" % pkgs)
|
||||
sys.exit(1)
|
||||
output = output.split("\n")
|
||||
if len(output) != len(pkgs) + 1:
|
||||
sys.stderr.write("Error getting version\n")
|
||||
sys.exit(1)
|
||||
version = {}
|
||||
for i in range(0, len(pkgs)):
|
||||
pkg = pkgs[i]
|
||||
version[pkg] = output[i]
|
||||
return version
|
||||
|
||||
def _get_depends(pkgs, rule):
|
||||
sys.stderr.write("Getting dependencies for %s\n" % pkgs)
|
||||
cmd = ["make", "-s", "--no-print-directory" ]
|
||||
for pkg in pkgs:
|
||||
cmd.append("%s-%s" % (pkg, rule))
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
|
||||
output = p.communicate()[0]
|
||||
if p.returncode != 0:
|
||||
sys.stderr.write("Error getting dependencies %s\n" % pkgs)
|
||||
sys.exit(1)
|
||||
output = output.split("\n")
|
||||
if len(output) != len(pkgs) + 1:
|
||||
sys.stderr.write("Error getting dependencies\n")
|
||||
sys.exit(1)
|
||||
deps = {}
|
||||
for i in range(0, len(pkgs)):
|
||||
pkg = pkgs[i]
|
||||
pkg_deps = output[i].split(" ")
|
||||
if pkg_deps == ['']:
|
||||
deps[pkg] = []
|
||||
else:
|
||||
deps[pkg] = pkg_deps
|
||||
return deps
|
||||
|
||||
# Execute the "make <pkg>-show-depends" command to get the list of
|
||||
# dependencies of a given list of packages, and return the list of
|
||||
# dependencies formatted as a Python dictionary.
|
||||
def get_depends(pkgs):
|
||||
return _get_depends(pkgs, 'show-depends')
|
||||
|
||||
# Execute the "make <pkg>-show-rdepends" command to get the list of
|
||||
# reverse dependencies of a given list of packages, and return the
|
||||
# list of dependencies formatted as a Python dictionary.
|
||||
def get_rdepends(pkgs):
|
||||
return _get_depends(pkgs, 'show-rdepends')
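As a concrete sketch of what _get_depends() executes: for pkgs=['busybox', 'zlib'] (example package names) and rule='show-rdepends', it runs the equivalent of

make -s --no-print-directory busybox-show-rdepends zlib-show-rdepends

and turns the per-package output lines back into a dictionary mapping each package to its list of (reverse) dependencies.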
|
||||
@@ -1,30 +0,0 @@
|
||||
Readme
|
||||
======
|
||||
|
||||
Kconfiglib
|
||||
----------
|
||||
|
||||
This python module, developped by Ulf Magnusson and released under the ISC
|
||||
license, is fetched from:
|
||||
|
||||
https://github.com/ulfalizer/Kconfiglib
|
||||
commit: a95f477eafc0b6708c3ce671fce7302ecec4f789
|
||||
|
||||
Kconfiglib license
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
License (ISC)
|
||||
|
||||
Copyright (c) 2011-2013, Ulf Magnusson <ulfalizer@gmail.com>
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
PERFORMANCE OF THIS SOFTWARE.
|
||||
@@ -484,9 +484,9 @@ use HTTP::Tiny;
|
||||
use Safe;
|
||||
use MetaCPAN::API::Tiny;
|
||||
|
||||
# Below, 5.022 should be aligned with the version of perl actually
|
||||
# Below, 5.024 should be aligned with the version of perl actually
|
||||
# bundled in Buildroot:
|
||||
die <<"MSG" if $] < 5.022;
|
||||
die <<"MSG" if $] < 5.024;
|
||||
This script needs a host perl with the same major version as Buildroot target perl.
|
||||
|
||||
Your current host perl is:
|
||||
@@ -494,7 +494,7 @@ Your current host perl is:
|
||||
version $]
|
||||
|
||||
You may install a local one by running:
|
||||
perlbrew install perl-5.22.2
|
||||
perlbrew install perl-5.24.0
|
||||
MSG
|
||||
|
||||
my ($help, $man, $quiet, $force, $recommend, $test, $host);
|
||||
|
||||
@@ -88,11 +88,20 @@ def build_package_dict(builddir):
|
||||
def build_package_size(filesdict, builddir):
|
||||
pkgsize = collections.defaultdict(int)
|
||||
|
||||
seeninodes = set()
|
||||
for root, _, files in os.walk(os.path.join(builddir, "target")):
|
||||
for f in files:
|
||||
fpath = os.path.join(root, f)
|
||||
if os.path.islink(fpath):
|
||||
continue
|
||||
|
||||
st = os.stat(fpath)
|
||||
if st.st_ino in seeninodes:
|
||||
# hard link
|
||||
continue
|
||||
else:
|
||||
seeninodes.add(st.st_ino)
|
||||
|
||||
frelpath = os.path.relpath(fpath, os.path.join(builddir, "target"))
|
||||
if not frelpath in filesdict:
|
||||
print("WARNING: %s is not part of any package" % frelpath)
|
||||
@@ -100,7 +109,7 @@ def build_package_size(filesdict, builddir):
|
||||
else:
|
||||
pkg = filesdict[frelpath][0]
|
||||
|
||||
pkgsize[pkg] += os.path.getsize(fpath)
|
||||
pkgsize[pkg] += st.st_size
|
||||
|
||||
return pkgsize
|
||||
|
||||
|
||||
bsp/buildroot/support/scripts/test-pkg (new executable file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
TOOLCHAINS_URL='http://autobuild.buildroot.org/toolchains/configs/toolchain-configs.csv'
|
||||
|
||||
main() {
|
||||
local o O opts
|
||||
local cfg dir pkg random toolchain
|
||||
local ret nb nb_skip nb_fail
|
||||
local -a toolchains
|
||||
|
||||
o='hc:d:p:r:'
|
||||
O='help,config-snippet:,build-dir:,package:,random:'
|
||||
opts="$(getopt -n "${my_name}" -o "${o}" -l "${O}" -- "${@}")"
|
||||
eval set -- "${opts}"
|
||||
|
||||
random=0
|
||||
while [ ${#} -gt 0 ]; do
|
||||
case "${1}" in
|
||||
(-h|--help)
|
||||
help; exit 0
|
||||
;;
|
||||
(-c|--config-snippet)
|
||||
cfg="${2}"; shift 2
|
||||
;;
|
||||
(-d|--build-dir)
|
||||
dir="${2}"; shift 2
|
||||
;;
|
||||
(-p|--package)
|
||||
pkg="${2}"; shift 2
|
||||
;;
|
||||
(-r|--random)
|
||||
random="${2}"; shift 2
|
||||
;;
|
||||
(--)
|
||||
shift; break
|
||||
;;
|
||||
esac
|
||||
done
|
||||
if [ -z "${cfg}" ]; then
|
||||
printf "error: no config snippet specified\n" >&2; exit 1
|
||||
fi
|
||||
if [ ! -e "${cfg}" ]; then
|
||||
printf "error: %s: no such file\n" "${cfg}" >&2; exit 1
|
||||
fi
|
||||
if [ -z "${dir}" ]; then
|
||||
dir="${HOME}/br-test-pkg"
|
||||
fi
|
||||
|
||||
# Extract the URLs of the toolchains; drop internal toolchains
|
||||
# E.g.: http://server/path/to/name.config,arch,libc
|
||||
# --> http://server/path/to/name.config
|
||||
toolchains=($(curl -s "${TOOLCHAINS_URL}" \
|
||||
|sed -r -e 's/,.*//; /internal/d;' \
|
||||
|if [ ${random} -gt 0 ]; then \
|
||||
sort -R |head -n ${random}
|
||||
else
|
||||
cat
|
||||
fi |sort
|
||||
)
|
||||
)
|
||||
|
||||
if [ ${#toolchains[@]} -eq 0 ]; then
|
||||
printf "error: no toolchain found (networking issue?)\n" >&2; exit 1
|
||||
fi
|
||||
|
||||
nb=0
|
||||
nb_skip=0
|
||||
nb_fail=0
|
||||
for toolchain in "${toolchains[@]}"; do
|
||||
build_one "${dir}" "${toolchain}" "${cfg}" "${pkg}" && ret=0 || ret=${?}
|
||||
case ${ret} in
|
||||
(0) ;;
|
||||
(1) : $((nb_skip++));;
|
||||
(2) : $((nb_fail++));;
|
||||
esac
|
||||
: $((nb++))
|
||||
done
|
||||
|
||||
printf "%d builds, %d skipped, %d failed\n" ${nb} ${nb_skip} ${nb_fail}
|
||||
}
|
||||
|
||||
build_one() {
|
||||
local dir="${1}"
|
||||
local url="${2}"
|
||||
local cfg="${3}"
|
||||
local pkg="${4}"
|
||||
local toolchain
|
||||
|
||||
# Using basename(1) on a URL works nicely
|
||||
toolchain="$(basename "${url}" .config)"
|
||||
|
||||
printf "%40s: " "${toolchain}"
|
||||
|
||||
dir="${dir}/${toolchain}"
|
||||
mkdir -p "${dir}"
|
||||
|
||||
if ! curl -s "${url}" >"${dir}/.config"; then
|
||||
printf "FAILED\n"
|
||||
return 2
|
||||
fi
|
||||
|
||||
cat >>"${dir}/.config" <<-_EOF_
|
||||
BR2_INIT_NONE=y
|
||||
BR2_SYSTEM_BIN_SH_NONE=y
|
||||
# BR2_PACKAGE_BUSYBOX is not set
|
||||
# BR2_TARGET_ROOTFS_TAR is not set
|
||||
_EOF_
|
||||
cat "${cfg}" >>"${dir}/.config"
|
||||
|
||||
if ! make O="${dir}" olddefconfig >/dev/null 2>&1; then
|
||||
printf "FAILED\n"
|
||||
return 2
|
||||
fi
|
||||
# We want all the options from the snippet to be present as-is (set
|
||||
# or not set) in the actual .config; if one of them is not, it means
|
||||
# some dependency from the toolchain or arch is not available, in
|
||||
# which case this config is untestable and we skip it.
|
||||
# We don't care about the locale to sort in, as long as both sorts are
|
||||
# done in the same locale.
|
||||
comm -23 <(sort "${cfg}") <(sort "${dir}/.config") >"${dir}/missing.config"
|
||||
if [ -s "${dir}/missing.config" ]; then
|
||||
printf "SKIPPED\n"
|
||||
return 1
|
||||
fi
|
||||
# Remove file, it's empty anyway.
|
||||
rm -f "${dir}/missing.config"
|
||||
|
||||
if [ -n "${pkg}" ]; then
|
||||
if ! make O="${dir}" "${pkg}-dirclean" >> "${dir}/logfile" 2>&1; then
|
||||
printf "FAILED\n"
|
||||
return 2
|
||||
fi
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
if ! make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
|
||||
printf "FAILED\n"
|
||||
return 2
|
||||
fi
|
||||
|
||||
printf "OK\n"
|
||||
}
|
||||
|
||||
help() {
|
||||
cat <<_EOF_
|
||||
test-pkg: test-build a package against various toolchains and architectures
|
||||
|
||||
The supplied config snippet is appended to each toolchain config, the
|
||||
resulting configuration is checked to ensure it still contains all options
|
||||
specified in the snippet; if any is missing, the build is skipped, on the
|
||||
assumption that the package under test requires a toolchain or architecture
|
||||
feature that is missing.
|
||||
|
||||
In case failures are noticed, you can fix the package and just re-run the
|
||||
same command again; it will re-run the test where it failed. If you did
|
||||
specify a package (with -p), the package build dir will be removed first.
|
||||
|
||||
The list of toolchains is retrieved from the Buildroot autobuilders, available
|
||||
at ${TOOLCHAINS_URL}.
|
||||
|
||||
Options:
|
||||
|
||||
-h, --help
|
||||
Print this help.
|
||||
|
||||
-c CFG, --config-snippet CFG
|
||||
Use the CFG file as the source for the config snippet. This file
|
||||
should contain all the config options required to build a package.
|
||||
|
||||
-d DIR, --build-dir DIR
|
||||
Do the builds in directory DIR, one sub-dir per toolchain.
|
||||
|
||||
-p PKG, --package PKG
|
||||
Test-build the package PKG, by running 'make PKG'; if not specified,
|
||||
just runs 'make'.
|
||||
|
||||
-r N, --random N
|
||||
Limit the tests to the N randomly selected toolchains, instead of
|
||||
building with all toolchains.
|
||||
|
||||
Example:
|
||||
|
||||
Testing libcec would require a config snippet that contains:
|
||||
BR2_PACKAGE_LIBCEC=y
|
||||
|
||||
Testing libcurl with openSSL support would require a snippet such as:
|
||||
BR2_PACKAGE_OPENSSL=y
|
||||
BR2_PACKAGE_LIBCURL=y
|
||||
|
||||
_EOF_
|
||||
}
|
||||
|
||||
my_name="${0##*/}"
|
||||
main "${@}"
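A worked example matching the help text above, with a hypothetical snippet file name:

echo 'BR2_PACKAGE_LIBCEC=y' > libcec.config
support/scripts/test-pkg -c libcec.config -p libcec -r 5

This test-builds libcec against 5 randomly selected autobuilder toolchains, one sub-directory per toolchain under the default build directory ~/br-test-pkg.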