Bump buildroot to version 2018.02.8

bsp/buildroot/utils/brmake (new executable file, 42 lines)
@@ -0,0 +1,42 @@
#!/bin/bash
# (C) 2016, "Yann E. MORIN" <yann.morin.1998@free.fr>
# License: WTFPL, https://spdx.org/licenses/WTFPL.html

main() {
    local found ret start d h m mf

    if ! which unbuffer >/dev/null 2>&1; then
        printf "you need to install 'unbuffer' (from package expect or expect-dev)\n" >&2
        exit 1
    fi

    start=${SECONDS}

    ( exec 2>&1; unbuffer make "${@}"; ) \
    > >( while read line; do
             printf "%(%Y-%m-%dT%H:%M:%S)T %s\n" -1 "${line}"
         done \
         |tee -a br.log \
         |grep --colour=never -E '>>>'
       )
    ret=${?}

    d=$((SECONDS-start))
    printf "Done in "
    h=$((d/3600))
    d=$((d%3600))
    [ ${h} -eq 0 ] || { printf "%dh " ${h}; mf="02"; }
    m=$((d/60))
    d=$((d%60))
    [ ${m} -eq 0 ] || { printf "%${mf}dmin " ${m}; sf="02"; }
    printf "%${sf}ds" ${d}

    if [ ${ret} -ne 0 ]; then
        printf " (error code: %s)" ${ret}
    fi
    printf "\n"

    return ${ret}
}

main "${@}"
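
For illustration (not part of the commit itself), a typical invocation of the wrapper above; the -j value and the timing shown are arbitrary. All make output is timestamped into br.log, only the '>>>' package-progress lines reach the terminal, and the elapsed time is printed at the end in the format produced by the printf calls above:

    $ utils/brmake -j8
    Done in 1h 02min 14s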

bsp/buildroot/utils/check-package (new executable file, 144 lines)
@@ -0,0 +1,144 @@
#!/usr/bin/env python
# See utils/checkpackagelib/readme.txt before editing this file.

from __future__ import print_function
import argparse
import inspect
import re
import sys

import checkpackagelib.lib_config
import checkpackagelib.lib_hash
import checkpackagelib.lib_mk
import checkpackagelib.lib_patch

VERBOSE_LEVEL_TO_SHOW_IGNORED_FILES = 3
flags = None  # Command line arguments.


def parse_args():
    parser = argparse.ArgumentParser()

    # Do not use argparse.FileType("r") here because only files with known
    # format will be opened, based on the filename.
    parser.add_argument("files", metavar="F", type=str, nargs="*",
                        help="list of files")

    parser.add_argument("--manual-url", action="store",
                        default="http://nightly.buildroot.org/",
                        help="default: %(default)s")
    parser.add_argument("--verbose", "-v", action="count", default=0)

    # Now the debug options in the order they are processed.
    parser.add_argument("--include-only", dest="include_list", action="append",
                        help="run only the specified functions (debug)")
    parser.add_argument("--exclude", dest="exclude_list", action="append",
                        help="do not run the specified functions (debug)")
    parser.add_argument("--dry-run", action="store_true", help="print the "
                        "functions that would be called for each file (debug)")

    return parser.parse_args()


CONFIG_IN_FILENAME = re.compile("/Config\.\S*$")
FILE_IS_FROM_A_PACKAGE = re.compile("package/[^/]*/")


def get_lib_from_filename(fname):
    if FILE_IS_FROM_A_PACKAGE.search(fname) is None:
        return None
    if CONFIG_IN_FILENAME.search(fname):
        return checkpackagelib.lib_config
    if fname.endswith(".hash"):
        return checkpackagelib.lib_hash
    if fname.endswith(".mk"):
        return checkpackagelib.lib_mk
    if fname.endswith(".patch"):
        return checkpackagelib.lib_patch
    return None


def is_a_check_function(m):
    if not inspect.isclass(m):
        return False
    # do not call the base class
    if m.__name__.startswith("_"):
        return False
    if flags.include_list and m.__name__ not in flags.include_list:
        return False
    if flags.exclude_list and m.__name__ in flags.exclude_list:
        return False
    return True


def print_warnings(warnings):
    # Avoid the need to use 'return []' at the end of every check function.
    if warnings is None:
        return 0  # No warning generated.

    for level, message in enumerate(warnings):
        if flags.verbose >= level:
            print(message.replace("\t", "< tab >").rstrip())
    return 1  # One more warning to count.


def check_file_using_lib(fname):
    # Count number of warnings generated and lines processed.
    nwarnings = 0
    nlines = 0

    lib = get_lib_from_filename(fname)
    if not lib:
        if flags.verbose >= VERBOSE_LEVEL_TO_SHOW_IGNORED_FILES:
            print("{}: ignored".format(fname))
        return nwarnings, nlines
    classes = inspect.getmembers(lib, is_a_check_function)

    if flags.dry_run:
        functions_to_run = [c[0] for c in classes]
        print("{}: would run: {}".format(fname, functions_to_run))
        return nwarnings, nlines

    objects = [c[1](fname, flags.manual_url) for c in classes]

    for cf in objects:
        nwarnings += print_warnings(cf.before())
    for lineno, text in enumerate(open(fname, "r").readlines()):
        nlines += 1
        for cf in objects:
            nwarnings += print_warnings(cf.check_line(lineno + 1, text))
    for cf in objects:
        nwarnings += print_warnings(cf.after())

    return nwarnings, nlines


def __main__():
    global flags
    flags = parse_args()

    if len(flags.files) == 0:
        print("No files to check style")
        sys.exit(1)

    # Accumulate number of warnings generated and lines processed.
    total_warnings = 0
    total_lines = 0

    for fname in flags.files:
        nwarnings, nlines = check_file_using_lib(fname)
        total_warnings += nwarnings
        total_lines += nlines

    # The warning messages are printed to stdout and can be post-processed
    # (e.g. counted by 'wc'), so for stats use stderr. Wait until all warnings
    # are printed, for the case there are many of them, before printing stats.
    sys.stdout.flush()
    print("{} lines processed".format(total_lines), file=sys.stderr)
    print("{} warnings generated".format(total_warnings), file=sys.stderr)

    if total_warnings > 0:
        sys.exit(1)


__main__()

bsp/buildroot/utils/checkpackagelib/__init__.py (new file, empty)

bsp/buildroot/utils/checkpackagelib/base.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# See utils/checkpackagelib/readme.txt before editing this file.


class _CheckFunction(object):
    def __init__(self, filename, url_to_manual):
        self.filename = filename
        self.url_to_manual = url_to_manual

    def before(self):
        pass

    def check_line(self, lineno, text):
        pass

    def after(self):
        pass
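
For illustration (not part of the commit), a minimal check function built on the base class above. It only overrides check_line() and returns a list of warning strings; the engine prints entry N of that list when the verbose level is at least N:

    class FixmeComment(_CheckFunction):
        def check_line(self, lineno, text):
            # hypothetical check: flag FIXME markers left in a file
            if "FIXME" in text:
                return ["{}:{}: FIXME left in file".format(self.filename, lineno),
                        text]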

bsp/buildroot/utils/checkpackagelib/lib.py (new file, 54 lines)
@@ -0,0 +1,54 @@
# See utils/checkpackagelib/readme.txt before editing this file.

from base import _CheckFunction


class ConsecutiveEmptyLines(_CheckFunction):
    def before(self):
        self.lastline = "non empty"

    def check_line(self, lineno, text):
        if text.strip() == "" == self.lastline.strip():
            return ["{}:{}: consecutive empty lines"
                    .format(self.filename, lineno)]
        self.lastline = text


class EmptyLastLine(_CheckFunction):
    def before(self):
        self.lastlineno = 0
        self.lastline = "non empty"

    def check_line(self, lineno, text):
        self.lastlineno = lineno
        self.lastline = text

    def after(self):
        if self.lastline.strip() == "":
            return ["{}:{}: empty line at end of file"
                    .format(self.filename, self.lastlineno)]


class NewlineAtEof(_CheckFunction):
    def before(self):
        self.lastlineno = 0
        self.lastline = "\n"

    def check_line(self, lineno, text):
        self.lastlineno = lineno
        self.lastline = text

    def after(self):
        if self.lastline == self.lastline.rstrip("\r\n"):
            return ["{}:{}: missing newline at end of file"
                    .format(self.filename, self.lastlineno),
                    self.lastline]


class TrailingSpace(_CheckFunction):
    def check_line(self, lineno, text):
        line = text.rstrip("\r\n")
        if line != line.rstrip():
            return ["{}:{}: line contains trailing whitespace"
                    .format(self.filename, lineno),
                    text]

bsp/buildroot/utils/checkpackagelib/lib_config.py (new file, 137 lines)
@@ -0,0 +1,137 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# Kconfig generates errors if someone introduces a typo like "boool" instead of
# "bool", so below check functions don't need to check for things already
# checked by running "make menuconfig".

import re

from base import _CheckFunction
from lib import ConsecutiveEmptyLines  # noqa: F401
from lib import EmptyLastLine  # noqa: F401
from lib import NewlineAtEof  # noqa: F401
from lib import TrailingSpace  # noqa: F401


def _empty_or_comment(text):
    line = text.strip()
    # ignore empty lines and comment lines indented or not
    return line == "" or line.startswith("#")


def _part_of_help_text(text):
    return text.startswith("\t ")


# used in more than one check
entries_that_should_not_be_indented = [
    "choice", "comment", "config", "endchoice", "endif", "endmenu", "if",
    "menu", "menuconfig", "source"]


class AttributesOrder(_CheckFunction):
    attributes_order_convention = {
        "bool": 1, "prompt": 1, "string": 1, "default": 2, "depends": 3,
        "select": 4, "help": 5}

    def before(self):
        self.state = 0

    def check_line(self, lineno, text):
        if _empty_or_comment(text) or _part_of_help_text(text):
            return

        attribute = text.split()[0]

        if attribute in entries_that_should_not_be_indented:
            self.state = 0
            return
        if attribute not in self.attributes_order_convention.keys():
            return
        new_state = self.attributes_order_convention[attribute]
        wrong_order = self.state > new_state

        # save to process next line
        self.state = new_state

        if wrong_order:
            return ["{}:{}: attributes order: type, default, depends on,"
                    " select, help ({}#_config_files)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]


class HelpText(_CheckFunction):
    HELP_TEXT_FORMAT = re.compile("^\t .{,62}$")
    URL_ONLY = re.compile("^(http|https|git)://\S*$")

    def before(self):
        self.help_text = False

    def check_line(self, lineno, text):
        if _empty_or_comment(text):
            return

        entry = text.split()[0]

        if entry in entries_that_should_not_be_indented:
            self.help_text = False
            return
        if text.strip() == "help":
            self.help_text = True
            return

        if not self.help_text:
            return

        if self.HELP_TEXT_FORMAT.match(text.rstrip()):
            return
        if self.URL_ONLY.match(text.strip()):
            return
        return ["{}:{}: help text: <tab><2 spaces><62 chars>"
                " ({}#writing-rules-config-in)"
                .format(self.filename, lineno, self.url_to_manual),
                text,
                "\t " + "123456789 " * 6 + "12"]


class Indent(_CheckFunction):
    ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
    entries_that_should_be_indented = [
        "bool", "default", "depends", "help", "prompt", "select", "string"]

    def before(self):
        self.backslash = False

    def check_line(self, lineno, text):
        if _empty_or_comment(text) or _part_of_help_text(text):
            self.backslash = False
            return

        entry = text.split()[0]

        last_line_ends_in_backslash = self.backslash

        # calculate for next line
        if self.ENDS_WITH_BACKSLASH.search(text):
            self.backslash = True
        else:
            self.backslash = False

        if last_line_ends_in_backslash:
            if text.startswith("\t"):
                return
            return ["{}:{}: continuation line should be indented using tabs"
                    .format(self.filename, lineno),
                    text]

        if entry in self.entries_that_should_be_indented:
            if not text.startswith("\t{}".format(entry)):
                return ["{}:{}: should be indented with one tab"
                        " ({}#_config_files)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text]
        elif entry in entries_that_should_not_be_indented:
            if not text.startswith(entry):
                return ["{}:{}: should not be indented"
                        .format(self.filename, lineno),
                        text]

bsp/buildroot/utils/checkpackagelib/lib_hash.py (new file, 55 lines)
@@ -0,0 +1,55 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# The validity of the hashes itself is checked when building, so below check
# functions don't need to check for things already checked by running
# "make package-dirclean package-source".

import re

from base import _CheckFunction
from lib import ConsecutiveEmptyLines  # noqa: F401
from lib import EmptyLastLine  # noqa: F401
from lib import NewlineAtEof  # noqa: F401
from lib import TrailingSpace  # noqa: F401


def _empty_line_or_comment(text):
    return text.strip() == "" or text.startswith("#")


class HashNumberOfFields(_CheckFunction):
    def check_line(self, lineno, text):
        if _empty_line_or_comment(text):
            return

        fields = text.split()
        if len(fields) != 3:
            return ["{}:{}: expected three fields ({}#adding-packages-hash)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]


class HashType(_CheckFunction):
    len_of_hash = {"md5": 32, "sha1": 40, "sha224": 56, "sha256": 64,
                   "sha384": 96, "sha512": 128}

    def check_line(self, lineno, text):
        if _empty_line_or_comment(text):
            return

        fields = text.split()
        if len(fields) < 2:
            return

        htype, hexa = fields[:2]
        if htype == "none":
            return
        if htype not in self.len_of_hash.keys():
            return ["{}:{}: unexpected type of hash ({}#adding-packages-hash)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]
        if not re.match("^[0-9A-Fa-f]{%s}$" % self.len_of_hash[htype], hexa):
            return ["{}:{}: hash size does not match type "
                    "({}#adding-packages-hash)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text,
                    "expected {} hex digits".format(self.len_of_hash[htype])]
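
For reference, a hash-file line that passes both checks above has exactly three fields, and the digest length must match the type (64 hex digits for sha256). The digest below is a made-up placeholder, not a real checksum:

    sha256  0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef  foo-1.0.tar.gz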

bsp/buildroot/utils/checkpackagelib/lib_mk.py (new file, 246 lines)
@@ -0,0 +1,246 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# There are already dependency checks during the build, so below check
# functions don't need to check for things already checked by exploring the
# menu options using "make menuconfig" and by running "make" with appropriate
# packages enabled.

import re

from base import _CheckFunction
from lib import ConsecutiveEmptyLines  # noqa: F401
from lib import EmptyLastLine  # noqa: F401
from lib import NewlineAtEof  # noqa: F401
from lib import TrailingSpace  # noqa: F401


class Indent(_CheckFunction):
    COMMENT = re.compile("^\s*#")
    CONDITIONAL = re.compile("^\s*(ifeq|ifneq|endif)\s")
    ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
    END_DEFINE = re.compile("^\s*endef\s")
    MAKEFILE_TARGET = re.compile("^[^# \t]+:\s")
    START_DEFINE = re.compile("^\s*define\s")

    def before(self):
        self.define = False
        self.backslash = False
        self.makefile_target = False

    def check_line(self, lineno, text):
        if self.START_DEFINE.search(text):
            self.define = True
            return
        if self.END_DEFINE.search(text):
            self.define = False
            return

        expect_tabs = False
        if self.define or self.backslash or self.makefile_target:
            expect_tabs = True
        if self.CONDITIONAL.search(text):
            expect_tabs = False

        # calculate for next line
        if self.ENDS_WITH_BACKSLASH.search(text):
            self.backslash = True
        else:
            self.backslash = False

        if self.MAKEFILE_TARGET.search(text):
            self.makefile_target = True
            return
        if text.strip() == "":
            self.makefile_target = False
            return

        # comment can be indented or not inside define ... endef, so ignore it
        if self.define and self.COMMENT.search(text):
            return

        if expect_tabs:
            if not text.startswith("\t"):
                return ["{}:{}: expected indent with tabs"
                        .format(self.filename, lineno),
                        text]
        else:
            if text.startswith("\t"):
                return ["{}:{}: unexpected indent with tabs"
                        .format(self.filename, lineno),
                        text]


class PackageHeader(_CheckFunction):
    def before(self):
        self.skip = False

    def check_line(self, lineno, text):
        if self.skip or lineno > 6:
            return

        if lineno in [1, 5]:
            if lineno == 1 and text.startswith("include "):
                self.skip = True
                return
            if text.rstrip() != "#" * 80:
                return ["{}:{}: should be 80 hashes ({}#writing-rules-mk)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text,
                        "#" * 80]
        elif lineno in [2, 4]:
            if text.rstrip() != "#":
                return ["{}:{}: should be 1 hash ({}#writing-rules-mk)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text]
        elif lineno == 6:
            if text.rstrip() != "":
                return ["{}:{}: should be a blank line ({}#writing-rules-mk)"
                        .format(self.filename, lineno, self.url_to_manual),
                        text]


class RemoveDefaultPackageSourceVariable(_CheckFunction):
    packages_that_may_contain_default_source = ["binutils", "gcc", "gdb"]
    PACKAGE_NAME = re.compile("/([^/]+)\.mk")

    def before(self):
        package = self.PACKAGE_NAME.search(self.filename).group(1)
        package_upper = package.replace("-", "_").upper()
        self.package = package
        self.FIND_SOURCE = re.compile(
            "^{}_SOURCE\s*=\s*{}-\$\({}_VERSION\)\.tar\.gz"
            .format(package_upper, package, package_upper))

    def check_line(self, lineno, text):
        if self.FIND_SOURCE.search(text):

            if self.package in self.packages_that_may_contain_default_source:
                return

            return ["{}:{}: remove default value of _SOURCE variable "
                    "({}#generic-package-reference)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]


class SpaceBeforeBackslash(_CheckFunction):
    TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH = re.compile(r"^.*(  |\t)\\$")

    def check_line(self, lineno, text):
        if self.TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH.match(text.rstrip()):
            return ["{}:{}: use only one space before backslash"
                    .format(self.filename, lineno),
                    text]


class TrailingBackslash(_CheckFunction):
    ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")

    def before(self):
        self.backslash = False

    def check_line(self, lineno, text):
        last_line_ends_in_backslash = self.backslash

        # calculate for next line
        if self.ENDS_WITH_BACKSLASH.search(text):
            self.backslash = True
            self.lastline = text
            return
        self.backslash = False

        if last_line_ends_in_backslash and text.strip() == "":
            return ["{}:{}: remove trailing backslash"
                    .format(self.filename, lineno - 1),
                    self.lastline]


class TypoInPackageVariable(_CheckFunction):
    ALLOWED = re.compile("|".join([
        "ACLOCAL_DIR",
        "ACLOCAL_HOST_DIR",
        "BR_CCACHE_INITIAL_SETUP",
        "BR_NO_CHECK_HASH_FOR",
        "LINUX_POST_PATCH_HOOKS",
        "LINUX_TOOLS",
        "LUA_RUN",
        "MKFS_JFFS2",
        "MKIMAGE_ARCH",
        "PKG_CONFIG_HOST_BINARY",
        "TARGET_FINALIZE_HOOKS",
        "XTENSA_CORE_NAME"]))
    PACKAGE_NAME = re.compile("/([^/]+)\.mk")
    VARIABLE = re.compile("^([A-Z0-9_]+_[A-Z0-9_]+)\s*(\+|)=")

    def before(self):
        package = self.PACKAGE_NAME.search(self.filename).group(1)
        package = package.replace("-", "_").upper()
        # linux tools do not use LINUX_TOOL_ prefix for variables
        package = package.replace("LINUX_TOOL_", "")
        self.package = package
        self.REGEX = re.compile("^(HOST_)?({}_[A-Z0-9_]+)".format(package))
        self.FIND_VIRTUAL = re.compile(
            "^{}_PROVIDES\s*(\+|)=\s*(.*)".format(package))
        self.virtual = []

    def check_line(self, lineno, text):
        m = self.VARIABLE.search(text)
        if m is None:
            return

        variable = m.group(1)

        # allow to set variables for the virtual packages this package provides
        v = self.FIND_VIRTUAL.search(text)
        if v:
            self.virtual += v.group(2).upper().split()
            return
        for virtual in self.virtual:
            if variable.startswith("{}_".format(virtual)):
                return

        if self.ALLOWED.match(variable):
            return
        if self.REGEX.search(text) is None:
            return ["{}:{}: possible typo: {} -> *{}*"
                    .format(self.filename, lineno, variable, self.package),
                    text]


class UselessFlag(_CheckFunction):
    DEFAULT_AUTOTOOLS_FLAG = re.compile("^.*{}".format("|".join([
        "_AUTORECONF\s*=\s*NO",
        "_LIBTOOL_PATCH\s*=\s*YES"])))
    DEFAULT_GENERIC_FLAG = re.compile("^.*{}".format("|".join([
        "_INSTALL_IMAGES\s*=\s*NO",
        "_INSTALL_REDISTRIBUTE\s*=\s*YES",
        "_INSTALL_STAGING\s*=\s*NO",
        "_INSTALL_TARGET\s*=\s*YES"])))
    END_CONDITIONAL = re.compile("^\s*(endif)")
    START_CONDITIONAL = re.compile("^\s*(ifeq|ifneq)")

    def before(self):
        self.conditional = 0

    def check_line(self, lineno, text):
        if self.START_CONDITIONAL.search(text):
            self.conditional += 1
            return
        if self.END_CONDITIONAL.search(text):
            self.conditional -= 1
            return

        # allow a non-default value conditionally overridden by the default
        if self.conditional > 0:
            return

        if self.DEFAULT_GENERIC_FLAG.search(text):
            return ["{}:{}: useless default value ({}#"
                    "_infrastructure_for_packages_with_specific_build_systems)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]

        if self.DEFAULT_AUTOTOOLS_FLAG.search(text) and not text.lstrip().startswith("HOST_"):
            return ["{}:{}: useless default value "
                    "({}#_infrastructure_for_autotools_based_packages)"
                    .format(self.filename, lineno, self.url_to_manual),
                    text]

bsp/buildroot/utils/checkpackagelib/lib_patch.py (new file, 61 lines)
@@ -0,0 +1,61 @@
# See utils/checkpackagelib/readme.txt before editing this file.
# The format of the patch files is tested during the build, so below check
# functions don't need to check for things already checked by running
# "make package-dirclean package-patch".

import re

from base import _CheckFunction
from lib import NewlineAtEof  # noqa: F401


class ApplyOrder(_CheckFunction):
    APPLY_ORDER = re.compile("/\d{1,4}-[^/]*$")

    def before(self):
        if not self.APPLY_ORDER.search(self.filename):
            return ["{}:0: use name <number>-<description>.patch "
                    "({}#_providing_patches)"
                    .format(self.filename, self.url_to_manual)]


class NumberedSubject(_CheckFunction):
    NUMBERED_PATCH = re.compile("Subject:\s*\[PATCH\s*\d+/\d+\]")

    def before(self):
        self.git_patch = False
        self.lineno = 0
        self.text = None

    def check_line(self, lineno, text):
        if text.startswith("diff --git"):
            self.git_patch = True
            return
        if self.NUMBERED_PATCH.search(text):
            self.lineno = lineno
            self.text = text

    def after(self):
        if self.git_patch and self.text:
            return ["{}:{}: generate your patches with 'git format-patch -N'"
                    .format(self.filename, self.lineno),
                    self.text]


class Sob(_CheckFunction):
    SOB_ENTRY = re.compile("^Signed-off-by: .*$")

    def before(self):
        self.found = False

    def check_line(self, lineno, text):
        if self.found:
            return
        if self.SOB_ENTRY.search(text):
            self.found = True

    def after(self):
        if not self.found:
            return ["{}:0: missing Signed-off-by in the header "
                    "({}#_format_and_licensing_of_the_package_patches)"
                    .format(self.filename, self.url_to_manual)]

bsp/buildroot/utils/checkpackagelib/readme.txt (new file, 73 lines)
@@ -0,0 +1,73 @@
How the scripts are structured:
- check-package is the main engine, called by the user.
  For each input file, this script decides which parser should be used and it
  collects all classes declared in the library file and instantiates them.
  The main engine opens the input files and it serves each raw line (including
  newline!) to the method check_line() of every check object.
  Two special methods before() and after() are used to call the initialization
  of variables (for the case it needs to keep data across calls) and the
  equivalent finalization (e.g. for the case a warning must be issued if some
  pattern is not in the input file).
- base.py contains the base class for all check functions.
- lib.py contains the classes for common check functions.
  Each check function is explicitly included in a given type-parsing library.
  Do not include every single check function in this file; a class that will
  only parse hash files should be implemented in the hash-parsing library.
  When a warning must be issued, the check function returns an array of strings.
  Each string is a warning message and is displayed if the corresponding verbose
  level is active. When the script is called without --verbose only the first
  warning in the returned array is printed; when called with --verbose both the
  first and second warnings are printed; when called with -vv up to the third
  warning is printed; and so on.
  Helper functions can be defined and will not be called by the main script.
- lib_type.py contains check functions specific to files of this type.

Some hints when changing this code:
- prefer O(n) algorithms, where n is the total number of lines in the files
  processed.
- when there is no other reason for ordering, use alphabetical order (e.g. keep
  the check functions in alphabetical order, keep the imports in alphabetical
  order, and so on).
- keep in mind that for every class the method before() will be called before
  any line is served to be checked by the method check_line(). A class that
  checks the filename should only implement the method before(). A function
  that needs to keep data across calls (e.g. keep the last line before the one
  being processed) should initialize all variables using this method.
- keep in mind that for every class the method after() will be called after all
  lines were served to be checked by the method check_line(). A class that
  checks the absence of a pattern in the file will need to use this method.
- try to avoid false warnings. It's better not to issue a warning message for a
  corner case than to have too many false warnings. The latter can make users
  stop using the script.
- do not check spacing in the input line in every single function. Trailing
  whitespace and wrong indentation should be checked by separate functions.
- avoid duplicate tests. Try to test only one thing in each function.
- in the warning message, include the url to a section from the manual, when
  applicable. It potentially will make more people know the manual.
- use short sentences in the warning messages. A complete explanation can be
  added to show when --verbose is used.
- when testing, verify the error message is displayed when the error pattern is
  found, but also verify the error message is not displayed for a few
  well-formatted packages... there are many of these, just pick your favorite
  as a golden package that should not trigger any warning message.
- check that the url displayed by the warning message works.

Usage examples:
- to get a list of check functions that would be called without actually
  calling them you can use the --dry-run option:
$ utils/check-package --dry-run package/yourfavorite/*

- when you just added a new check function, e.g. Something, check how it
  behaves for all current packages:
$ utils/check-package --include-only Something $(find package -type f)

- the effective processing time (when the .pyc were already generated and all
  files to be processed are cached in the RAM) should stay in the order of a
  few seconds:
$ utils/check-package $(find package -type f) >/dev/null ; \
  time utils/check-package $(find package -type f) >/dev/null

- vim users can navigate the warnings (most editors probably have a similar
  function) since warnings are generated in the form 'path/file:line: warning':
$ find package/ -name 'Config.*' > filelist && vim -c \
  'set makeprg=utils/check-package\ $(cat\ filelist)' -c make -c copen

bsp/buildroot/utils/config (new executable file, 206 lines)
@@ -0,0 +1,206 @@
#!/bin/bash
# Manipulate options in a .config file from the command line

myname=${0##*/}

# If no prefix forced, use the default BR2_
BR2_PREFIX="${BR2_PREFIX-BR2_}"

usage() {
    cat >&2 <<EOL
Manipulate options in a .config file from the command line.
Usage:
$myname options command ...
commands:
    --enable|-e option   Enable option
    --disable|-d option  Disable option
    --set-str option string
                         Set option to "string"
    --set-val option value
                         Set option to value
    --undefine|-u option Undefine option
    --state|-s option    Print state of option (n,y,m,undef)

    --enable-after|-E beforeopt option
                         Enable option directly after other option
    --disable-after|-D beforeopt option
                         Disable option directly after other option

    commands can be repeated multiple times

options:
    --file config-file   .config file to change (default .config)
    --keep-case|-k       Keep next symbols' case (don't upper-case it)
    --package|-p         Operate on package (set prefix to BR2_PACKAGE_)

$myname doesn't check the validity of the .config file. This is done at next
make time.

By default, $myname will upper-case the given symbol. Use --keep-case to keep
the case of all following symbols unchanged.

$myname uses 'BR2_' as the default symbol prefix. Set the environment
variable BR2_PREFIX to the prefix to use. Eg.: BR2_PREFIX="FOO_" $myname ...
EOL
    exit 1
}

checkarg() {
    ARG="$1"
    if [ "$ARG" = "" ] ; then
        usage
    fi
    if [ "$MUNGE_CASE" = "yes" ] ; then
        ARG="`echo $ARG | tr a-z- A-Z_`"
    fi
    case "$ARG" in
    ${BR2_PREFIX}*)
        ARG="${ARG/${BR2_PREFIX}/}"
        ;;
    esac
}

txt_append() {
    local anchor="$1"
    local insert="$2"
    local infile="$3"
    local tmpfile="$infile.swp"

    # sed append cmd: 'a\' + newline + text + newline
    cmd="$(printf "a\\%b$insert" "\n")"

    sed -i -e "/$anchor/$cmd" "$infile"
}

txt_subst() {
    local before="$1"
    local after="$2"
    local infile="$3"
    local tmpfile="$infile.swp"

    sed -i -e "s:$before:$after:" "$infile"
}

txt_delete() {
    local text="$1"
    local infile="$2"
    local tmpfile="$infile.swp"

    sed -i -e "/$text/d" "$infile"
}

set_var() {
    local name=$1 new=$2 before=$3

    name_re="^($name=|# $name is not set)"
    before_re="^($before=|# $before is not set)"
    if test -n "$before" && grep -Eq "$before_re" "$FN"; then
        txt_append "^$before=" "$new" "$FN"
        txt_append "^# $before is not set" "$new" "$FN"
    elif grep -Eq "$name_re" "$FN"; then
        txt_subst "^$name=.*" "$new" "$FN"
        txt_subst "^# $name is not set" "$new" "$FN"
    else
        echo "$new" >>"$FN"
    fi
}

undef_var() {
    local name=$1

    txt_delete "^$name=" "$FN"
    txt_delete "^# $name is not set" "$FN"
}

if [ "$1" = "--file" ]; then
    FN="$2"
    if [ "$FN" = "" ] ; then
        usage
    fi
    shift 2
else
    FN=.config
fi

if [ "$1" = "" ] ; then
    usage
fi

MUNGE_CASE=yes
while [ "$1" != "" ] ; do
    CMD="$1"
    shift
    case "$CMD" in
    --keep-case|-k)
        MUNGE_CASE=no
        continue
        ;;
    --package|-p)
        BR2_PREFIX="BR2_PACKAGE_"
        continue
        ;;
    --*-after|-E|-D|-M)
        checkarg "$1"
        A=$ARG
        checkarg "$2"
        B=$ARG
        shift 2
        ;;
    -*)
        checkarg "$1"
        shift
        ;;
    esac
    case "$CMD" in
    --enable|-e)
        set_var "${BR2_PREFIX}$ARG" "${BR2_PREFIX}$ARG=y"
        ;;

    --disable|-d)
        set_var "${BR2_PREFIX}$ARG" "# ${BR2_PREFIX}$ARG is not set"
        ;;

    --set-str)
        # sed swallows one level of escaping, so we need double-escaping
        set_var "${BR2_PREFIX}$ARG" "${BR2_PREFIX}$ARG=\"${1//\"/\\\\\"}\""
        shift
        ;;

    --set-val)
        set_var "${BR2_PREFIX}$ARG" "${BR2_PREFIX}$ARG=$1"
        shift
        ;;
    --undefine|-u)
        undef_var "${BR2_PREFIX}$ARG"
        ;;

    --state|-s)
        if grep -q "# ${BR2_PREFIX}$ARG is not set" $FN ; then
            echo n
        else
            V="$(grep "^${BR2_PREFIX}$ARG=" $FN)"
            if [ $? != 0 ] ; then
                echo undef
            else
                V="${V/#${BR2_PREFIX}$ARG=/}"
                V="${V/#\"/}"
                V="${V/%\"/}"
                V="${V//\\\"/\"}"
                echo "${V}"
            fi
        fi
        ;;

    --enable-after|-E)
        set_var "${BR2_PREFIX}$B" "${BR2_PREFIX}$B=y" "${BR2_PREFIX}$A"
        ;;

    --disable-after|-D)
        set_var "${BR2_PREFIX}$B" "# ${BR2_PREFIX}$B is not set" "${BR2_PREFIX}$A"
        ;;

    *)
        usage
        ;;
    esac
done
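
A few illustrative invocations (not part of the commit); the symbol names are examples, and the BR2_ prefix plus upper-casing are applied automatically as described in the usage text above:

    $ utils/config --file output/.config -e ccache        # sets BR2_CCACHE=y
    $ utils/config --set-str CCACHE_DIR "/tmp/ccache"     # sets BR2_CCACHE_DIR="/tmp/ccache"
    $ utils/config -s CCACHE                              # prints y, n or undef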

bsp/buildroot/utils/diffconfig (new executable file, 138 lines)
@@ -0,0 +1,138 @@
#!/usr/bin/python
#
# diffconfig - a tool to compare .config files.
#
# originally written in 2006 by Matt Mackall
#  (at least, this was in his bloatwatch source code)
# last worked on 2008 by Tim Bird for the Linux kernel
# Adapted to Buildroot 2017 by Marcus Folkesson
#

import sys, os

def usage():
    print("""Usage: diffconfig [-h] [-m] [<config1> <config2>]

Diffconfig is a simple utility for comparing two .config files.
Using standard diff to compare .config files often includes extraneous and
distracting information. This utility produces sorted output with only the
changes in configuration values between the two files.

Added and removed items are shown with a leading plus or minus, respectively.
Changed items show the old and new values on a single line.

If -m is specified, then output will be in "merge" style, which has the
changed and new values in kernel config option format.

If no config files are specified, .config and .config.old are used.

Example usage:
 $ diffconfig .config config-with-some-changes
-BR2_LINUX_KERNEL_INTREE_DTS_NAME "vexpress-v2p-ca9"
 BR2_LINUX_KERNEL_DTS_SUPPORT y -> n
 BR2_LINUX_KERNEL_USE_INTREE_DTS y -> n
 BR2_PACKAGE_DFU_UTIL n -> y
 BR2_PACKAGE_LIBUSB n -> y
 BR2_TARGET_GENERIC_HOSTNAME "buildroot" -> "Tuxie"
 BR2_TARGET_GENERIC_ISSUE "Welcome to Buildroot" -> "Welcome to CustomBoard"
+BR2_PACKAGE_LIBUSB_COMPAT n

""")
    sys.exit(0)

# returns a dictionary of name/value pairs for config items in the file
def readconfig(config_file):
    d = {}
    for line in config_file:
        line = line.strip()
        if len(line) == 0:
            continue
        if line[-11:] == " is not set":
            d[line[2:-11]] = "n"
        elif line[0] != "#":
            name, val = line.split("=", 1)
            d[name] = val
    return d

def print_config(op, config, value, new_value):
    global merge_style

    if merge_style:
        if new_value:
            if new_value=="n":
                print("# %s is not set" % config)
            else:
                print("%s=%s" % (config, new_value))
    else:
        if op=="-":
            print("-%s %s" % (config, value))
        elif op=="+":
            print("+%s %s" % (config, new_value))
        else:
            print(" %s %s -> %s" % (config, value, new_value))

def main():
    global merge_style

    # parse command line args
    if ("-h" in sys.argv or "--help" in sys.argv):
        usage()

    merge_style = 0
    if "-m" in sys.argv:
        merge_style = 1
        sys.argv.remove("-m")

    argc = len(sys.argv)
    if not (argc==1 or argc == 3):
        print("Error: incorrect number of arguments or unrecognized option")
        usage()

    if argc == 1:
        # if no filenames given, assume .config and .config.old
        build_dir=""
        if "KBUILD_OUTPUT" in os.environ:
            build_dir = os.environ["KBUILD_OUTPUT"]+"/"
        configa_filename = build_dir + ".config.old"
        configb_filename = build_dir + ".config"
    else:
        configa_filename = sys.argv[1]
        configb_filename = sys.argv[2]

    try:
        a = readconfig(open(configa_filename))
        b = readconfig(open(configb_filename))
    except (IOError):
        e = sys.exc_info()[1]
        print("I/O error[%s]: %s\n" % (e.args[0], e.args[1]))
        usage()

    # print items in a but not b (accumulate, sort and print)
    old = []
    for config in a:
        if config not in b:
            old.append(config)
    old.sort()
    for config in old:
        print_config("-", config, a[config], None)
        del a[config]

    # print items that changed (accumulate, sort, and print)
    changed = []
    for config in a:
        if a[config] != b[config]:
            changed.append(config)
        else:
            del b[config]
    changed.sort()
    for config in changed:
        print_config("->", config, a[config], b[config])
        del b[config]

    # now print items in b but not in a
    # (items from b that were in a were removed above)
    new = sorted(b.keys())
    for config in new:
        print_config("+", config, None, b[config])

main()

bsp/buildroot/utils/genrandconfig (new executable file, 436 lines)
@@ -0,0 +1,436 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (C) 2014 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
# This script generates a random configuration for testing Buildroot.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import csv
|
||||
import os
|
||||
from random import randint
|
||||
import subprocess
|
||||
import sys
|
||||
from distutils.version import StrictVersion
|
||||
import platform
|
||||
|
||||
if sys.hexversion >= 0x3000000:
|
||||
import urllib.request as _urllib
|
||||
else:
|
||||
import urllib2 as _urllib
|
||||
|
||||
|
||||
def urlopen_closing(uri):
|
||||
return contextlib.closing(_urllib.urlopen(uri))
|
||||
|
||||
|
||||
if sys.hexversion >= 0x3000000:
|
||||
def decode_byte_list(bl):
|
||||
return [b.decode() for b in bl]
|
||||
else:
|
||||
def decode_byte_list(e):
|
||||
return e
|
||||
|
||||
|
||||
class SystemInfo:
|
||||
DEFAULT_NEEDED_PROGS = ["make", "git", "gcc", "timeout"]
|
||||
DEFAULT_OPTIONAL_PROGS = ["bzr", "java", "javac", "jar"]
|
||||
|
||||
def __init__(self):
|
||||
self.needed_progs = list(self.__class__.DEFAULT_NEEDED_PROGS)
|
||||
self.optional_progs = list(self.__class__.DEFAULT_OPTIONAL_PROGS)
|
||||
self.progs = {}
|
||||
|
||||
def find_prog(self, name, flags=os.X_OK, env=os.environ):
|
||||
if not name or name[0] == os.sep:
|
||||
raise ValueError(name)
|
||||
|
||||
prog_path = env.get("PATH", None)
|
||||
# for windows compatibility, we'd need to take PATHEXT into account
|
||||
|
||||
if prog_path:
|
||||
for prog_dir in filter(None, prog_path.split(os.pathsep)):
|
||||
# os.join() not necessary: non-empty prog_dir
|
||||
# and name[0] != os.sep
|
||||
prog = prog_dir + os.sep + name
|
||||
if os.access(prog, flags):
|
||||
return prog
|
||||
# --
|
||||
return None
|
||||
|
||||
def has(self, prog):
|
||||
"""Checks whether a program is available.
|
||||
Lazily evaluates missing entries.
|
||||
|
||||
Returns: None if prog not found, else path to the program [evaluates
|
||||
to True]
|
||||
"""
|
||||
try:
|
||||
return self.progs[prog]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
have_it = self.find_prog(prog)
|
||||
# java[c] needs special care
|
||||
if have_it and prog in ('java', 'javac'):
|
||||
with open(os.devnull, "w") as devnull:
|
||||
if subprocess.call("%s -version | grep gcj" % prog,
|
||||
shell=True,
|
||||
stdout=devnull, stderr=devnull) != 1:
|
||||
have_it = False
|
||||
# --
|
||||
self.progs[prog] = have_it
|
||||
return have_it
|
||||
|
||||
def check_requirements(self):
|
||||
"""Checks program dependencies.
|
||||
|
||||
Returns: True if all mandatory programs are present, else False.
|
||||
"""
|
||||
do_check_has_prog = self.has
|
||||
|
||||
missing_requirements = False
|
||||
for prog in self.needed_progs:
|
||||
if not do_check_has_prog(prog):
|
||||
print("ERROR: your system lacks the '%s' program" % prog)
|
||||
missing_requirements = True
|
||||
|
||||
# check optional programs here,
|
||||
# else they'd get checked by each worker instance
|
||||
for prog in self.optional_progs:
|
||||
do_check_has_prog(prog)
|
||||
|
||||
return not missing_requirements
|
||||
|
||||
|
||||
def get_toolchain_configs(toolchains_csv, buildrootdir):
|
||||
"""Fetch and return the possible toolchain configurations
|
||||
|
||||
This function returns an array of toolchain configurations. Each
|
||||
toolchain configuration is itself an array of lines of the defconfig.
|
||||
"""
|
||||
|
||||
with open(toolchains_csv) as r:
|
||||
# filter empty lines and comments
|
||||
lines = [ t for t in r.readlines() if len(t.strip()) > 0 and t[0] != '#' ]
|
||||
toolchains = decode_byte_list(lines)
|
||||
configs = []
|
||||
|
||||
(_, _, _, _, hostarch) = os.uname()
|
||||
# ~2015 distros report x86 when on a 32bit install
|
||||
if hostarch == 'i686' or hostarch == 'i386' or hostarch == 'x86':
|
||||
hostarch = 'x86'
|
||||
|
||||
for row in csv.reader(toolchains):
|
||||
config = {}
|
||||
configfile = row[0]
|
||||
config_hostarch = row[1]
|
||||
keep = False
|
||||
|
||||
# Keep all toolchain configs that work regardless of the host
|
||||
# architecture
|
||||
if config_hostarch == "any":
|
||||
keep = True
|
||||
|
||||
# Keep all toolchain configs that can work on the current host
|
||||
# architecture
|
||||
if hostarch == config_hostarch:
|
||||
keep = True
|
||||
|
||||
# Assume that x86 32 bits toolchains work on x86_64 build
|
||||
# machines
|
||||
if hostarch == 'x86_64' and config_hostarch == "x86":
|
||||
keep = True
|
||||
|
||||
if not keep:
|
||||
continue
|
||||
|
||||
if not os.path.isabs(configfile):
|
||||
configfile = os.path.join(buildrootdir, configfile)
|
||||
|
||||
with open(configfile) as r:
|
||||
config = r.readlines()
|
||||
configs.append(config)
|
||||
return configs
|
||||
|
||||
|
||||
def is_toolchain_usable(configfile, config):
|
||||
"""Check if the toolchain is actually usable."""
|
||||
|
||||
with open(configfile) as configf:
|
||||
configlines = configf.readlines()
|
||||
|
||||
# Check that the toolchain configuration is still present
|
||||
for toolchainline in config:
|
||||
if toolchainline not in configlines:
|
||||
print("WARN: toolchain can't be used", file=sys.stderr)
|
||||
print(" Missing: %s" % toolchainline.strip(), file=sys.stderr)
|
||||
return False
|
||||
|
||||
# The latest Linaro toolchains on x86-64 hosts requires glibc
|
||||
# 2.14+ on the host.
|
||||
if platform.machine() == 'x86_64':
|
||||
if 'BR2_TOOLCHAIN_EXTERNAL_LINARO_ARM=y\n' in configlines or \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_LINARO_AARCH64=y\n' in configlines or \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_LINARO_ARMEB=y\n' in configlines:
|
||||
ldd_version_output = subprocess.check_output(['ldd', '--version'])
|
||||
glibc_version = ldd_version_output.splitlines()[0].split()[-1]
|
||||
if StrictVersion('2.14') > StrictVersion(glibc_version):
|
||||
print("WARN: ignoring the Linaro ARM toolchains because too old host glibc", file=sys.stderr)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def fixup_config(configfile):
|
||||
"""Finalize the configuration and reject any problematic combinations
|
||||
|
||||
This function returns 'True' when the configuration has been
|
||||
accepted, and 'False' when the configuration has not been accepted because
|
||||
it is known to fail (in which case another random configuration will be
|
||||
generated).
|
||||
"""
|
||||
|
||||
sysinfo = SystemInfo()
|
||||
with open(configfile) as configf:
|
||||
configlines = configf.readlines()
|
||||
|
||||
if "BR2_NEEDS_HOST_JAVA=y\n" in configlines and not sysinfo.has("java"):
|
||||
return False
|
||||
if "BR2_NEEDS_HOST_JAVAC=y\n" in configlines and not sysinfo.has("javac"):
|
||||
return False
|
||||
if "BR2_NEEDS_HOST_JAR=y\n" in configlines and not sysinfo.has("jar"):
|
||||
return False
|
||||
# python-nfc needs bzr
|
||||
if 'BR2_PACKAGE_PYTHON_NFC=y\n' in configlines and not sysinfo.has("bzr"):
|
||||
return False
|
||||
# The ctng toolchain is affected by PR58854
|
||||
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# The ctng toolchain tigger an assembler error with guile package when compiled with -Os (same issue as for CS ARM 2014.05-29)
|
||||
if 'BR2_PACKAGE_GUILE=y\n' in configlines and \
|
||||
'BR2_OPTIMIZE_S=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# The ctng toolchain is affected by PR58854
|
||||
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv6-ctng-linux-uclibcgnueabi.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# The ctng toolchain is affected by PR58854
|
||||
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv7-ctng-linux-gnueabihf.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# The ctng toolchain is affected by PR60155
|
||||
if 'BR2_PACKAGE_SDL=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# The ctng toolchain is affected by PR60155
|
||||
if 'BR2_PACKAGE_LIBMPEG2=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
|
||||
if 'BR2_PACKAGE_STRONGSWAN=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
|
||||
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# libffi not available on sh2a and ARMv7-M, but propagating libffi
|
||||
# arch dependencies in Buildroot is really too much work, so we
|
||||
# handle this here.
|
||||
if 'BR2_sh2a=y\n' in configlines and \
|
||||
'BR2_PACKAGE_LIBFFI=y\n' in configlines:
|
||||
return False
|
||||
if 'BR2_ARM_CPU_ARMV7M=y\n' in configlines and \
|
||||
'BR2_PACKAGE_LIBFFI=y\n' in configlines:
|
||||
return False
|
||||
if 'BR2_PACKAGE_SUNXI_BOARDS=y\n' in configlines:
|
||||
configlines.remove('BR2_PACKAGE_SUNXI_BOARDS_FEX_FILE=""\n')
|
||||
configlines.append('BR2_PACKAGE_SUNXI_BOARDS_FEX_FILE="a10/hackberry.fex"\n')
|
||||
# This MIPS uClibc toolchain fails to build the gdb package
|
||||
if 'BR2_PACKAGE_GDB=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS uClibc toolchain fails to build the rt-tests package
|
||||
if 'BR2_PACKAGE_RT_TESTS=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS uClibc toolchain fails to build the civetweb package
|
||||
if 'BR2_PACKAGE_CIVETWEB=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS ctng toolchain fails to build the python3 package
|
||||
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS uClibc toolchain fails to build the strace package
|
||||
if 'BR2_PACKAGE_STRACE=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS uClibc toolchain fails to build the cdrkit package
|
||||
if 'BR2_PACKAGE_CDRKIT=y\n' in configlines and \
|
||||
'BR2_STATIC_LIBS=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# uClibc vfork static linking issue
|
||||
if 'BR2_PACKAGE_ALSA_LIB=y\n' in configlines and \
|
||||
'BR2_STATIC_LIBS=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/i486-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# This MIPS uClibc toolchain fails to build the weston package
|
||||
if 'BR2_PACKAGE_WESTON=y\n' in configlines and \
|
||||
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
|
||||
return False
|
||||
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
|
||||
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
|
||||
'BR2_PACKAGE_BOOST=y\n' in configlines:
|
||||
return False
|
||||
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
|
||||
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
|
||||
'BR2_PACKAGE_QT5BASE_GUI=y\n' in configlines:
|
||||
return False
|
||||
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
|
||||
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
|
||||
'BR2_PACKAGE_QT_GUI_MODULE=y\n' in configlines:
|
||||
return False
|
||||
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
|
||||
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
|
||||
'BR2_PACKAGE_FLANN=y\n' in configlines:
|
||||
return False
|
||||
# or1k affected by binutils PR21464
|
||||
if 'BR2_or1k=y\n' in configlines and \
|
||||
'BR2_PACKAGE_QT_GUI_MODULE=y\n' in configlines:
|
||||
return False
|
||||
|
||||
with open(configfile, "w+") as configf:
|
||||
configf.writelines(configlines)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def gen_config(args):
|
||||
"""Generate a new random configuration
|
||||
|
||||
This function generates the configuration, by choosing a random
|
||||
toolchain configuration and then generating a random selection of
|
||||
packages.
|
||||
"""
|
||||
|
||||
# Select a random toolchain configuration
|
||||
configs = get_toolchain_configs(args.toolchains_csv, args.buildrootdir)
|
||||
|
||||
i = randint(0, len(configs) - 1)
|
||||
toolchainconfig = configs[i]
|
||||
|
||||
configlines = list(toolchainconfig)
|
||||
|
||||
# Combine with the minimal configuration
|
||||
minimalconfigfile = os.path.join(args.buildrootdir,
|
||||
'support/config-fragments/minimal.config')
|
||||
with open(minimalconfigfile) as minimalf:
|
||||
configlines += minimalf.readlines()
|
||||
|
||||
# Allow hosts with old certificates to download over https
|
||||
configlines.append("BR2_WGET=\"wget --passive-ftp -nd -t 3 --no-check-certificate\"\n")
|
||||
|
||||
# Amend the configuration with a few things.
|
||||
if randint(0, 20) == 0:
|
||||
configlines.append("BR2_ENABLE_DEBUG=y\n")
|
||||
if randint(0, 1) == 0:
|
||||
configlines.append("BR2_INIT_BUSYBOX=y\n")
|
||||
elif randint(0, 15) == 0:
|
||||
configlines.append("BR2_INIT_SYSTEMD=y\n")
|
||||
elif randint(0, 10) == 0:
|
||||
configlines.append("BR2_ROOTFS_DEVICE_CREATION_DYNAMIC_EUDEV=y\n")
|
||||
if randint(0, 20) == 0:
|
||||
configlines.append("BR2_STATIC_LIBS=y\n")
|
||||
if randint(0, 20) == 0:
|
||||
configlines.append("BR2_PACKAGE_PYTHON_PY_ONLY=y\n")
|
||||
|
||||
# Write out the configuration file
|
||||
if not os.path.exists(args.outputdir):
|
||||
os.makedirs(args.outputdir)
|
||||
if args.outputdir == os.path.abspath(os.path.join(args.buildrootdir, "output")):
|
||||
configfile = os.path.join(args.buildrootdir, ".config")
|
||||
else:
|
||||
configfile = os.path.join(args.outputdir, ".config")
|
||||
with open(configfile, "w+") as configf:
|
||||
configf.writelines(configlines)
|
||||
|
||||
subprocess.check_call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
|
||||
"olddefconfig"])
|
||||
|
||||
if not is_toolchain_usable(configfile, toolchainconfig):
|
||||
return 2
|
||||
|
||||
# Now, generate the random selection of packages, and fix up
|
||||
# things if needed.
|
||||
# Safeguard, in case we cannot quickly come to a valid
|
||||
# configuration: allow at most 100 (arbitrary) iterations.
|
||||
bounded_loop = 100
|
||||
while True:
|
||||
if bounded_loop == 0:
|
||||
print("ERROR: cannot generate random configuration after 100 iterations",
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
bounded_loop -= 1
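# randpackageconfig enables a random subset of packages;
# KCONFIG_PROBABILITY sets (roughly) the percentage of options that
# get switched on, drawn here between 1 and 30.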
subprocess.check_call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
|
||||
"KCONFIG_PROBABILITY=%d" % randint(1, 30),
|
||||
"randpackageconfig"])
|
||||
|
||||
if fixup_config(configfile):
|
||||
break
|
||||
|
||||
subprocess.check_call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
|
||||
"olddefconfig"])
|
||||
|
||||
subprocess.check_call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
|
||||
"savedefconfig"])
|
||||
|
||||
return subprocess.call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
|
||||
"core-dependencies"])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import argparse
|
||||
parser = argparse.ArgumentParser(description="Generate a random configuration")
|
||||
parser.add_argument("--outputdir", "-o",
|
||||
help="Output directory (relative to current directory)",
|
||||
type=str, default='output')
|
||||
parser.add_argument("--buildrootdir", "-b",
|
||||
help="Buildroot directory (relative to current directory)",
|
||||
type=str, default='.')
|
||||
parser.add_argument("--toolchains-csv",
|
||||
help="Path of the toolchain configuration file",
|
||||
type=str,
|
||||
default="support/config-fragments/autobuild/toolchain-configs.csv")
|
||||
args = parser.parse_args()
|
||||
|
||||
# We need the absolute path to use with O=, because the relative
|
||||
# path to the output directory here is not relative to the
|
||||
# Buildroot sources, but to the current directory.
|
||||
args.outputdir = os.path.abspath(args.outputdir)
|
||||
|
||||
try:
|
||||
ret = gen_config(args)
|
||||
except Exception as e:
|
||||
print(str(e), file=sys.stderr)
|
||||
parser.exit(1)
|
||||
parser.exit(ret)
|
||||
109
bsp/buildroot/utils/get-developers
Executable file
109
bsp/buildroot/utils/get-developers
Executable file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import argparse
|
||||
import getdeveloperlib
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('patches', metavar='P', type=argparse.FileType('r'), nargs='*',
|
||||
help='list of patches (use - to read patches from stdin)')
|
||||
parser.add_argument('-a', dest='architecture', action='store',
|
||||
help='find developers in charge of this architecture')
|
||||
parser.add_argument('-p', dest='package', action='store',
|
||||
help='find developers in charge of this package')
|
||||
parser.add_argument('-f', dest='files', nargs='*',
|
||||
help='find developers in charge of these files')
|
||||
parser.add_argument('-c', dest='check', action='store_const',
|
||||
const=True, help='list files not handled by any developer')
|
||||
parser.add_argument('-e', dest='email', action='store_const',
|
||||
const=True, help='only list affected developer email addresses')
|
||||
return parser.parse_args()
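# Typical invocations (illustrative; package and patch names are made up):
#   ./utils/get-developers -p libfoo        developers in charge of libfoo
#   ./utils/get-developers 0001-fix.patch   Cc list for a patch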
|
||||
|
||||
|
||||
def __main__():
|
||||
# DEVELOPERS is one level up from here
|
||||
devs_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
|
||||
devs = getdeveloperlib.parse_developers(devs_dir)
|
||||
if devs is None:
|
||||
sys.exit(1)
|
||||
args = parse_args()
|
||||
|
||||
# Check that only one action is given
|
||||
action = 0
|
||||
if args.architecture is not None:
|
||||
action += 1
|
||||
if args.package is not None:
|
||||
action += 1
|
||||
if args.files:
|
||||
action += 1
|
||||
if args.check:
|
||||
action += 1
|
||||
if len(args.patches) != 0:
|
||||
action += 1
|
||||
if action > 1:
|
||||
print("Cannot do more than one action")
|
||||
return
|
||||
if action == 0:
|
||||
print("No action specified")
|
||||
return
|
||||
|
||||
# Handle the check action
|
||||
if args.check:
|
||||
files = getdeveloperlib.check_developers(devs, devs_dir)
|
||||
for f in files:
|
||||
print(f)
|
||||
|
||||
# Handle the architecture action
|
||||
if args.architecture is not None:
|
||||
for dev in devs:
|
||||
if args.architecture in dev.architectures:
|
||||
print(dev.name)
|
||||
return
|
||||
|
||||
# Handle the package action
|
||||
if args.package is not None:
|
||||
for dev in devs:
|
||||
if args.package in dev.packages:
|
||||
print(dev.name)
|
||||
return
|
||||
|
||||
# Handle the files action
|
||||
if args.files is not None:
|
||||
args.files = [os.path.abspath(f) for f in args.files]
|
||||
for dev in devs:
|
||||
for devfile in dev.files:
|
||||
commonfiles = [f for f in args.files if f.startswith(devfile)]
|
||||
if commonfiles:
|
||||
print(dev.name)
|
||||
break
|
||||
|
||||
# Handle the patches action
|
||||
if len(args.patches) != 0:
|
||||
(files, infras) = getdeveloperlib.analyze_patches(args.patches)
|
||||
matching_devs = set()
|
||||
for dev in devs:
|
||||
# See if we have developers matching by package name
|
||||
for f in files:
|
||||
if dev.hasfile(f):
|
||||
matching_devs.add(dev.name)
|
||||
# See if we have developers matching by package infra
|
||||
for i in infras:
|
||||
if i in dev.infras:
|
||||
matching_devs.add(dev.name)
|
||||
|
||||
if args.email:
|
||||
for dev in matching_devs:
|
||||
print(dev)
|
||||
else:
|
||||
result = "--to buildroot@buildroot.org"
|
||||
for dev in matching_devs:
|
||||
result += " --cc \"%s\"" % dev
|
||||
|
||||
if result != "":
|
||||
print("git send-email %s" % result)
|
||||
|
||||
|
||||
__main__()
|
||||
212
bsp/buildroot/utils/getdeveloperlib.py
Normal file
212
bsp/buildroot/utils/getdeveloperlib.py
Normal file
@@ -0,0 +1,212 @@
|
||||
import os
|
||||
import re
|
||||
import glob
|
||||
import subprocess
|
||||
|
||||
#
|
||||
# Patch parsing functions
|
||||
#
|
||||
|
||||
FIND_INFRA_IN_PATCH = re.compile("^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")
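# e.g. an added line "+$(eval $(host-generic-package))" matches, with
# group(2) == "generic" identifying the package infrastructure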
|
||||
|
||||
|
||||
def analyze_patch(patch):
|
||||
"""Parse one patch and return the list of files modified, added or
|
||||
removed by the patch."""
|
||||
files = set()
|
||||
infras = set()
|
||||
for line in patch:
|
||||
# If the patch is adding a package, find which infra it is
|
||||
m = FIND_INFRA_IN_PATCH.match(line)
|
||||
if m:
|
||||
infras.add(m.group(2))
|
||||
if not line.startswith("+++ "):
|
||||
continue
|
||||
fname = line[line.find("/") + 1:].strip()
|
||||
if fname == "dev/null":
|
||||
continue
|
||||
files.add(fname)
|
||||
return (files, infras)
|
||||
|
||||
|
||||
FIND_INFRA_IN_MK = re.compile("^\$\(eval \$\((host-)?([^-]*)-package\)\)$")
|
||||
|
||||
|
||||
def fname_get_package_infra(fname):
|
||||
"""Checks whether the file name passed as argument is a Buildroot .mk
|
||||
file describing a package, and finds the infrastructure it's using."""
|
||||
if not fname.endswith(".mk"):
|
||||
return None
|
||||
|
||||
if not os.path.exists(fname):
|
||||
return None
|
||||
|
||||
with open(fname, "r") as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
m = FIND_INFRA_IN_MK.match(line)
|
||||
if m:
|
||||
return m.group(2)
|
||||
return None
|
||||
|
||||
|
||||
def get_infras(files):
|
||||
"""Search in the list of files for .mk files, and collect the package
|
||||
infrastructures used by those .mk files."""
|
||||
infras = set()
|
||||
for fname in files:
|
||||
infra = fname_get_package_infra(fname)
|
||||
if infra:
|
||||
infras.add(infra)
|
||||
return infras
|
||||
|
||||
|
||||
def analyze_patches(patches):
|
||||
"""Parse a list of patches and returns the list of files modified,
|
||||
added or removed by the patches, as well as the list of package
|
||||
infrastructures used by those patches (if any)"""
|
||||
allfiles = set()
|
||||
allinfras = set()
|
||||
for patch in patches:
|
||||
(files, infras) = analyze_patch(patch)
|
||||
allfiles = allfiles | files
|
||||
allinfras = allinfras | infras
|
||||
allinfras = allinfras | get_infras(allfiles)
|
||||
return (allfiles, allinfras)
|
||||
|
||||
|
||||
#
|
||||
# DEVELOPERS file parsing functions
|
||||
#
|
||||
|
||||
class Developer:
|
||||
def __init__(self, name, files):
|
||||
self.name = name
|
||||
self.files = files
|
||||
self.packages = parse_developer_packages(files)
|
||||
self.architectures = parse_developer_architectures(files)
|
||||
self.infras = parse_developer_infras(files)
|
||||
|
||||
def hasfile(self, f):
|
||||
f = os.path.abspath(f)
|
||||
for fs in self.files:
|
||||
if f.startswith(fs):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def parse_developer_packages(fnames):
|
||||
"""Given a list of file patterns, travel through the Buildroot source
|
||||
tree to find which packages are implemented by those file
|
||||
patterns, and return a list of those packages."""
|
||||
packages = set()
|
||||
for fname in fnames:
|
||||
for root, dirs, files in os.walk(fname):
|
||||
for f in files:
|
||||
path = os.path.join(root, f)
|
||||
if fname_get_package_infra(path):
|
||||
pkg = os.path.splitext(f)[0]
|
||||
packages.add(pkg)
|
||||
return packages
|
||||
|
||||
|
||||
def parse_arches_from_config_in(fname):
|
||||
"""Given a path to an arch/Config.in.* file, parse it to get the list
|
||||
of BR2_ARCH values for this architecture."""
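# e.g. a line 'default "arm" if BR2_arm' below "config BR2_ARCH"
# contributes "arm" to the returned set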
|
||||
arches = set()
|
||||
with open(fname, "r") as f:
|
||||
parsing_arches = False
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if line == "config BR2_ARCH":
|
||||
parsing_arches = True
|
||||
continue
|
||||
if parsing_arches:
|
||||
m = re.match("^\s*default \"([^\"]*)\".*", line)
|
||||
if m:
|
||||
arches.add(m.group(1))
|
||||
else:
|
||||
parsing_arches = False
|
||||
return arches
|
||||
|
||||
|
||||
def parse_developer_architectures(fnames):
|
||||
"""Given a list of file names, find the ones starting by
|
||||
'arch/Config.in.', and use that to determine the architecture a
|
||||
developer is working on."""
|
||||
arches = set()
|
||||
for fname in fnames:
|
||||
if not re.match("^.*/arch/Config\.in\..*$", fname):
|
||||
continue
|
||||
arches = arches | parse_arches_from_config_in(fname)
|
||||
return arches
|
||||
|
||||
|
||||
def parse_developer_infras(fnames):
|
||||
infras = set()
|
||||
for fname in fnames:
|
||||
m = re.match("^package/pkg-([^.]*).mk$", fname)
|
||||
if m:
|
||||
infras.add(m.group(1))
|
||||
return infras
|
||||
|
||||
|
||||
def parse_developers(basepath=None):
|
||||
"""Parse the DEVELOPERS file and return a list of Developer objects."""
|
||||
developers = []
|
||||
linen = 0
|
||||
if basepath is None:
|
||||
basepath = os.getcwd()
|
||||
else:
|
||||
basepath = os.path.abspath(basepath)
|
||||
with open(os.path.join(basepath, "DEVELOPERS"), "r") as f:
|
||||
files = []
|
||||
name = None
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if line.startswith("#"):
|
||||
continue
|
||||
elif line.startswith("N:"):
|
||||
if name is not None or len(files) != 0:
|
||||
print("Syntax error in DEVELOPERS file, line %d" % linen)
|
||||
name = line[2:].strip()
|
||||
elif line.startswith("F:"):
|
||||
fname = line[2:].strip()
|
||||
dev_files = glob.glob(os.path.join(basepath, fname))
|
||||
if len(dev_files) == 0:
|
||||
print("WARNING: '%s' doesn't match any file" % fname)
|
||||
files += dev_files
|
||||
elif line == "":
|
||||
if not name:
|
||||
continue
|
||||
developers.append(Developer(name, files))
|
||||
files = []
|
||||
name = None
|
||||
else:
|
||||
print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, line))
|
||||
return None
|
||||
linen += 1
|
||||
# handle last developer
|
||||
if name is not None:
|
||||
developers.append(Developer(name, files))
|
||||
return developers
|
||||
|
||||
|
||||
def check_developers(developers, basepath=None):
|
||||
"""Look at the list of files versioned in Buildroot, and returns the
|
||||
list of files that are not handled by any developer"""
|
||||
if basepath is None:
|
||||
basepath = os.getcwd()
|
||||
cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
|
||||
files = subprocess.check_output(cmd).strip().split("\n")
|
||||
unhandled_files = []
|
||||
for f in files:
|
||||
handled = False
|
||||
for d in developers:
|
||||
if d.hasfile(os.path.join(basepath, f)):
|
||||
handled = True
|
||||
break
|
||||
if not handled:
|
||||
unhandled_files.append(f)
|
||||
return unhandled_files
|
||||
45
bsp/buildroot/utils/readme.txt
Normal file
45
bsp/buildroot/utils/readme.txt
Normal file
@@ -0,0 +1,45 @@
|
This directory contains various useful scripts and tools for working
with Buildroot. You need not add this directory to your PATH to use
any of those tools, but you may do so if you want.

brmake
  a script that can be run instead of make; it prepends the date in
  front of each line, redirects all of the build output to a file
  ('br.log' in the current directory), and only outputs the Buildroot
  messages (those lines starting with >>>) on stdout.
  Do not run this script for interactive configuration (e.g. menuconfig)
  or on an unconfigured directory: the output is redirected, so you
  would see nothing.
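  Example (illustrative; extra arguments are passed through to make):
    $ ./utils/brmake -j4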

check-package
  a script that checks the coding style of a package's Config.in and
  .mk files, and also tests them for various types of typos.
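  Example (illustrative; "foo" stands for any package name):
    $ ./utils/check-package package/foo/foo.mk package/foo/Config.in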

genrandconfig
  a script that generates a random configuration, used by the autobuilders
  (http://autobuild.buildroot.org). It selects a random toolchain from
  support/config-fragments/autobuild and randomly selects packages to build.
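  Example (illustrative; -o and -b shown with their default values):
    $ ./utils/genrandconfig -b . -o output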

get-developers
  a script to return the list of people interested in a specific part
  of Buildroot, so they can be Cc:ed on a mail. Accepts a patch as
  input, a package name, or an architecture name.
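  Example (illustrative; "libfoo" stands for any package name):
    $ ./utils/get-developers -p libfoo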

scancpan
  a script to create a Buildroot package by scanning a CPAN module
  description.

scanpypi
  a script to create a Buildroot package by scanning a PyPI package
  description.

size-stats-compare
  a script to compare the rootfs size between two different Buildroot
  configurations. This can be used to identify the size impact of
  a specific option, of a set of specific options, or of an update
  to a newer Buildroot version.

test-pkg
  a script that tests a specific package against a set of various
  toolchains, with the goal of detecting toolchain-related dependencies
  (wchar, threads...).
864
bsp/buildroot/utils/scancpan
Executable file
864
bsp/buildroot/utils/scancpan
Executable file
@@ -0,0 +1,864 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
# This chunk of stuff was generated by App::FatPacker. To find the original
|
||||
# file's code, look for the end of this BEGIN block or the string 'FATPACK'
|
||||
BEGIN {
|
||||
my %fatpacked;
|
||||
|
||||
$fatpacked{"MetaCPAN/API/Tiny.pm"} = <<'METACPAN_API_TINY';
|
||||
package MetaCPAN::API::Tiny;
|
||||
{
|
||||
$MetaCPAN::API::Tiny::VERSION = '1.131730';
|
||||
}
|
||||
use strict;
|
||||
use warnings;
|
||||
# ABSTRACT: A Tiny API client for MetaCPAN
|
||||
|
||||
use Carp;
|
||||
use JSON::PP 'encode_json', 'decode_json';
|
||||
use HTTP::Tiny;
|
||||
|
||||
|
||||
sub new {
|
||||
my ($class, @args) = @_;
|
||||
|
||||
$#_ % 2 == 0
|
||||
or croak 'Arguments must be provided as name/value pairs';
|
||||
|
||||
my %params = @args;
|
||||
|
||||
die 'ua_args must be an array reference'
|
||||
if $params{ua_args} && ref($params{ua_args}) ne 'ARRAY';
|
||||
|
||||
my $self = +{
|
||||
base_url => $params{base_url} || 'http://api.metacpan.org/v0',
|
||||
ua => $params{ua} || HTTP::Tiny->new(
|
||||
$params{ua_args}
|
||||
? @{$params{ua_args}}
|
||||
: (agent => 'MetaCPAN::API::Tiny/'
|
||||
. ($MetaCPAN::API::VERSION || 'xx'))),
|
||||
};
|
||||
|
||||
return bless($self, $class);
|
||||
}
|
||||
|
||||
sub _build_extra_params {
|
||||
my $self = shift;
|
||||
|
||||
@_ % 2 == 0
|
||||
or croak 'Incorrect number of params, must be key/value';
|
||||
|
||||
my %extra = @_;
|
||||
my $ua = $self->{ua};
|
||||
|
||||
foreach my $key (keys %extra)
|
||||
{
|
||||
# The implementation in HTTP::Tiny uses + instead of %20, fix that
|
||||
$extra{$key} = $ua->_uri_escape($extra{$key});
|
||||
$extra{$key} =~ s/\+/%20/g;
|
||||
}
|
||||
|
||||
my $params = join '&', map { "$_=" . $extra{$_} } sort keys %extra;
|
||||
|
||||
return $params;
|
||||
}
|
||||
|
||||
|
||||
# /source/{author}/{release}/{path}
|
||||
sub source {
|
||||
my $self = shift;
|
||||
my %opts = @_ ? @_ : ();
|
||||
my $url = '';
|
||||
my $error = "Provide 'author' and 'release' and 'path'";
|
||||
|
||||
%opts or croak $error;
|
||||
|
||||
if (
|
||||
defined ( my $author = $opts{'author'} ) &&
|
||||
defined ( my $release = $opts{'release'} ) &&
|
||||
defined ( my $path = $opts{'path'} )
|
||||
) {
|
||||
$url = "source/$author/$release/$path";
|
||||
} else {
|
||||
croak $error;
|
||||
}
|
||||
|
||||
$url = $self->{base_url} . "/$url";
|
||||
|
||||
my $result = $self->{ua}->get($url);
|
||||
$result->{'success'}
|
||||
or croak "Failed to fetch '$url': " . $result->{'reason'};
|
||||
|
||||
return $result->{'content'};
|
||||
}
|
||||
|
||||
|
||||
# /release/{distribution}
|
||||
# /release/{author}/{release}
|
||||
sub release {
|
||||
my $self = shift;
|
||||
my %opts = @_ ? @_ : ();
|
||||
my $url = '';
|
||||
my $error = "Either provide 'distribution', or 'author' and 'release', " .
|
||||
"or 'search'";
|
||||
|
||||
%opts or croak $error;
|
||||
|
||||
my %extra_opts = ();
|
||||
|
||||
if ( defined ( my $dist = $opts{'distribution'} ) ) {
|
||||
$url = "release/$dist";
|
||||
} elsif (
|
||||
defined ( my $author = $opts{'author'} ) &&
|
||||
defined ( my $release = $opts{'release'} )
|
||||
) {
|
||||
$url = "release/$author/$release";
|
||||
} elsif ( defined ( my $search_opts = $opts{'search'} ) ) {
|
||||
ref $search_opts && ref $search_opts eq 'HASH'
|
||||
or croak $error;
|
||||
|
||||
%extra_opts = %{$search_opts};
|
||||
$url = 'release/_search';
|
||||
} else {
|
||||
croak $error;
|
||||
}
|
||||
|
||||
return $self->fetch( $url, %extra_opts );
|
||||
}
|
||||
|
||||
|
||||
# /pod/{module}
|
||||
# /pod/{author}/{release}/{path}
|
||||
sub pod {
|
||||
my $self = shift;
|
||||
my %opts = @_ ? @_ : ();
|
||||
my $url = '';
|
||||
my $error = "Either provide 'module' or 'author' and 'release' and 'path'";
|
||||
|
||||
%opts or croak $error;
|
||||
|
||||
if ( defined ( my $module = $opts{'module'} ) ) {
|
||||
$url = "pod/$module";
|
||||
} elsif (
|
||||
defined ( my $author = $opts{'author'} ) &&
|
||||
defined ( my $release = $opts{'release'} ) &&
|
||||
defined ( my $path = $opts{'path'} )
|
||||
) {
|
||||
$url = "pod/$author/$release/$path";
|
||||
} else {
|
||||
croak $error;
|
||||
}
|
||||
|
||||
# check content-type
|
||||
my %extra = ();
|
||||
if ( defined ( my $type = $opts{'content-type'} ) ) {
|
||||
$type =~ m{^ text/ (?: html|plain|x-pod|x-markdown ) $}x
|
||||
or croak 'Incorrect content-type provided';
|
||||
|
||||
$extra{headers}{'content-type'} = $type;
|
||||
}
|
||||
|
||||
$url = $self->{base_url}. "/$url";
|
||||
|
||||
my $result = $self->{ua}->get( $url, \%extra );
|
||||
$result->{'success'}
|
||||
or croak "Failed to fetch '$url': " . $result->{'reason'};
|
||||
|
||||
return $result->{'content'};
|
||||
}
|
||||
|
||||
|
||||
# /module/{module}
|
||||
sub module {
|
||||
my $self = shift;
|
||||
my $name = shift;
|
||||
|
||||
$name or croak 'Please provide a module name';
|
||||
|
||||
return $self->fetch("module/$name");
|
||||
}
|
||||
|
||||
|
||||
# file() is a synonym of module
|
||||
sub file { goto &module }
|
||||
|
||||
|
||||
# /author/{author}
|
||||
sub author {
|
||||
my $self = shift;
|
||||
my ( $pause_id, $url, %extra_opts );
|
||||
|
||||
if ( @_ == 1 ) {
|
||||
$url = 'author/' . shift;
|
||||
} elsif ( @_ == 2 ) {
|
||||
my %opts = @_;
|
||||
|
||||
if ( defined $opts{'pauseid'} ) {
|
||||
$url = "author/" . $opts{'pauseid'};
|
||||
} elsif ( defined $opts{'search'} ) {
|
||||
my $search_opts = $opts{'search'};
|
||||
|
||||
ref $search_opts && ref $search_opts eq 'HASH'
|
||||
or croak "'search' key must be hashref";
|
||||
|
||||
%extra_opts = %{$search_opts};
|
||||
$url = 'author/_search';
|
||||
} else {
|
||||
croak 'Unknown option given';
|
||||
}
|
||||
} else {
|
||||
croak 'Please provide an author PAUSEID or a "search"';
|
||||
}
|
||||
|
||||
return $self->fetch( $url, %extra_opts );
|
||||
}
|
||||
|
||||
|
||||
|
||||
sub fetch {
|
||||
my $self = shift;
|
||||
my $url = shift;
|
||||
my $extra = $self->_build_extra_params(@_);
|
||||
my $base = $self->{base_url};
|
||||
my $req_url = $extra ? "$base/$url?$extra" : "$base/$url";
|
||||
|
||||
my $result = $self->{ua}->get($req_url);
|
||||
return $self->_decode_result( $result, $req_url );
|
||||
}
|
||||
|
||||
|
||||
sub post {
|
||||
my $self = shift;
|
||||
my $url = shift;
|
||||
my $query = shift;
|
||||
my $base = $self->{base_url};
|
||||
|
||||
defined $url
|
||||
or croak 'First argument of URL must be provided';
|
||||
|
||||
ref $query and ref $query eq 'HASH'
|
||||
or croak 'Second argument of query hashref must be provided';
|
||||
|
||||
my $query_json = encode_json( $query );
|
||||
my $result = $self->{ua}->request(
|
||||
'POST',
|
||||
"$base/$url",
|
||||
{
|
||||
headers => { 'Content-Type' => 'application/json' },
|
||||
content => $query_json,
|
||||
}
|
||||
);
|
||||
|
||||
return $self->_decode_result( $result, $url, $query_json );
|
||||
}
|
||||
|
||||
sub _decode_result {
|
||||
my $self = shift;
|
||||
my ( $result, $url, $original ) = @_;
|
||||
my $decoded_result;
|
||||
|
||||
ref $result and ref $result eq 'HASH'
|
||||
or croak 'First argument must be hashref';
|
||||
|
||||
defined $url
|
||||
or croak 'Second argument of a URL must be provided';
|
||||
|
||||
if ( defined ( my $success = $result->{'success'} ) ) {
|
||||
my $reason = $result->{'reason'} || '';
|
||||
$reason .= ( defined $original ? " (request: $original)" : '' );
|
||||
|
||||
$success or croak "Failed to fetch '$url': $reason";
|
||||
} else {
|
||||
croak 'Missing success in return value';
|
||||
}
|
||||
|
||||
defined ( my $content = $result->{'content'} )
|
||||
or croak 'Missing content in return value';
|
||||
|
||||
eval { $decoded_result = decode_json $content; 1 }
|
||||
or do { croak "Couldn't decode '$content': $@" };
|
||||
|
||||
return $decoded_result;
|
||||
}
|
||||
|
||||
1;
|
||||
|
||||
__END__
|
||||
|
||||
=pod
|
||||
|
||||
=head1 NAME
|
||||
|
||||
MetaCPAN::API::Tiny - A Tiny API client for MetaCPAN
|
||||
|
||||
=head1 VERSION
|
||||
|
||||
version 1.131730
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
This is the Tiny version of L<MetaCPAN::API>. It implements a compatible API
|
||||
with a few notable exceptions:
|
||||
|
||||
=over 4
|
||||
|
||||
=item Attributes are direct hash access
|
||||
|
||||
The attributes defined using Mo(o|u)se are now accessed via the blessed hash
|
||||
directly. There are no accessors defined to access these elements.
|
||||
|
||||
=item Exception handling
|
||||
|
||||
Instead of using Try::Tiny, raw evals are used. This could potentially cause
|
||||
issues, so just be aware.
|
||||
|
||||
=item Testing
|
||||
|
||||
Test::Fatal was replaced with an eval implementation of exception().
|
||||
Test::TinyMocker usage is retained, but may be absorbed since it is pure perl.
|
||||
|
||||
=back
|
||||
|
||||
=head1 CLASS_METHODS
|
||||
|
||||
=head2 new
|
||||
|
||||
new is the constructor for MetaCPAN::API::Tiny. In the non-tiny version of this
|
||||
module, this is provided via Any::Moose built from the attributes defined. In
|
||||
the tiny version, we define our own constructor. It takes the same arguments
|
||||
and provides similar checks to MetaCPAN::API with regards to arguments passed.
|
||||
|
||||
=head1 PUBLIC_METHODS
|
||||
|
||||
=head2 source
|
||||
|
||||
my $source = $mcpan->source(
|
||||
author => 'DOY',
|
||||
release => 'Moose-2.0201',
|
||||
path => 'lib/Moose.pm',
|
||||
);
|
||||
|
||||
Searches MetaCPAN for a module or a specific release and returns the plain source.
|
||||
|
||||
=head2 release
|
||||
|
||||
my $result = $mcpan->release( distribution => 'Moose' );
|
||||
|
||||
# or
|
||||
my $result = $mcpan->release( author => 'DOY', release => 'Moose-2.0001' );
|
||||
|
||||
Searches MetaCPAN for a dist.
|
||||
|
||||
You can do complex searches using 'search' parameter:
|
||||
|
||||
# example lifted from MetaCPAN docs
|
||||
my $result = $mcpan->release(
|
||||
search => {
|
||||
author => "OALDERS AND ",
|
||||
filter => "status:latest",
|
||||
fields => "name",
|
||||
size => 1,
|
||||
},
|
||||
);
|
||||
|
||||
=head2 pod
|
||||
|
||||
my $result = $mcpan->pod( module => 'Moose' );
|
||||
|
||||
# or
|
||||
my $result = $mcpan->pod(
|
||||
author => 'DOY',
|
||||
release => 'Moose-2.0201',
|
||||
path => 'lib/Moose.pm',
|
||||
);
|
||||
|
||||
Searches MetaCPAN for a module or a specific release and returns the POD.
|
||||
|
||||
=head2 module
|
||||
|
||||
my $result = $mcpan->module('MetaCPAN::API');
|
||||
|
||||
Searches MetaCPAN and returns a module's ".pm" file.
|
||||
|
||||
=head2 file
|
||||
|
||||
A synonym of L</module>
|
||||
|
||||
=head2 author
|
||||
|
||||
my $result1 = $mcpan->author('XSAWYERX');
|
||||
my $result2 = $mcpan->author( pauseid => 'XSAWYERX' );
|
||||
|
||||
Searches MetaCPAN for a specific author.
|
||||
|
||||
You can do complex searches using 'search' parameter:
|
||||
|
||||
# example lifted from MetaCPAN docs
|
||||
my $result = $mcpan->author(
|
||||
search => {
|
||||
q => 'profile.name:twitter',
|
||||
size => 1,
|
||||
},
|
||||
);
|
||||
|
||||
=head2 fetch
|
||||
|
||||
my $result = $mcpan->fetch('/release/distribution/Moose');
|
||||
|
||||
# with parameters
|
||||
my $more = $mcpan->fetch(
|
||||
'/release/distribution/Moose',
|
||||
param => 'value',
|
||||
);
|
||||
|
||||
This is a helper method for API implementations. It fetches a path from MetaCPAN, decodes the JSON from the content variable and returns it.
|
||||
|
||||
You don't really need to use it, but you can in case you want to write your own extension implementation to MetaCPAN::API.
|
||||
|
||||
It accepts an additional hash as "GET" parameters.
|
||||
|
||||
=head2 post
|
||||
|
||||
# /release&content={"query":{"match_all":{}},"filter":{"prefix":{"archive":"Cache-Cache-1.06"}}}
|
||||
my $result = $mcpan->post(
|
||||
'release',
|
||||
{
|
||||
query => { match_all => {} },
|
||||
filter => { prefix => { archive => 'Cache-Cache-1.06' } },
|
||||
},
|
||||
);
|
||||
|
||||
The POST equivalent of the "fetch()" method. It gets the path and JSON request.
|
||||
|
||||
=head1 THANKS
|
||||
|
||||
Overall the tests and code were ripped directly from MetaCPAN::API and
|
||||
tiny-fied. A big thanks to Sawyer X for writing the original module.
|
||||
|
||||
=head1 AUTHOR
|
||||
|
||||
Nicholas R. Perez <nperez@cpan.org>
|
||||
|
||||
=head1 COPYRIGHT AND LICENSE
|
||||
|
||||
This software is copyright (c) 2013 by Nicholas R. Perez <nperez@cpan.org>.
|
||||
|
||||
This is free software; you can redistribute it and/or modify it under
|
||||
the same terms as the Perl 5 programming language system itself.
|
||||
|
||||
=cut
|
||||
METACPAN_API_TINY
|
||||
|
||||
s/^ //mg for values %fatpacked;
|
||||
|
||||
unshift @INC, sub {
|
||||
if (my $fat = $fatpacked{$_[1]}) {
|
||||
if ($] < 5.008) {
|
||||
return sub {
|
||||
return 0 unless length $fat;
|
||||
$fat =~ s/^([^\n]*\n?)//;
|
||||
$_ = $1;
|
||||
return 1;
|
||||
};
|
||||
}
|
||||
open my $fh, '<', \$fat
|
||||
or die "FatPacker error loading $_[1] (could be a perl installation issue?)";
|
||||
return $fh;
|
||||
}
|
||||
return
|
||||
};
|
||||
|
||||
} # END OF FATPACK CODE
|
||||
|
||||
|
||||
use 5.010;
|
||||
use strict;
|
||||
use warnings;
|
||||
use Fatal qw(open close);
|
||||
|
||||
use Getopt::Long;
|
||||
use Pod::Usage;
|
||||
use File::Basename;
|
||||
use Module::CoreList;
|
||||
use HTTP::Tiny;
|
||||
use Safe;
|
||||
use MetaCPAN::API::Tiny;
|
||||
|
||||
# Below, 5.026 should be aligned with the version of perl actually
|
||||
# bundled in Buildroot:
|
||||
die <<"MSG" if $] < 5.026;
|
||||
This script needs a host perl with the same major version as Buildroot target perl.
|
||||
|
||||
Your current host perl is:
|
||||
$^X
|
||||
version $]
|
||||
|
||||
You may install a local one by running:
|
||||
perlbrew install perl-5.26.0
|
||||
MSG
|
||||
|
||||
my ($help, $man, $quiet, $force, $recommend, $test, $host);
|
||||
my $target = 1;
|
||||
GetOptions( 'help|?' => \$help,
|
||||
'man' => \$man,
|
||||
'quiet|q' => \$quiet,
|
||||
'force|f' => \$force,
|
||||
'host!' => \$host,
|
||||
'target!' => \$target,
|
||||
'recommend' => \$recommend,
|
||||
'test' => \$test
|
||||
) or pod2usage(-exitval => 1);
|
||||
pod2usage(-exitval => 0) if $help;
|
||||
pod2usage(-exitval => 0, -verbose => 2) if $man;
|
||||
pod2usage(-exitval => 1) if scalar @ARGV == 0;
|
||||
|
||||
my %dist; # name -> metacpan data
|
||||
my %need_target; # name -> 1 if target package is needed
|
||||
my %need_host; # name -> 1 if host package is needed
|
||||
my %need_dlopen; # name -> 1 if requires dynamic library
|
||||
my %deps_build; # name -> list of host dependencies
|
||||
my %deps_runtime; # name -> list of target dependencies
|
||||
my %deps_optional; # name -> list of optional target dependencies
|
||||
my %license_files; # name -> list of license files
|
||||
my %checksum; # author -> list of checksum
|
||||
my $mirror = 'http://cpan.metacpan.org'; # a CPAN mirror
|
||||
my $mcpan = MetaCPAN::API::Tiny->new(base_url => 'http://fastapi.metacpan.org/v1');
|
||||
my $ua = HTTP::Tiny->new();
|
||||
|
||||
sub get_checksum {
|
||||
my ($url) = @_;
|
||||
my ($path) = $url =~ m|^[^:/?#]+://[^/?#]*([^?#]*)|;
|
||||
my ($basename, $dirname) = fileparse( $path );
|
||||
unless ($checksum{$dirname}) {
|
||||
my $url = $mirror . $dirname . q{CHECKSUMS};
|
||||
my $response = $ua->get($url);
|
||||
$checksum{$dirname} = $response->{content};
|
||||
}
|
||||
my $chksum = Safe->new->reval($checksum{$dirname});
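# CHECKSUMS is itself Perl source; reval() evaluates it inside a Safe
# compartment, so the fetched code runs with restricted capabilities.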
|
||||
return $chksum->{$basename}, $basename;
|
||||
}
|
||||
|
||||
sub is_xs {
|
||||
my ($manifest) = @_;
|
||||
# This heuristic determines if a module is a native extension, by searching
|
||||
# some file extension types in the MANIFEST of the distribution.
|
||||
# It was inspired by http://deps.cpantesters.org/static/purity.html
|
||||
return $manifest =~ m/\.(swg|xs|c|h|i)[\n\s]/;
|
||||
}
|
||||
|
||||
sub find_license_files {
|
||||
my ($manifest) = @_;
|
||||
my @license_files;
|
||||
foreach (split /\n/, $manifest) {
|
||||
next if m|/|;
|
||||
push @license_files, $_ if m/(ARTISTIC|COPYING|COPYRIGHT|LICENSE)/i;
|
||||
}
|
||||
if (scalar @license_files == 0 && $manifest =~ m/(README)[\n\s]/i) {
|
||||
@license_files = ($1);
|
||||
}
|
||||
return \@license_files;
|
||||
}
|
||||
|
||||
sub fetch {
|
||||
my ($name, $need_target, $need_host, $top) = @_;
|
||||
$need_target{$name} = $need_target if $need_target;
|
||||
$need_host{$name} = $need_host if $need_host;
|
||||
unless ($dist{$name} && !$top) {
|
||||
say qq{fetch ${name}} unless $quiet;
|
||||
my $result = $mcpan->release( distribution => $name );
|
||||
$dist{$name} = $result;
|
||||
eval {
|
||||
my $manifest = $mcpan->source( author => $result->{author},
|
||||
release => $name . q{-} . $result->{version},
|
||||
path => 'MANIFEST' );
|
||||
$need_dlopen{$name} = is_xs( $manifest );
|
||||
$license_files{$name} = find_license_files( $manifest );
|
||||
};
|
||||
if ($@) {
|
||||
warn $@;
|
||||
$license_files{$name} = [];
|
||||
}
|
||||
my %build = ();
|
||||
my %runtime = ();
|
||||
my %optional = ();
|
||||
foreach my $dep (@{$result->{dependency}}) {
|
||||
my $modname = ${$dep}{module};
|
||||
next if $modname eq q{perl};
|
||||
next if $modname =~ m|^Alien|;
|
||||
next if $modname =~ m|^Win32|;
|
||||
next if !($test && $top) && $modname =~ m|^Test|;
|
||||
next if Module::CoreList::is_core( $modname, undef, $] );
|
||||
# we could use the host Module::CoreList data, because host perl and
|
||||
# target perl have the same major version
|
||||
next if ${$dep}{phase} eq q{develop};
|
||||
next if !($test && $top) && ${$dep}{phase} eq q{test};
|
||||
my $distname = $mcpan->module( $modname )->{distribution};
|
||||
if (${$dep}{phase} eq q{runtime}) {
|
||||
if (${$dep}{relationship} eq q{requires}) {
|
||||
$runtime{$distname} = 1;
|
||||
}
|
||||
else {
|
||||
$optional{$distname} = 1 if $recommend && $top;
|
||||
}
|
||||
}
|
||||
else { # configure, build
|
||||
$build{$distname} = 1;
|
||||
}
|
||||
}
|
||||
$deps_build{$name} = [keys %build];
|
||||
$deps_runtime{$name} = [keys %runtime];
|
||||
$deps_optional{$name} = [keys %optional];
|
||||
foreach my $distname (@{$deps_build{$name}}) {
|
||||
fetch( $distname, 0, 1 );
|
||||
}
|
||||
foreach my $distname (@{$deps_runtime{$name}}) {
|
||||
fetch( $distname, $need_target, $need_host );
|
||||
$need_dlopen{$name} ||= $need_dlopen{$distname};
|
||||
}
|
||||
foreach my $distname (@{$deps_optional{$name}}) {
|
||||
fetch( $distname, $need_target, $need_host );
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
foreach my $distname (@ARGV) {
|
||||
# Command-line's distributions
|
||||
fetch( $distname, !!$target, !!$host, 1 );
|
||||
}
|
||||
say scalar keys %dist, q{ packages fetched.} unless $quiet;
|
||||
|
||||
# Buildroot package name: lowercase
|
||||
sub fsname {
|
||||
my $name = shift;
|
||||
$name =~ s|_|-|g;
|
||||
return q{perl-} . lc $name;
|
||||
}
|
||||
|
||||
# Buildroot variable name: uppercase
|
||||
sub brname {
|
||||
my $name = shift;
|
||||
$name =~ s|-|_|g;
|
||||
return uc $name;
|
||||
}
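# For instance, distribution "Try-Tiny" maps to fsname "perl-try-tiny",
# and brname("perl-try-tiny") then gives "PERL_TRY_TINY".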
|
||||
|
||||
while (my ($distname, $dist) = each %dist) {
|
||||
my $fsname = fsname( $distname );
|
||||
my $dirname = q{package/} . $fsname;
|
||||
my $cfgname = $dirname . q{/Config.in};
|
||||
my $mkname = $dirname . q{/} . $fsname . q{.mk};
|
||||
my $hashname = $dirname . q{/} . $fsname . q{.hash};
|
||||
my $brname = brname( $fsname );
|
||||
mkdir $dirname unless -d $dirname;
|
||||
if ($need_target{$distname} && ($force || !-f $cfgname)) {
|
||||
my $abstract = $dist->{abstract};
|
||||
my $homepage = $dist->{resources}->{homepage} || qq{https://metacpan.org/release/${distname}};
|
||||
say qq{write ${cfgname}} unless $quiet;
|
||||
open my $fh, q{>}, $cfgname;
|
||||
say {$fh} qq{config BR2_PACKAGE_${brname}};
|
||||
say {$fh} qq{\tbool "${fsname}"};
|
||||
say {$fh} qq{\tdepends on !BR2_STATIC_LIBS} if $need_dlopen{$distname};
|
||||
foreach my $dep (sort @{$deps_runtime{$distname}}) {
|
||||
my $brdep = brname( fsname( $dep ) );
|
||||
say {$fh} qq{\tselect BR2_PACKAGE_${brdep}};
|
||||
}
|
||||
say {$fh} qq{\thelp};
|
||||
say {$fh} qq{\t ${abstract}\n} if $abstract;
|
||||
say {$fh} qq{\t ${homepage}};
|
||||
if ($need_dlopen{$distname}) {
|
||||
say {$fh} qq{\ncomment "${fsname} needs a toolchain w/ dynamic library"};
|
||||
say {$fh} qq{\tdepends on BR2_STATIC_LIBS};
|
||||
}
|
||||
close $fh;
|
||||
}
|
||||
if ($force || !-f $mkname) {
|
||||
my $version = $dist->{version};
|
||||
my ($path) = $dist->{download_url} =~ m|^[^:/?#]+://[^/?#]*([^?#]*)|;
|
||||
# this URL contains only the scheme, auth and path parts (but no query and fragment parts)
|
||||
# the scheme is not used, because the job is done by the BR download infrastructure
|
||||
# the auth part is not used, because we use $(BR2_CPAN_MIRROR)
|
||||
my ($filename, $directories, $suffix) = fileparse( $path, q{tar.gz}, q{tgz} );
|
||||
$directories =~ s|/$||;
|
||||
my $dependencies = join q{ }, map( { q{host-} . fsname( $_ ); } sort @{$deps_build{$distname}} ),
|
||||
map( { fsname( $_ ); } sort @{$deps_runtime{$distname}} );
|
||||
my $host_dependencies = join q{ }, map { q{host-} . fsname( $_ ); } sort( @{$deps_build{$distname}},
|
||||
@{$deps_runtime{$distname}} );
|
||||
my $license = ref $dist->{license} eq 'ARRAY'
|
||||
? join q{ or }, @{$dist->{license}}
|
||||
: $dist->{license};
|
||||
# BR requires license name as in http://spdx.org/licenses/
|
||||
$license =~ s|apache_2_0|Apache-2.0|;
|
||||
$license =~ s|artistic_2|Artistic-2.0|;
|
||||
$license =~ s|mit|MIT|;
|
||||
$license =~ s|openssl|OpenSSL|;
|
||||
$license =~ s|perl_5|Artistic or GPL-1.0+|;
|
||||
my $license_files = join q{ }, @{$license_files{$distname}};
|
||||
say qq{write ${mkname}} unless $quiet;
|
||||
open my $fh, q{>}, $mkname;
|
||||
say {$fh} qq{################################################################################};
|
||||
say {$fh} qq{#};
|
||||
say {$fh} qq{# ${fsname}};
|
||||
say {$fh} qq{#};
|
||||
say {$fh} qq{################################################################################};
|
||||
say {$fh} qq{};
|
||||
say {$fh} qq{${brname}_VERSION = ${version}};
|
||||
say {$fh} qq{${brname}_SOURCE = ${distname}-\$(${brname}_VERSION).${suffix}};
|
||||
say {$fh} qq{${brname}_SITE = \$(BR2_CPAN_MIRROR)${directories}};
|
||||
say {$fh} qq{${brname}_DEPENDENCIES = ${dependencies}} if $need_target{$distname} && $dependencies;
|
||||
say {$fh} qq{HOST_${brname}_DEPENDENCIES = ${host_dependencies}} if $need_host{$distname} && $host_dependencies;
|
||||
say {$fh} qq{${brname}_LICENSE = ${license}} if $license && $license ne q{unknown};
|
||||
say {$fh} qq{${brname}_LICENSE_FILES = ${license_files}} if $license_files;
|
||||
say {$fh} qq{};
|
||||
foreach (sort @{$deps_optional{$distname}}) {
|
||||
next if grep { $_ eq $distname; } @{$deps_runtime{$_}}; # avoid cyclic dependencies
|
||||
my $opt_brname = brname( $_ );
|
||||
my $opt_fsname = fsname( $_ );
|
||||
say {$fh} qq{ifeq (\$(BR2_PACKAGE_PERL_${opt_brname}),y)};
|
||||
say {$fh} qq{${brname}_DEPENDENCIES += ${opt_fsname}};
|
||||
say {$fh} qq{endif};
|
||||
say {$fh} qq{};
|
||||
}
|
||||
say {$fh} qq{\$(eval \$(perl-package))} if $need_target{$distname};
|
||||
say {$fh} qq{\$(eval \$(host-perl-package))} if $need_host{$distname};
|
||||
close $fh;
|
||||
}
|
||||
if ($force || !-f $hashname) {
|
||||
my ($checksum, $filename) = get_checksum($dist->{download_url});
|
||||
my $md5 = $checksum->{md5};
|
||||
my $sha256 = $checksum->{sha256};
|
||||
say qq{write ${hashname}} unless $quiet;
|
||||
open my $fh, q{>}, $hashname;
|
||||
say {$fh} qq{# retrieved by scancpan from ${mirror}/};
|
||||
say {$fh} qq{md5 ${md5} ${filename}};
|
||||
say {$fh} qq{sha256 ${sha256} ${filename}};
|
||||
close $fh;
|
||||
}
|
||||
}
|
||||
|
||||
my %pkg;
|
||||
my $cfgname = q{package/Config.in};
|
||||
if (-f $cfgname) {
|
||||
open my $fh, q{<}, $cfgname;
|
||||
while (<$fh>) {
|
||||
chomp;
|
||||
$pkg{$_} = 1 if m|package/perl-|;
|
||||
}
|
||||
close $fh;
|
||||
}
|
||||
|
||||
foreach my $distname (keys %need_target) {
|
||||
my $fsname = fsname( $distname );
|
||||
$pkg{qq{\tsource "package/${fsname}/Config.in"}} = 1;
|
||||
}
|
||||
|
||||
say qq{${cfgname} must contain the following lines:};
|
||||
say join qq{\n}, sort keys %pkg;
|
||||
|
||||
__END__
|
||||
|
||||
=head1 NAME
|
||||
|
||||
utils/scancpan Try-Tiny Moo
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
utils/scancpan [options] [distname ...]
|
||||
|
||||
Options:
|
||||
-help
|
||||
-man
|
||||
-quiet
|
||||
-force
|
||||
-target/-notarget
|
||||
-host/-nohost
|
||||
-recommend
|
||||
-test
|
||||
|
||||
=head1 OPTIONS
|
||||
|
||||
=over 8
|
||||
|
||||
=item B<-help>
|
||||
|
||||
Prints a brief help message and exits.
|
||||
|
||||
=item B<-man>
|
||||
|
||||
Prints the manual page and exits.
|
||||
|
||||
=item B<-quiet>
|
||||
|
||||
Executes without output
|
||||
|
||||
=item B<-force>
|
||||
|
||||
Forces the overwriting of existing files.
|
||||
|
||||
=item B<-target/-notarget>
|
||||
|
||||
Switches package generation for the target variant (the default is C<-target>).
|
||||
|
||||
=item B<-host/-nohost>
|
||||
|
||||
Switches package generation for the host variant (the default is C<-nohost>).
|
||||
|
||||
=item B<-recommend>
|
||||
|
||||
Adds I<recommended> dependencies.
|
||||
|
||||
=item B<-test>
|
||||
|
||||
Adds dependencies for test.
|
||||
|
||||
=back
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
This script creates templates of the Buildroot package files for all the
|
||||
Perl/CPAN distributions required by the specified distnames. The
|
||||
dependencies and metadata are fetched from https://metacpan.org/.
|
||||
|
||||
After running this script, it is necessary to check the generated files.
|
||||
You have to manually add the license files (PERL_FOO_LICENSE_FILES variable).
|
||||
For distributions that link against a target library, you have to add the
|
||||
buildroot package name for that library to the DEPENDENCIES variable.
|
||||
|
||||
See the Buildroot documentation for details on the usage of the Perl
|
||||
infrastructure.
|
||||
|
||||
The major version of the host perl must be aligned with the target one,
|
||||
in order to work with the right CoreList data.
|
||||
|
||||
=head1 LICENSE
|
||||
|
||||
Copyright (C) 2013-2017 by Francois Perrad <francois.perrad@gadz.org>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
This script is a part of Buildroot.
|
||||
|
||||
This script requires the module C<MetaCPAN::API::Tiny> (version 1.131730)
|
||||
which was included at the beginning of this file by the tool C<fatpack>.
|
||||
|
||||
See L<http://search.cpan.org/~nperez/MetaCPAN-API-Tiny-1.131730/>.
|
||||
|
||||
See L<http://search.cpan.org/search?query=App-FatPacker&mode=dist>.
|
||||
|
||||
Both of these libraries are free software and may be distributed under the same
|
||||
terms as perl itself.
|
||||
|
||||
And perl may be distributed under the terms of Artistic v1 or GPL v1 license.
|
||||
|
||||
=cut
|
||||
706
bsp/buildroot/utils/scanpypi
Executable file
706
bsp/buildroot/utils/scanpypi
Executable file
@@ -0,0 +1,706 @@
|
||||
#!/usr/bin/env python2
|
||||
"""
|
||||
|
||||
Utility for building Buildroot packages for existing PyPI packages
|
||||
|
||||
Any package built by scanpypi should be manually checked for
|
||||
errors.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import json
|
||||
import urllib2
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import StringIO
|
||||
import tarfile
|
||||
import zipfile
|
||||
import errno
|
||||
import hashlib
|
||||
import re
|
||||
import textwrap
|
||||
import tempfile
|
||||
import imp
|
||||
from functools import wraps
|
||||
|
||||
BUF_SIZE = 65536
|
||||
|
||||
try:
|
||||
import spdx_lookup as liclookup
|
||||
except ImportError:
|
||||
# spdx_lookup is not installed
|
||||
print('spdx_lookup module is not installed. This can lead to an '
|
||||
'inaccurate license detection. Please install it via\n'
|
||||
'pip install spdx_lookup')
|
||||
liclookup = None
|
||||
|
||||
|
||||
def setup_decorator(func, method):
|
||||
"""
|
||||
Decorator for distutils.core.setup and setuptools.setup.
|
||||
Puts the arguments with which setup is called as a dict
|
||||
Add key 'method' which should be either 'setuptools' or 'distutils'.
|
||||
|
||||
Keyword arguments:
|
||||
func -- either setuptools.setup or distutils.core.setup
|
||||
method -- either 'setuptools' or 'distutils'
|
||||
"""
|
||||
|
||||
@wraps(func)
|
||||
def closure(*args, **kwargs):
|
||||
# Any python package calls its setup function to be installed.
|
||||
# Argument 'name' of this setup function is the package's name
|
||||
BuildrootPackage.setup_args[kwargs['name']] = kwargs
|
||||
BuildrootPackage.setup_args[kwargs['name']]['method'] = method
|
||||
return closure
|
||||
|
||||
# monkey patch
|
||||
import setuptools # noqa E402
|
||||
setuptools.setup = setup_decorator(setuptools.setup, 'setuptools')
|
||||
import distutils # noqa E402
|
||||
distutils.core.setup = setup_decorator(setuptools.setup, 'distutils')
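# From here on, executing a package's setup.py records the keyword
# arguments of its setup() call in BuildrootPackage.setup_args instead
# of actually installing anything.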
|
||||
|
||||
|
||||
def find_file_upper_case(filenames, path='./'):
|
||||
"""
|
||||
List generator:
|
||||
Recursively find files that matches one of the specified filenames.
|
||||
Returns a relative path starting with path argument.
|
||||
|
||||
Keyword arguments:
|
||||
filenames -- List of filenames to be found
|
||||
path -- Path to the directory to search
|
||||
"""
|
||||
for root, dirs, files in os.walk(path):
|
||||
for file in files:
|
||||
if file.upper() in filenames:
|
||||
yield (os.path.join(root, file))
|
||||
|
||||
|
||||
def pkg_buildroot_name(pkg_name):
|
||||
"""
|
||||
Returns the Buildroot package name for the PyPI package pkg_name.
|
||||
Remove all non alphanumeric characters except -
|
||||
Also lowers the name and adds the 'python-' prefix
|
||||
|
||||
Keyword arguments:
|
||||
pkg_name -- String to rename
|
||||
"""
|
||||
name = re.sub('[^\w-]', '', pkg_name.lower())
|
||||
prefix = 'python-'
|
||||
pattern = re.compile('^(?!' + prefix + ')(.+?)$')
|
||||
name = pattern.sub(r'python-\1', name)
|
||||
return name
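# e.g. pkg_buildroot_name('werkzeug') returns 'python-werkzeug'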
|
||||
|
||||
|
||||
class DownloadFailed(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class BuildrootPackage():
|
||||
"""This class's methods are not meant to be used individually please
|
||||
use them in the correct order:
|
||||
|
||||
__init__
|
||||
|
||||
download_package
|
||||
|
||||
extract_package
|
||||
|
||||
load_module
|
||||
|
||||
get_requirements
|
||||
|
||||
create_package_mk
|
||||
|
||||
create_hash_file
|
||||
|
||||
create_config_in
|
||||
|
||||
"""
|
||||
setup_args = {}
|
||||
|
||||
def __init__(self, real_name, pkg_folder):
|
||||
self.real_name = real_name
|
||||
self.buildroot_name = pkg_buildroot_name(self.real_name)
|
||||
self.pkg_dir = os.path.join(pkg_folder, self.buildroot_name)
|
||||
self.mk_name = self.buildroot_name.upper().replace('-', '_')
|
||||
self.as_string = None
|
||||
self.md5_sum = None
|
||||
self.metadata = None
|
||||
self.metadata_name = None
|
||||
self.metadata_url = None
|
||||
self.pkg_req = None
|
||||
self.setup_metadata = None
|
||||
self.tmp_extract = None
|
||||
self.used_url = None
|
||||
self.filename = None
|
||||
self.url = None
|
||||
self.version = None
|
||||
self.license_files = []
|
||||
|
||||
def fetch_package_info(self):
|
||||
"""
|
||||
Fetch a package's metadata from the python package index
|
||||
"""
|
||||
self.metadata_url = 'https://pypi.org/pypi/{pkg}/json'.format(
|
||||
pkg=self.real_name)
|
||||
try:
|
||||
pkg_json = urllib2.urlopen(self.metadata_url).read().decode()
|
||||
except urllib2.HTTPError as error:
|
||||
print('ERROR:', error.getcode(), error.msg, file=sys.stderr)
|
||||
print('ERROR: Could not find package {pkg}.\n'
|
||||
'Check syntax inside the python package index:\n'
|
||||
'https://pypi.python.org/pypi/ '
|
||||
.format(pkg=self.real_name))
|
||||
raise
|
||||
except urllib2.URLError:
|
||||
print('ERROR: Could not find package {pkg}.\n'
|
||||
'Check syntax inside the python package index:\n'
|
||||
'https://pypi.python.org/pypi/ '
|
||||
.format(pkg=self.real_name))
|
||||
raise
|
||||
self.metadata = json.loads(pkg_json)
|
||||
self.version = self.metadata['info']['version']
|
||||
self.metadata_name = self.metadata['info']['name']
|
||||
|
||||
def download_package(self):
|
||||
"""
|
||||
Download a package using metadata from pypi
|
||||
"""
|
||||
try:
|
||||
self.metadata['urls'][0]['filename']
|
||||
except IndexError:
|
||||
print(
|
||||
'Non-conventional package, ',
|
||||
'please check carefully after creation')
|
||||
self.metadata['urls'] = [{
|
||||
'packagetype': 'sdist',
|
||||
'url': self.metadata['info']['download_url'],
|
||||
'digests': None}]
|
||||
# In this case, we can't get the name of the downloaded file
|
||||
# from the pypi api, so we need to find it; this should work
|
||||
urlpath = urllib2.urlparse.urlparse(
|
||||
self.metadata['info']['download_url']).path
|
||||
# urlparse().path give something like
|
||||
# /path/to/file-version.tar.gz
|
||||
# We use basename to remove /path/to
|
||||
self.metadata['urls'][0]['filename'] = os.path.basename(urlpath)
|
||||
for download_url in self.metadata['urls']:
|
||||
if 'bdist' in download_url['packagetype']:
|
||||
continue
|
||||
try:
|
||||
print('Downloading package {pkg} from {url}...'.format(
|
||||
pkg=self.real_name, url=download_url['url']))
|
||||
download = urllib2.urlopen(download_url['url'])
|
||||
except urllib2.HTTPError as http_error:
|
||||
download = http_error
|
||||
else:
|
||||
self.used_url = download_url
|
||||
self.as_string = download.read()
|
||||
if not download_url['digests']['md5']:
|
||||
break
|
||||
self.md5_sum = hashlib.md5(self.as_string).hexdigest()
|
||||
if self.md5_sum == download_url['digests']['md5']:
|
||||
break
|
||||
else:
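# for/else: this branch runs only when the loop never hit "break",
# i.e. no sdist archive could be downloaded and verified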
|
||||
if download.__class__ == urllib2.HTTPError:
|
||||
raise download
|
||||
raise DownloadFailed('Failed to download package {pkg}'
|
||||
.format(pkg=self.real_name))
|
||||
self.filename = self.used_url['filename']
|
||||
self.url = self.used_url['url']
|
||||
|
||||
def extract_package(self, tmp_path):
|
||||
"""
|
||||
Extract the package contents into a directory
|
||||
|
||||
Keyword arguments:
|
||||
tmp_path -- directory where you want the package to be extracted
|
||||
"""
|
||||
as_file = StringIO.StringIO(self.as_string)
|
||||
if self.filename[-3:] == 'zip':
|
||||
with zipfile.ZipFile(as_file) as as_zipfile:
|
||||
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
|
||||
try:
|
||||
os.makedirs(tmp_pkg)
|
||||
except OSError as exception:
|
||||
if exception.errno != errno.EEXIST:
|
||||
print("ERROR: ", exception.strerror, file=sys.stderr)
|
||||
return
|
||||
print('WARNING:', exception.strerror, file=sys.stderr)
|
||||
print('Removing {pkg}...'.format(pkg=tmp_pkg))
|
||||
shutil.rmtree(tmp_pkg)
|
||||
os.makedirs(tmp_pkg)
|
||||
as_zipfile.extractall(tmp_pkg)
|
||||
pkg_filename = self.filename.split(".zip")[0]
|
||||
else:
|
||||
with tarfile.open(fileobj=as_file) as as_tarfile:
|
||||
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
|
||||
try:
|
||||
os.makedirs(tmp_pkg)
|
||||
except OSError as exception:
|
||||
if exception.errno != errno.EEXIST:
|
||||
print("ERROR: ", exception.strerror, file=sys.stderr)
|
||||
return
|
||||
print('WARNING:', exception.strerror, file=sys.stderr)
|
||||
print('Removing {pkg}...'.format(pkg=tmp_pkg))
|
||||
shutil.rmtree(tmp_pkg)
|
||||
os.makedirs(tmp_pkg)
|
||||
as_tarfile.extractall(tmp_pkg)
|
||||
pkg_filename = self.filename.split(".tar")[0]
|
||||
|
||||
tmp_extract = '{folder}/{name}'
|
||||
self.tmp_extract = tmp_extract.format(
|
||||
folder=tmp_pkg,
|
||||
name=pkg_filename)
|
||||
|
||||
def load_setup(self):
|
||||
"""
|
||||
Loads the corresponding setup and store its metadata
|
||||
"""
|
||||
current_dir = os.getcwd()
|
||||
os.chdir(self.tmp_extract)
|
||||
sys.path.append(self.tmp_extract)
|
||||
s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract])
|
||||
setup = imp.load_module('setup', s_file, s_path, s_desc)
|
||||
try:
|
||||
self.setup_metadata = self.setup_args[self.metadata_name]
|
||||
except KeyError:
|
||||
# This means setup was not called which most likely mean that it is
|
||||
# called through the if __name__ == '__main__' directive.
|
||||
# In this case, we can only pray that it is called through a
|
||||
# function called main() in setup.py.
|
||||
setup.main() # Will raise AttributeError if not found
|
||||
self.setup_metadata = self.setup_args[self.metadata_name]
|
||||
# Here we must remove the module the hard way.
|
||||
# We must do this because of a very specific case: if a package calls
|
||||
# setup from the __main__ but does not come with a 'main()' function,
|
||||
# for some reason setup.main() will successfully call the main
|
||||
# function of a previous package...
|
||||
sys.modules.pop('setup', None)
|
||||
del setup
|
||||
os.chdir(current_dir)
|
||||
sys.path.remove(self.tmp_extract)
|
||||
|
||||
def get_requirements(self, pkg_folder):
|
||||
"""
|
||||
Retrieve dependencies from the metadata found in the setup.py script of
|
||||
a pypi package.
|
||||
|
||||
Keyword Arguments:
|
||||
pkg_folder -- location of the already created packages
|
||||
"""
|
||||
if 'install_requires' not in self.setup_metadata:
|
||||
self.pkg_req = None
|
||||
return set()
|
||||
self.pkg_req = self.setup_metadata['install_requires']
|
||||
self.pkg_req = [re.sub('([-.\w]+).*', r'\1', req)
|
||||
for req in self.pkg_req]
|
||||
|
||||
# get rid of commented lines and also strip the package strings
|
||||
self.pkg_req = [item.strip() for item in self.pkg_req
|
||||
if len(item) > 0 and item[0] != '#']
|
||||
|
||||
req_not_found = self.pkg_req
|
||||
self.pkg_req = map(pkg_buildroot_name, self.pkg_req)
|
||||
pkg_tuples = zip(req_not_found, self.pkg_req)
|
||||
# pkg_tuples is a list of tuples that looks like
|
||||
# ('werkzeug','python-werkzeug') because I need both when checking if
|
||||
# dependencies already exist or are already in the download list
|
||||
req_not_found = set(
|
||||
pkg[0] for pkg in pkg_tuples
|
||||
if not os.path.isdir(pkg[1])
|
||||
)
|
||||
return req_not_found
|
||||
|
||||
def __create_mk_header(self):
|
||||
"""
|
||||
Create the header of the <package_name>.mk file
|
||||
"""
|
||||
header = ['#' * 80 + '\n']
|
||||
header.append('#\n')
|
||||
header.append('# {name}\n'.format(name=self.buildroot_name))
|
||||
header.append('#\n')
|
||||
header.append('#' * 80 + '\n')
|
||||
header.append('\n')
|
||||
return header
|
||||
|
||||
def __create_mk_download_info(self):
|
||||
"""
|
||||
Create the lines referring to the download information of the
|
||||
<package_name>.mk file
|
||||
"""
|
||||
lines = []
|
||||
version_line = '{name}_VERSION = {version}\n'.format(
|
||||
name=self.mk_name,
|
||||
version=self.version)
|
||||
lines.append(version_line)
|
||||
|
||||
targz = self.filename.replace(
|
||||
self.version,
|
||||
'$({name}_VERSION)'.format(name=self.mk_name))
|
||||
targz_line = '{name}_SOURCE = {filename}\n'.format(
|
||||
name=self.mk_name,
|
||||
filename=targz)
|
||||
lines.append(targz_line)
|
||||
|
||||
if self.filename not in self.url:
|
||||
# Sometimes the filename is in the url, sometimes it's not
|
||||
site_url = self.url
|
||||
else:
|
||||
site_url = self.url[:self.url.find(self.filename)]
|
||||
site_line = '{name}_SITE = {url}'.format(name=self.mk_name,
|
||||
url=site_url)
|
||||
site_line = site_line.rstrip('/') + '\n'
|
||||
lines.append(site_line)
|
||||
return lines
|
||||
|
||||
def __create_mk_setup(self):
|
||||
"""
|
||||
Create the line referring to the setup method of the package of the
|
||||
<package_name>.mk file
|
||||
|
||||
There are two things you can use to make an installer
|
||||
for a python package: distutils or setuptools
|
||||
distutils comes with python but does not support dependencies.
|
||||
distutils is mostly still there for backward support.
|
||||
setuptools is what smart people use,
|
||||
but it is not shipped with python :(
|
||||
"""
|
||||
lines = []
|
||||
setup_type_line = '{name}_SETUP_TYPE = {method}\n'.format(
|
||||
name=self.mk_name,
|
||||
method=self.setup_metadata['method'])
|
||||
lines.append(setup_type_line)
|
||||
return lines
|
||||
|
||||
    def __get_license_names(self, license_files):
        """
        Try to determine the related license name.

        There are two possibilities. Either the script tries to get the
        license name from the package's metadata or, if the spdx_lookup
        package is available, the script compares the license files with the
        SPDX database.
        """
        license_line = ''
        if liclookup is None:
            license_dict = {
                'Apache Software License': 'Apache-2.0',
                'BSD License': 'FIXME: please specify the exact BSD version',
                'European Union Public Licence 1.0': 'EUPL-1.0',
                'European Union Public Licence 1.1': 'EUPL-1.1',
                "GNU General Public License": "GPL",
                "GNU General Public License v2": "GPL-2.0",
                "GNU General Public License v2 or later": "GPL-2.0+",
                "GNU General Public License v3": "GPL-3.0",
                "GNU General Public License v3 or later": "GPL-3.0+",
                "GNU Lesser General Public License v2": "LGPL-2.1",
                "GNU Lesser General Public License v2 or later": "LGPL-2.1+",
                "GNU Lesser General Public License v3": "LGPL-3.0",
                "GNU Lesser General Public License v3 or later": "LGPL-3.0+",
                "GNU Library or Lesser General Public License": "LGPL-2.0",
                "ISC License": "ISC",
                "MIT License": "MIT",
                "Mozilla Public License 1.0": "MPL-1.0",
                "Mozilla Public License 1.1": "MPL-1.1",
                "Mozilla Public License 2.0": "MPL-2.0",
                "Zope Public License": "ZPL"
            }
            regexp = re.compile(r'^License :* *.* *:+ (.*)( \(.*\))?$')
            classifiers_licenses = [regexp.sub(r"\1", lic)
                                    for lic in self.metadata['info']['classifiers']
                                    if regexp.match(lic)]
            licenses = map(lambda x: license_dict[x] if x in license_dict else x,
                           classifiers_licenses)
            if not len(licenses):
                # No usable classifier: fall back to the free-form license
                # field of the metadata, and warn about it.
                licenses = [self.metadata['info']['license']]
                print('WARNING: License has been set to "{license}". It is most'
                      ' likely wrong, please change it if need be'.format(
                          license=', '.join(licenses)))
            license_line = '{name}_LICENSE = {license}\n'.format(
                name=self.mk_name,
                license=', '.join(licenses))
        else:
            license_names = []
            for license_file in license_files:
                with open(license_file) as lic_file:
                    match = liclookup.match(lic_file.read())
                if match.confidence >= 90.0:
                    license_names.append(match.license.id)

            if len(license_names) > 0:
                license_line = ('{name}_LICENSE ='
                                ' {names}\n'.format(
                                    name=self.mk_name,
                                    names=', '.join(license_names)))

        return license_line

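    # Illustrative note (not part of the original script): a classifier such
    # as 'License :: OSI Approved :: MIT License' is reduced by the regexp
    # above to 'MIT License', which license_dict then maps to the SPDX
    # identifier 'MIT'.
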
    def __create_mk_license(self):
        """
        Create the lines referring to the package's license information of the
        <package_name>.mk file

        The license files are found by searching the package (case
        insensitive) for files named license, license.txt etc. If more than
        one license file is found, all of them are listed.
        """
        lines = []

        filenames = ['LICENCE', 'LICENSE', 'LICENSE.RST', 'LICENSE.TXT',
                     'COPYING', 'COPYING.TXT']
        self.license_files = list(find_file_upper_case(filenames, self.tmp_extract))

        lines.append(self.__get_license_names(self.license_files))

        license_files = [license.replace(self.tmp_extract, '')[1:]
                         for license in self.license_files]
        if len(license_files) > 0:
            if len(license_files) > 1:
                print('More than one file found for license:',
                      ', '.join(license_files))
            license_file_line = ('{name}_LICENSE_FILES ='
                                 ' {files}\n'.format(
                                     name=self.mk_name,
                                     files=' '.join(license_files)))
            lines.append(license_file_line)
        else:
            print('WARNING: No license file found,'
                  ' please specify it manually afterwards')
            license_file_line = '# No license file found\n'
            lines.append(license_file_line)

        return lines

    def __create_mk_requirements(self):
        """
        Create the lines referring to the dependencies of the
        <package_name>.mk file, based on the requirements collected in
        self.pkg_req.
        """
        lines = []
        dependencies_line = ('{name}_DEPENDENCIES ='
                             ' {reqs}\n'.format(
                                 name=self.mk_name,
                                 reqs=' '.join(self.pkg_req)))
        lines.append(dependencies_line)
        return lines

    def create_package_mk(self):
        """
        Create the lines corresponding to the <package_name>.mk file
        """
        pkg_mk = '{name}.mk'.format(name=self.buildroot_name)
        path_to_mk = os.path.join(self.pkg_dir, pkg_mk)
        print('Creating {file}...'.format(file=path_to_mk))
        lines = self.__create_mk_header()
        lines += self.__create_mk_download_info()
        lines += self.__create_mk_setup()
        lines += self.__create_mk_license()

        lines.append('\n')
        lines.append('$(eval $(python-package))')
        lines.append('\n')
        with open(path_to_mk, 'w') as mk_file:
            mk_file.writelines(lines)

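    # Illustrative sketch (not part of the original script): for a
    # hypothetical package "flask" version 1.0, the generated .mk would look
    # roughly like:
    #
    #   ################################################################################
    #   #
    #   # python-flask
    #   #
    #   ################################################################################
    #
    #   PYTHON_FLASK_VERSION = 1.0
    #   PYTHON_FLASK_SOURCE = Flask-$(PYTHON_FLASK_VERSION).tar.gz
    #   PYTHON_FLASK_SITE = https://pypi.python.org/packages/source/F/Flask
    #   PYTHON_FLASK_SETUP_TYPE = setuptools
    #   PYTHON_FLASK_LICENSE = BSD-3-Clause
    #   PYTHON_FLASK_LICENSE_FILES = LICENSE
    #
    #   $(eval $(python-package))
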
    def create_hash_file(self):
        """
        Create the lines corresponding to the <package_name>.hash file
        """
        pkg_hash = '{name}.hash'.format(name=self.buildroot_name)
        path_to_hash = os.path.join(self.pkg_dir, pkg_hash)
        print('Creating {filename}...'.format(filename=path_to_hash))
        lines = []
        if self.used_url['digests']['md5'] and self.used_url['digests']['sha256']:
            hash_header = '# md5, sha256 from {url}\n'.format(
                url=self.metadata_url)
            lines.append(hash_header)
            hash_line = '{method}\t{digest} {filename}\n'.format(
                method='md5',
                digest=self.used_url['digests']['md5'],
                filename=self.filename)
            lines.append(hash_line)
            hash_line = '{method}\t{digest} {filename}\n'.format(
                method='sha256',
                digest=self.used_url['digests']['sha256'],
                filename=self.filename)
            lines.append(hash_line)

        if self.license_files:
            lines.append('# Locally computed sha256 checksums\n')
        for license_file in self.license_files:
            sha256 = hashlib.sha256()
            with open(license_file, 'rb') as lic_f:
                while True:
                    data = lic_f.read(BUF_SIZE)
                    if not data:
                        break
                    sha256.update(data)
            hash_line = '{method}\t{digest} {filename}\n'.format(
                method='sha256',
                digest=sha256.hexdigest(),
                filename=os.path.basename(license_file))
            lines.append(hash_line)

        with open(path_to_hash, 'w') as hash_file:
            hash_file.writelines(lines)

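    # Illustrative sketch (not part of the original script; digests are made
    # up and shortened): the generated .hash file would look roughly like:
    #
    #   # md5, sha256 from https://pypi.python.org/pypi/flask/json
    #   md5   abcd...0123 Flask-1.0.tar.gz
    #   sha256   4567...89ab Flask-1.0.tar.gz
    #   # Locally computed sha256 checksums
    #   sha256   cdef...4567 LICENSE
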
    def create_config_in(self):
        """
        Creates the Config.in file of a package
        """
        path_to_config = os.path.join(self.pkg_dir, 'Config.in')
        print('Creating {file}...'.format(file=path_to_config))
        lines = []
        config_line = 'config BR2_PACKAGE_{name}\n'.format(
            name=self.mk_name)
        lines.append(config_line)

        bool_line = '\tbool "{name}"\n'.format(name=self.buildroot_name)
        lines.append(bool_line)
        if self.pkg_req:
            for dep in self.pkg_req:
                dep_line = '\tselect BR2_PACKAGE_{req} # runtime\n'.format(
                    req=dep.upper().replace('-', '_'))
                lines.append(dep_line)

        lines.append('\thelp\n')

        help_lines = textwrap.wrap(self.metadata['info']['summary'],
                                   initial_indent='\t  ',
                                   subsequent_indent='\t  ')

        # make sure a help text is terminated with a full stop
        if help_lines[-1][-1] != '.':
            help_lines[-1] += '.'

        # \t + two spaces is 3 chars long
        help_lines.append('')
        help_lines.append('\t  ' + self.metadata['info']['home_page'])
        help_lines = map(lambda x: x + '\n', help_lines)
        lines += help_lines

        with open(path_to_config, 'w') as config_file:
            config_file.writelines(lines)


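# Illustrative sketch (not part of the original script): for a hypothetical
# package "flask" depending on werkzeug, the generated Config.in would look
# roughly like (indented with tabs as emitted by the code above):
#
#   config BR2_PACKAGE_PYTHON_FLASK
#           bool "python-flask"
#           select BR2_PACKAGE_PYTHON_WERKZEUG # runtime
#           help
#             A microframework based on Werkzeug, Jinja2 and good intentions.
#
#             http://flask.pocoo.org/

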
def main():
    # Building the parser
    parser = argparse.ArgumentParser(
        description="Create buildroot packages from the metadata of "
                    "existing PyPI packages and make them available in "
                    "menuconfig")
    parser.add_argument("packages",
                        help="list of packages to be created",
                        nargs='+')
    parser.add_argument("-o", "--output",
                        help="""
                        Output directory for packages.
                        Default is ./package
                        """,
                        default='./package')

    args = parser.parse_args()
    packages = list(set(args.packages))

    # tmp_path is where we'll extract the files later
    tmp_prefix = 'scanpypi-'
    pkg_folder = args.output
    tmp_path = tempfile.mkdtemp(prefix=tmp_prefix)
    try:
        for real_pkg_name in packages:
            package = BuildrootPackage(real_pkg_name, pkg_folder)
            print('buildroot package name for {}:'.format(package.real_name),
                  package.buildroot_name)
            # First we download the package
            # Most of the info we need can only be found inside the package
            print('Package:', package.buildroot_name)
            print('Fetching package', package.real_name)
            try:
                package.fetch_package_info()
            except (urllib2.URLError, urllib2.HTTPError):
                continue
            if package.metadata_name.lower() == 'setuptools':
                # setuptools imports itself, which does not work very well
                # with the monkey patch applied at the beginning
                print('Error: setuptools cannot be built using scanPyPI')
                continue

            try:
                package.download_package()
            except urllib2.HTTPError as error:
                print('Error: {code} {reason}'.format(code=error.code,
                                                      reason=error.reason))
                print('Error downloading package :', package.buildroot_name)
                print()
                continue

            # extract the tarball
            try:
                package.extract_package(tmp_path)
            except (tarfile.ReadError, zipfile.BadZipfile):
                print('Error extracting package {}'.format(package.real_name))
                print()
                continue

            # Loading the package install info from the package
            try:
                package.load_setup()
            except ImportError as err:
                if 'buildutils' in err.message:
                    print('This package needs buildutils')
                else:
                    raise
                continue
            except AttributeError as error:
                print('Error: Could not install package {pkg}: {error}'.format(
                    pkg=package.real_name, error=error))
                continue

            # Package requirements are an argument of the setup function
            req_not_found = package.get_requirements(pkg_folder)
            req_not_found = req_not_found.difference(packages)

            packages += req_not_found
            if req_not_found:
                print('Added packages \'{pkgs}\' as dependencies of {pkg}'
                      .format(pkgs=", ".join(req_not_found),
                              pkg=package.buildroot_name))
            print('Checking if package {name} already exists...'.format(
                name=package.pkg_dir))
            try:
                os.makedirs(package.pkg_dir)
            except OSError as exception:
                if exception.errno != errno.EEXIST:
                    print("ERROR: ", exception.message, file=sys.stderr)
                    continue
                print('Error: Package {name} already exists'
                      .format(name=package.pkg_dir))
                del_pkg = raw_input(
                    'Do you want to delete existing package ? [y/N]')
                if del_pkg.lower() == 'y':
                    shutil.rmtree(package.pkg_dir)
                    os.makedirs(package.pkg_dir)
                else:
                    continue
            package.create_package_mk()

            package.create_hash_file()

            package.create_config_in()
            # print an empty line for visual comfort
            print()
    finally:
        shutil.rmtree(tmp_path)


if __name__ == "__main__":
    main()
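
# Illustrative usage (not part of the original script):
#   utils/scanpypi flask -o package
# would create package/python-flask/ with its .mk, .hash and Config.in files,
# plus packages for any missing dependencies found on PyPI.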
127
bsp/buildroot/utils/size-stats-compare
Executable file
@@ -0,0 +1,127 @@
#!/usr/bin/env python

# Copyright (C) 2016 Thomas De Schampheleire <thomas.de.schampheleire@gmail.com>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# TODO (improvements)
# - support K,M,G size suffixes for threshold
# - output CSV file in addition to stdout reporting

import csv
import argparse
import sys

def read_file_size_csv(inputf, detail=None):
    """Extract package or file sizes from CSV file into size dictionary"""
    sizes = {}
    reader = csv.reader(inputf)

    header = next(reader)
    if (header[0] != 'File name' or header[1] != 'Package name' or
            header[2] != 'File size' or header[3] != 'Package size'):
        print(("Input file %s does not contain the expected header. Are you "
               "sure this file corresponds to the file-size-stats.csv "
               "file created by 'make graph-size'?") % inputf.name)
        sys.exit(1)

    for row in reader:
        if detail:
            sizes[row[0]] = int(row[2])
        else:
            sizes[row[1]] = int(row[3])

    return sizes

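# Illustrative note (not part of the original script; numbers are made up):
# a CSV row like
#   ['bin/busybox', 'busybox', '396784', '683400']
# is recorded as {'bin/busybox': 396784} with --detail, and as
# {'busybox': 683400} otherwise.
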
def compare_sizes(old, new):
    """Return delta/added/removed dictionaries based on two input size
    dictionaries"""
    delta = {}
    oldkeys = set(old.keys())
    newkeys = set(new.keys())

    # packages/files in both
    for entry in newkeys.intersection(oldkeys):
        delta[entry] = ('', new[entry] - old[entry])
    # packages/files only in new
    for entry in newkeys.difference(oldkeys):
        delta[entry] = ('added', new[entry])
    # packages/files only in old
    for entry in oldkeys.difference(newkeys):
        delta[entry] = ('removed', -old[entry])

    return delta

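# Illustrative example (not part of the original script): with
#   old = {'busybox': 100, 'zlib': 50}
#   new = {'busybox': 120, 'openssl': 80}
# compare_sizes(old, new) returns
#   {'busybox': ('', 20), 'openssl': ('added', 80), 'zlib': ('removed', -50)}
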
def print_results(result, threshold):
    """Print the given result dictionary sorted by size, ignoring any entries
    below or equal to threshold"""

    from six import iteritems
    list_result = list(iteritems(result))
    # result is a dictionary: name -> (flag, size difference)
    # list_result is a list of tuples: (name, (flag, size difference))

    for entry in sorted(list_result, key=lambda entry: entry[1][1]):
        if threshold is not None and abs(entry[1][1]) <= threshold:
            continue
        print('%12s %7s %s' % (entry[1][1], entry[1][0], entry[0]))


# main #########################################################################

description = """
Compare rootfs size between Buildroot compilations, for example after changing
configuration options or after switching to another Buildroot release.

This script compares the file-size-stats.csv file generated by 'make graph-size'
with the corresponding file from another Buildroot compilation.
The size differences can be reported per package or per file.
Size differences smaller than or equal to a given threshold can be ignored.
"""

parser = argparse.ArgumentParser(description=description,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)

parser.add_argument('-d', '--detail', action='store_true',
                    help='''report differences for individual files rather than
                    packages''')
parser.add_argument('-t', '--threshold', type=int,
                    help='''ignore size differences smaller than or equal to
                    this value (bytes)''')
parser.add_argument('old_file_size_csv', type=argparse.FileType('r'),
                    metavar='old-file-size-stats.csv',
                    help="""old CSV file with file and package size statistics,
                    generated by 'make graph-size'""")
parser.add_argument('new_file_size_csv', type=argparse.FileType('r'),
                    metavar='new-file-size-stats.csv',
                    help='new CSV file with file and package size statistics')
args = parser.parse_args()

if args.detail:
    keyword = 'file'
else:
    keyword = 'package'

old_sizes = read_file_size_csv(args.old_file_size_csv, args.detail)
new_sizes = read_file_size_csv(args.new_file_size_csv, args.detail)

delta = compare_sizes(old_sizes, new_sizes)

print('Size difference per %s (bytes), threshold = %s' % (keyword, args.threshold))
print(80*'-')
print_results(delta, args.threshold)
print(80*'-')
print_results({'TOTAL': ('', sum(new_sizes.values()) - sum(old_sizes.values()))},
              threshold=None)
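
# Illustrative usage (not part of the original script):
#   utils/size-stats-compare -t 1024 old-file-size-stats.csv new-file-size-stats.csv
# reports per-package size deltas larger than 1 KiB between the two builds.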
195
bsp/buildroot/utils/test-pkg
Executable file
@@ -0,0 +1,195 @@
#!/usr/bin/env bash
set -e

TOOLCHAINS_CSV='support/config-fragments/autobuild/toolchain-configs.csv'

main() {
    local o O opts
    local cfg dir pkg random toolchains_csv toolchain
    local ret nb nb_skip nb_fail nb_legal nb_tc build_dir
    local -a toolchains

    o='hc:d:p:r:t:'
    O='help,config-snippet:,build-dir:,package:,random:,toolchains-csv:'
    opts="$(getopt -n "${my_name}" -o "${o}" -l "${O}" -- "${@}")"
    eval set -- "${opts}"

    random=0
    toolchains_csv="${TOOLCHAINS_CSV}"
    while [ ${#} -gt 0 ]; do
        case "${1}" in
        (-h|--help)
            help; exit 0
            ;;
        (-c|--config-snippet)
            cfg="${2}"; shift 2
            ;;
        (-d|--build-dir)
            dir="${2}"; shift 2
            ;;
        (-p|--package)
            pkg="${2}"; shift 2
            ;;
        (-r|--random)
            random="${2}"; shift 2
            ;;
        (-t|--toolchains-csv)
            toolchains_csv="${2}"; shift 2
            ;;
        (--)
            shift; break
            ;;
        esac
    done
    if [ -z "${cfg}" ]; then
        printf "error: no config snippet specified\n" >&2; exit 1
    fi
    if [ ! -e "${cfg}" ]; then
        printf "error: %s: no such file\n" "${cfg}" >&2; exit 1
    fi
    if [ -z "${dir}" ]; then
        dir="${HOME}/br-test-pkg"
    fi

    # Extract the URLs of the toolchains; drop internal toolchains
    # E.g.: http://server/path/to/name.config,arch,libc
    #   --> http://server/path/to/name.config
    toolchains=($(sed -r -e 's/,.*//; /internal/d; /^#/d; /^$/d;' "${toolchains_csv}" \
                  |if [ ${random} -gt 0 ]; then \
                      sort -R |head -n ${random}
                  else
                      cat
                  fi |sort
                 )
               )

    nb_tc="${#toolchains[@]}"
    if [ ${nb_tc} -eq 0 ]; then
        printf "error: no toolchain found (networking issue?)\n" >&2; exit 1
    fi

    nb=0
    nb_skip=0
    nb_fail=0
    nb_legal=0
    for toolchainconfig in "${toolchains[@]}"; do
        : $((nb++))
        toolchain="$(basename "${toolchainconfig}" .config)"
        build_dir="${dir}/${toolchain}"
        printf "%40s [%*d/%d]: " "${toolchain}" ${#nb_tc} ${nb} ${nb_tc}
        build_one "${build_dir}" "${toolchainconfig}" "${cfg}" "${pkg}" && ret=0 || ret=${?}
        case ${ret} in
        (0) printf "OK\n";;
        (1) : $((nb_skip++)); printf "SKIPPED\n";;
        (2) : $((nb_fail++)); printf "FAILED\n";;
        (3) : $((nb_legal++)); printf "FAILED\n";;
        esac
    done

    printf "%d builds, %d skipped, %d build failed, %d legal-info failed\n" \
        ${nb} ${nb_skip} ${nb_fail} ${nb_legal}
}

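# Illustrative note (not part of the original script): with a CSV line such as
#   http://autobuild.buildroot.org/toolchains/configs/br-arm-full.config,x86_64,glibc
# the sed expression in main() keeps only the config URL before the first
# comma, so the toolchains array ends up holding one config path per entry.
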
build_one() {
    local dir="${1}"
    local toolchainconfig="${2}"
    local cfg="${3}"
    local pkg="${4}"

    mkdir -p "${dir}"

    support/kconfig/merge_config.sh -O "${dir}" \
        "${toolchainconfig}" "support/config-fragments/minimal.config" "${cfg}" \
        >> "${dir}/logfile" 2>&1
    # We want all the options from the snippet to be present as-is (set
    # or not set) in the actual .config; if one of them is not, it means
    # some dependency from the toolchain or arch is not available, in
    # which case this config is untestable and we skip it.
    # We don't care about the locale to sort in, as long as both sorts are
    # done in the same locale.
    comm -23 <(sort "${cfg}") <(sort "${dir}/.config") >"${dir}/missing.config"
    if [ -s "${dir}/missing.config" ]; then
        return 1
    fi
    # Remove the file, it's empty anyway.
    rm -f "${dir}/missing.config"

    if [ -n "${pkg}" ]; then
        if ! make O="${dir}" "${pkg}-dirclean" >> "${dir}/logfile" 2>&1; then
            return 2
        fi
    fi

    # shellcheck disable=SC2086
    if ! make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
        return 2
    fi

    # legal-info done systematically, because some packages have different
    # sources depending on the configuration (e.g. lua-5.2 vs. lua-5.3)
    if ! make O="${dir}" legal-info >> "${dir}/logfile" 2>&1; then
        return 3
    fi
}

help() {
    cat <<_EOF_
test-pkg: test-build a package against various toolchains and architectures

The supplied config snippet is appended to each toolchain config, and the
resulting configuration is checked to ensure it still contains all options
specified in the snippet; if any is missing, the build is skipped, on the
assumption that the package under test requires a toolchain or architecture
feature that is missing.

In case failures are noticed, you can fix the package and just re-run the
same command again; it will re-run the test where it failed. If you did
specify a package (with -p), the package build dir will be removed first.

The list of toolchains is retrieved from ${TOOLCHAINS_CSV}.
Only the external toolchains are tried, because building a Buildroot toolchain
would take too long. An alternative toolchains CSV file can be specified with
the -t option. This file should have lines consisting of the path to the
toolchain config fragment and the required host architecture, separated by a
comma. The config fragments should contain only the toolchain and architecture
settings.

Options:

    -h, --help
        Print this help.

    -c CFG, --config-snippet CFG
        Use the CFG file as the source for the config snippet. This file
        should contain all the config options required to build a package.

    -d DIR, --build-dir DIR
        Do the builds in directory DIR, one sub-dir per toolchain.

    -p PKG, --package PKG
        Test-build the package PKG, by running 'make PKG'; if not specified,
        just runs 'make'.

    -r N, --random N
        Limit the tests to N randomly selected toolchains, instead of
        building with all toolchains.

    -t CSVFILE, --toolchains-csv CSVFILE
        CSV file containing the paths to config fragments of toolchains to
        try. If not specified, the toolchains in ${TOOLCHAINS_CSV} will be
        used.

Example:

    Testing libcec would require a config snippet that contains:
        BR2_PACKAGE_LIBCEC=y

    Testing libcurl with OpenSSL support would require a snippet such as:
        BR2_PACKAGE_OPENSSL=y
        BR2_PACKAGE_LIBCURL=y

_EOF_
}

my_name="${0##*/}"
main "${@}"