Bump buildroot to 2019.02
@@ -4,7 +4,9 @@
from __future__ import print_function
import argparse
import inspect
import os
import re
import six
import sys

import checkpackagelib.lib_config
@@ -24,6 +26,9 @@ def parse_args():
parser.add_argument("files", metavar="F", type=str, nargs="*",
help="list of files")

parser.add_argument("--br2-external", "-b", dest='intree_only', action="store_false",
help="do not apply the pathname filters used for intree files")

parser.add_argument("--manual-url", action="store",
default="http://nightly.buildroot.org/",
help="default: %(default)s")
@@ -40,13 +45,33 @@ def parse_args():
return parser.parse_args()


CONFIG_IN_FILENAME = re.compile("/Config\.\S*$")
FILE_IS_FROM_A_PACKAGE = re.compile("package/[^/]*/")
CONFIG_IN_FILENAME = re.compile("Config\.\S*$")
DO_CHECK_INTREE = re.compile("|".join([
"Config.in",
"arch/",
"boot/",
"fs/",
"linux/",
"package/",
"system/",
"toolchain/",
]))
DO_NOT_CHECK_INTREE = re.compile("|".join([
"boot/barebox/barebox\.mk$",
"fs/common\.mk$",
"package/doc-asciidoc\.mk$",
"package/pkg-\S*\.mk$",
"toolchain/helpers\.mk$",
"toolchain/toolchain-external/pkg-toolchain-external\.mk$",
]))


def get_lib_from_filename(fname):
if FILE_IS_FROM_A_PACKAGE.search(fname) is None:
return None
if flags.intree_only:
if DO_CHECK_INTREE.match(fname) is None:
return None
if DO_NOT_CHECK_INTREE.match(fname):
return None
if CONFIG_IN_FILENAME.search(fname):
return checkpackagelib.lib_config
if fname.endswith(".hash"):
@@ -103,10 +128,19 @@ def check_file_using_lib(fname):

for cf in objects:
nwarnings += print_warnings(cf.before())
for lineno, text in enumerate(open(fname, "r").readlines()):
if six.PY3:
f = open(fname, "r", errors="surrogateescape")
else:
f = open(fname, "r")
lastline = ""
for lineno, text in enumerate(f.readlines()):
nlines += 1
for cf in objects:
if cf.disable.search(lastline):
continue
nwarnings += print_warnings(cf.check_line(lineno + 1, text))
lastline = text
f.close()
for cf in objects:
nwarnings += print_warnings(cf.after())

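Note: opening the file with errors="surrogateescape" under Python 3 is what lets check-package scan files that are not valid UTF-8 without raising UnicodeDecodeError. A standalone sketch of the mechanism (the scratch file and its contents are made up for illustration):

    # Bytes that are not valid UTF-8 survive a read/write round-trip
    # thanks to the surrogateescape error handler (Python 3).
    data = b"FOO_VERSION = 1.0 \xff\n"        # \xff is not valid UTF-8
    with open("/tmp/demo.mk", "wb") as f:      # hypothetical scratch file
        f.write(data)
    with open("/tmp/demo.mk", "r", errors="surrogateescape") as f:
        text = f.read()                        # no UnicodeDecodeError is raised
    assert text.encode("utf-8", errors="surrogateescape") == data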
@@ -117,7 +151,16 @@ def __main__():
global flags
flags = parse_args()

if len(flags.files) == 0:
if flags.intree_only:
# change all paths received to be relative to the base dir
base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
files_to_check = [os.path.relpath(os.path.abspath(f), base_dir) for f in flags.files]
# move current dir so the script finds the files
os.chdir(base_dir)
else:
files_to_check = flags.files

if len(files_to_check) == 0:
print("No files to check style")
sys.exit(1)

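Note: turning the arguments into paths relative to the top-level Buildroot directory (and then chdir'ing there) is what makes the intree filename filters above match. Roughly, with illustrative paths:

    import os

    base_dir = "/home/user/buildroot"                     # hypothetical checkout
    f = "/home/user/buildroot/package/foo/foo.mk"         # hypothetical argument
    print(os.path.relpath(os.path.abspath(f), base_dir))  # package/foo/foo.mk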
@@ -125,7 +168,7 @@ def __main__():
total_warnings = 0
total_lines = 0

for fname in flags.files:
for fname in files_to_check:
nwarnings, nlines = check_file_using_lib(fname)
total_warnings += nwarnings
total_lines += nlines

@@ -1,10 +1,12 @@
# See utils/checkpackagelib/readme.txt before editing this file.
import re


class _CheckFunction(object):
def __init__(self, filename, url_to_manual):
self.filename = filename
self.url_to_manual = url_to_manual
self.disable = re.compile(r"^\s*# check-package .*\b{}\b".format(self.__class__.__name__))

def before(self):
pass

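Note: together with the check_line loop above, which skips a check when cf.disable matches the previous line, this regex implements inline suppression through a "# check-package <CheckName>" comment. A small sketch of how the pattern is built and what it matches (the class name and comment text are only examples):

    import re

    class Indent(object):  # stand-in for a real check class
        pass

    disable = re.compile(r"^\s*# check-package .*\b{}\b".format(Indent.__name__))
    assert disable.search("# check-package disable Indent") is not None
    assert disable.search("# an unrelated comment") is None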
@@ -1,6 +1,6 @@
# See utils/checkpackagelib/readme.txt before editing this file.

from base import _CheckFunction
from checkpackagelib.base import _CheckFunction


class ConsecutiveEmptyLines(_CheckFunction):

@@ -5,11 +5,11 @@

import re

from base import _CheckFunction
from lib import ConsecutiveEmptyLines # noqa: F401
from lib import EmptyLastLine # noqa: F401
from lib import NewlineAtEof # noqa: F401
from lib import TrailingSpace # noqa: F401
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401


def _empty_or_comment(text):
@@ -132,6 +132,12 @@ class Indent(_CheckFunction):
text]
elif entry in entries_that_should_not_be_indented:
if not text.startswith(entry):
# four Config.in files have a special but legitimate indentation rule
if self.filename in ["package/Config.in",
"package/Config.in.host",
"package/kodi/Config.in",
"package/x11r7/Config.in"]:
return
return ["{}:{}: should not be indented"
.format(self.filename, lineno),
text]

@@ -5,11 +5,11 @@

import re

from base import _CheckFunction
from lib import ConsecutiveEmptyLines # noqa: F401
from lib import EmptyLastLine # noqa: F401
from lib import NewlineAtEof # noqa: F401
from lib import TrailingSpace # noqa: F401
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401


def _empty_line_or_comment(text):

@@ -6,16 +6,20 @@

import re

from base import _CheckFunction
from lib import ConsecutiveEmptyLines # noqa: F401
from lib import EmptyLastLine # noqa: F401
from lib import NewlineAtEof # noqa: F401
from lib import TrailingSpace # noqa: F401
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401

# used in more than one check
start_conditional = ["ifdef", "ifeq", "ifndef", "ifneq"]
end_conditional = ["endif"]


class Indent(_CheckFunction):
COMMENT = re.compile("^\s*#")
CONDITIONAL = re.compile("^\s*(ifeq|ifneq|endif)\s")
CONDITIONAL = re.compile("^\s*({})\s".format("|".join(start_conditional + end_conditional)))
ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
END_DEFINE = re.compile("^\s*endef\s")
MAKEFILE_TARGET = re.compile("^[^# \t]+:\s")
@@ -69,6 +73,66 @@ class Indent(_CheckFunction):
text]


class OverriddenVariable(_CheckFunction):
CONCATENATING = re.compile("^([A-Z0-9_]+)\s*(\+|:|)=\s*\$\(\\1\)")
END_CONDITIONAL = re.compile("^\s*({})".format("|".join(end_conditional)))
OVERRIDING_ASSIGNMENTS = [':=', "="]
START_CONDITIONAL = re.compile("^\s*({})".format("|".join(start_conditional)))
VARIABLE = re.compile("^([A-Z0-9_]+)\s*((\+|:|)=)")
USUALLY_OVERRIDDEN = re.compile("^[A-Z0-9_]+({})".format("|".join([
"_ARCH\s*=\s*",
"_CPU\s*=\s*",
"_SITE\s*=\s*",
"_SOURCE\s*=\s*",
"_VERSION\s*=\s*"])))

def before(self):
self.conditional = 0
self.unconditionally_set = []
self.conditionally_set = []

def check_line(self, lineno, text):
if self.START_CONDITIONAL.search(text):
self.conditional += 1
return
if self.END_CONDITIONAL.search(text):
self.conditional -= 1
return

m = self.VARIABLE.search(text)
if m is None:
return
variable, assignment = m.group(1, 2)

if self.conditional == 0:
if variable in self.conditionally_set:
self.unconditionally_set.append(variable)
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: unconditional override of variable {} previously conditionally set"
.format(self.filename, lineno, variable),
text]

if variable not in self.unconditionally_set:
self.unconditionally_set.append(variable)
return
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: unconditional override of variable {}"
.format(self.filename, lineno, variable),
text]
else:
if variable not in self.unconditionally_set:
self.conditionally_set.append(variable)
return
if self.CONCATENATING.search(text):
return
if self.USUALLY_OVERRIDDEN.search(text):
return
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: conditional override of variable {}"
.format(self.filename, lineno, variable),
text]


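Note: a quick illustration of the regexes that OverriddenVariable relies on, taken outside the class (the sample .mk lines are made up):

    import re

    # Same patterns as the class attributes above, written as raw strings.
    CONCATENATING = re.compile(r"^([A-Z0-9_]+)\s*(\+|:|)=\s*\$\(\1\)")
    VARIABLE = re.compile(r"^([A-Z0-9_]+)\s*((\+|:|)=)")

    print(VARIABLE.search("FOO_BAR = hello").group(1, 2))    # ('FOO_BAR', '=')
    print(VARIABLE.search("FOO_BAR += world").group(1, 2))   # ('FOO_BAR', '+=')
    # Re-using the previous value is not treated as an override:
    print(bool(CONCATENATING.search("FOO_BAR = $(FOO_BAR) -extra")))  # True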
class PackageHeader(_CheckFunction):
def before(self):
self.skip = False
@@ -123,7 +187,7 @@ class RemoveDefaultPackageSourceVariable(_CheckFunction):


class SpaceBeforeBackslash(_CheckFunction):
TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH = re.compile(r"^.*( |\t)\\$")
TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH = re.compile(r"^.*( |\t ?)\\$")

def check_line(self, lineno, text):
if self.TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH.match(text.rstrip()):
@@ -159,14 +223,19 @@ class TypoInPackageVariable(_CheckFunction):
"ACLOCAL_DIR",
"ACLOCAL_HOST_DIR",
"BR_CCACHE_INITIAL_SETUP",
"BR_LIBC",
"BR_NO_CHECK_HASH_FOR",
"LINUX_EXTENSIONS",
"LINUX_POST_PATCH_HOOKS",
"LINUX_TOOLS",
"LUA_RUN",
"MKFS_JFFS2",
"MKIMAGE_ARCH",
"PACKAGES_PERMISSIONS_TABLE",
"PKG_CONFIG_HOST_BINARY",
"SUMTOOL",
"TARGET_FINALIZE_HOOKS",
"TARGETS_ROOTFS",
"XTENSA_CORE_NAME"]))
PACKAGE_NAME = re.compile("/([^/]+)\.mk")
VARIABLE = re.compile("^([A-Z0-9_]+_[A-Z0-9_]+)\s*(\+|)=")
@@ -176,8 +245,10 @@ class TypoInPackageVariable(_CheckFunction):
package = package.replace("-", "_").upper()
# linux tools do not use LINUX_TOOL_ prefix for variables
package = package.replace("LINUX_TOOL_", "")
# linux extensions do not use LINUX_EXT_ prefix for variables
package = package.replace("LINUX_EXT_", "")
self.package = package
self.REGEX = re.compile("^(HOST_)?({}_[A-Z0-9_]+)".format(package))
self.REGEX = re.compile("^(HOST_|ROOTFS_)?({}_[A-Z0-9_]+)".format(package))
self.FIND_VIRTUAL = re.compile(
"^{}_PROVIDES\s*(\+|)=\s*(.*)".format(package))
self.virtual = []
@@ -215,8 +286,8 @@ class UselessFlag(_CheckFunction):
"_INSTALL_REDISTRIBUTE\s*=\s*YES",
"_INSTALL_STAGING\s*=\s*NO",
"_INSTALL_TARGET\s*=\s*YES"])))
END_CONDITIONAL = re.compile("^\s*(endif)")
START_CONDITIONAL = re.compile("^\s*(ifeq|ifneq)")
END_CONDITIONAL = re.compile("^\s*({})".format("|".join(end_conditional)))
START_CONDITIONAL = re.compile("^\s*({})".format("|".join(start_conditional)))

def before(self):
self.conditional = 0
@@ -244,3 +315,13 @@ class UselessFlag(_CheckFunction):
"({}#_infrastructure_for_autotools_based_packages)"
.format(self.filename, lineno, self.url_to_manual),
text]


class VariableWithBraces(_CheckFunction):
VARIABLE_WITH_BRACES = re.compile(r"^[^#].*[^$]\${\w+}")

def check_line(self, lineno, text):
if self.VARIABLE_WITH_BRACES.match(text.rstrip()):
return ["{}:{}: use $() to delimit variables, not ${{}}"
.format(self.filename, lineno),
text]

@@ -5,8 +5,8 @@

import re

from base import _CheckFunction
from lib import NewlineAtEof # noqa: F401
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import NewlineAtEof # noqa: F401


class ApplyOrder(_CheckFunction):

@@ -127,7 +127,7 @@ def get_toolchain_configs(toolchains_csv, buildrootdir):

with open(toolchains_csv) as r:
# filter empty lines and comments
lines = [ t for t in r.readlines() if len(t.strip()) > 0 and t[0] != '#' ]
lines = [t for t in r.readlines() if len(t.strip()) > 0 and t[0] != '#']
toolchains = decode_byte_list(lines)
configs = []

@@ -187,6 +187,7 @@ def is_toolchain_usable(configfile, config):
if platform.machine() == 'x86_64':
if 'BR2_TOOLCHAIN_EXTERNAL_LINARO_ARM=y\n' in configlines or \
'BR2_TOOLCHAIN_EXTERNAL_LINARO_AARCH64=y\n' in configlines or \
'BR2_TOOLCHAIN_EXTERNAL_LINARO_AARCH64_BE=y\n' in configlines or \
'BR2_TOOLCHAIN_EXTERNAL_LINARO_ARMEB=y\n' in configlines:
ldd_version_output = subprocess.check_output(['ldd', '--version'])
glibc_version = ldd_version_output.splitlines()[0].split()[-1]
@@ -210,6 +211,8 @@ def fixup_config(configfile):
with open(configfile) as configf:
configlines = configf.readlines()

BR2_TOOLCHAIN_EXTERNAL_URL = 'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/'

if "BR2_NEEDS_HOST_JAVA=y\n" in configlines and not sysinfo.has("java"):
return False
if "BR2_NEEDS_HOST_JAVAC=y\n" in configlines and not sysinfo.has("javac"):
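Note: the new BR2_TOOLCHAIN_EXTERNAL_URL constant is only a readability refactor; concatenating the prefix with a tarball name reproduces the full configuration line that was previously spelled out literally, as a quick check shows:

    BR2_TOOLCHAIN_EXTERNAL_URL = 'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/'

    line = BR2_TOOLCHAIN_EXTERNAL_URL + 'armv5-ctng-linux-gnueabi.tar.xz"\n'
    assert line == ('BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/'
                    'toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"\n')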
@@ -221,36 +224,36 @@ def fixup_config(configfile):
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain triggers an assembler error with the guile package when compiled with -Os (same issue as for CS ARM 2014.05-29)
if 'BR2_PACKAGE_GUILE=y\n' in configlines and \
'BR2_OPTIMIZE_S=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv5-ctng-linux-gnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv6-ctng-linux-uclibcgnueabi.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv6-ctng-linux-uclibcgnueabi.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR58854
if 'BR2_PACKAGE_LTTNG_TOOLS=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv7-ctng-linux-gnueabihf.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'armv7-ctng-linux-gnueabihf.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR60155
if 'BR2_PACKAGE_SDL=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# The ctng toolchain is affected by PR60155
if 'BR2_PACKAGE_LIBMPEG2=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'powerpc-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
if 'BR2_PACKAGE_STRONGSWAN=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# This MIPS toolchain uses eglibc-2.18 which lacks SYS_getdents64
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# libffi not available on sh2a and ARMv7-M, but propagating libffi
# arch dependencies in Buildroot is really too much work, so we
@@ -266,37 +269,37 @@ def fixup_config(configfile):
configlines.append('BR2_PACKAGE_SUNXI_BOARDS_FEX_FILE="a10/hackberry.fex"\n')
# This MIPS uClibc toolchain fails to build the gdb package
if 'BR2_PACKAGE_GDB=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the rt-tests package
if 'BR2_PACKAGE_RT_TESTS=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the civetweb package
if 'BR2_PACKAGE_CIVETWEB=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS ctng toolchain fails to build the python3 package
if 'BR2_PACKAGE_PYTHON3=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mips64el-ctng_n64-linux-gnu.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the strace package
if 'BR2_PACKAGE_STRACE=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the cdrkit package
if 'BR2_PACKAGE_CDRKIT=y\n' in configlines and \
'BR2_STATIC_LIBS=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# uClibc vfork static linking issue
if 'BR2_PACKAGE_ALSA_LIB=y\n' in configlines and \
'BR2_STATIC_LIBS=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/i486-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'i486-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# This MIPS uClibc toolchain fails to build the weston package
if 'BR2_PACKAGE_WESTON=y\n' in configlines and \
'BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
BR2_TOOLCHAIN_EXTERNAL_URL + 'mipsel-ctng-linux-uclibc.tar.xz"\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
@@ -307,17 +310,9 @@ def fixup_config(configfile):
'BR2_PACKAGE_QT5BASE_GUI=y\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
'BR2_PACKAGE_QT_GUI_MODULE=y\n' in configlines:
return False
# The cs nios2 2017.02 toolchain is affected by binutils PR19405
if 'BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y\n' in configlines and \
'BR2_PACKAGE_FLANN=y\n' in configlines:
return False
# or1k affected by binutils PR21464
if 'BR2_or1k=y\n' in configlines and \
'BR2_PACKAGE_QT_GUI_MODULE=y\n' in configlines:
return False

with open(configfile, "w+") as configf:
configf.writelines(configlines)
@@ -363,6 +358,8 @@ def gen_config(args):
configlines.append("BR2_STATIC_LIBS=y\n")
if randint(0, 20) == 0:
configlines.append("BR2_PACKAGE_PYTHON_PY_ONLY=y\n")
if randint(0, 5) == 0:
configlines.append("BR2_OPTIMIZE_2=y\n")

# Write out the configuration file
if not os.path.exists(args.outputdir):
@@ -405,7 +402,7 @@ def gen_config(args):
"savedefconfig"])

return subprocess.call(["make", "O=%s" % args.outputdir, "-C", args.buildrootdir,
"core-dependencies"])
"dependencies"])


if __name__ == '__main__':

@@ -24,11 +24,6 @@ def parse_args():


def __main__():
# DEVELOPERS is one level up from here
devs_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
devs = getdeveloperlib.parse_developers(devs_dir)
if devs is None:
sys.exit(1)
args = parse_args()

# Check that only one action is given
@@ -50,9 +45,17 @@ def __main__():
print("No action specified")
return

# getdeveloperlib expects to be executed from the toplevel buildroot
# directory, which is one level up from this script
os.chdir(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))

devs = getdeveloperlib.parse_developers()
if devs is None:
sys.exit(1)

# Handle the check action
if args.check:
files = getdeveloperlib.check_developers(devs, devs_dir)
files = getdeveloperlib.check_developers(devs)
for f in files:
print(f)


@@ -158,8 +158,6 @@ def parse_developers(basepath=None):
linen = 0
if basepath is None:
basepath = os.getcwd()
else:
basepath = os.path.abspath(basepath)
with open(os.path.join(basepath, "DEVELOPERS"), "r") as f:
files = []
name = None

@@ -483,10 +483,13 @@ use Module::CoreList;
use HTTP::Tiny;
use Safe;
use MetaCPAN::API::Tiny;
use Digest::SHA qw(sha256_hex);
use Text::Wrap;
$Text::Wrap::columns = 62;

# Below, 5.026 should be aligned with the version of perl actually
# Below, 5.028 should be aligned with the version of perl actually
# bundled in Buildroot:
die <<"MSG" if $] < 5.026;
die <<"MSG" if $] < 5.028;
This script needs a host perl with the same major version as Buildroot target perl.

Your current host perl is:
@@ -494,7 +497,7 @@ Your current host perl is:
version $]

You may install a local one by running:
perlbrew install perl-5.26.0
perlbrew install perl-5.28.0
MSG

my ($help, $man, $quiet, $force, $recommend, $test, $host);
@@ -516,14 +519,25 @@ my %dist; # name -> metacpan data
my %need_target; # name -> 1 if target package is needed
my %need_host; # name -> 1 if host package is needed
my %need_dlopen; # name -> 1 if requires dynamic library
my %is_xs; # name -> 1 if XS module
my %deps_build; # name -> list of host dependencies
my %deps_runtime; # name -> list of target dependencies
my %deps_optional; # name -> list of optional target dependencies
my %license_files; # name -> list of license files
my %license_files; # name -> hash of license files
my %checksum; # author -> list of checksum
my $mirror = 'http://cpan.metacpan.org'; # a CPAN mirror
my $mcpan = MetaCPAN::API::Tiny->new(base_url => 'http://fastapi.metacpan.org/v1');
my $ua = HTTP::Tiny->new();
my $new_pkgs;

my %white_list = (
'ExtUtils-Config' => 1,
'ExtUtils-InstallPaths' => 1,
'ExtUtils-Helpers' => 1,
'File-ShareDir-Install' => 1,
'Module-Build' => 1,
'Module-Build-Tiny' => 1,
);
my @info = ();

sub get_checksum {
my ($url) = @_;
@@ -556,7 +570,36 @@ sub find_license_files {
if (scalar @license_files == 0 && $manifest =~ m/(README)[\n\s]/i) {
@license_files = ($1);
}
return \@license_files;
if (scalar @license_files == 0 && $manifest =~ m/(README\.md)[\n\s]/i) {
@license_files = ($1);
}
return @license_files;
}

sub want_test {
my ($distname) = @_;
return 1 if $need_dlopen{$distname} && scalar @{$deps_runtime{$distname}} > 0;
}

sub get_dependencies {
my ($distname) = @_;
my %dep = map { $_ => 1 } @{$deps_runtime{$distname}};
for my $direct (@{$deps_runtime{$distname}}) {
for (get_dependencies( $direct )) {
$dep{$_} = 1;
}
}
return keys %dep;
}

sub get_indirect_dependencies {
my ($distname) = @_;
my %indirect;
my %direct = map { $_ => 1 } @{$deps_runtime{$distname}};
for my $dep (get_dependencies( $distname )) {
$indirect{$dep} = 1 unless exists $direct{$dep};
}
return keys %indirect;
}

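Note: get_dependencies walks the runtime-dependency graph recursively, and get_indirect_dependencies is simply that transitive closure minus the direct dependencies. The same logic sketched in Python over a made-up dependency map (it assumes the graph has no cycles, as the Perl code does):

    deps_runtime = {                       # hypothetical %deps_runtime data
        "Moo": ["Role-Tiny"],
        "Role-Tiny": ["Class-Method-Modifiers"],
        "Class-Method-Modifiers": [],
    }

    def get_dependencies(distname):
        dep = set(deps_runtime[distname])
        for direct in deps_runtime[distname]:
            dep |= get_dependencies(direct)
        return dep

    def get_indirect_dependencies(distname):
        return get_dependencies(distname) - set(deps_runtime[distname])

    print(sorted(get_indirect_dependencies("Moo")))  # ['Class-Method-Modifiers']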
sub fetch {
@@ -567,16 +610,19 @@ sub fetch {
say qq{fetch ${name}} unless $quiet;
my $result = $mcpan->release( distribution => $name );
$dist{$name} = $result;
$license_files{$name} = {};
eval {
my $manifest = $mcpan->source( author => $result->{author},
release => $name . q{-} . $result->{version},
path => 'MANIFEST' );
$need_dlopen{$name} = is_xs( $manifest );
$license_files{$name} = find_license_files( $manifest );
my $author = $result->{author};
my $release = $name . q{-} . $result->{version};
my $manifest = $mcpan->source( author => $author, release => $release, path => 'MANIFEST' );
$need_dlopen{$name} = $is_xs{$name} = is_xs( $manifest );
foreach my $fname (find_license_files( $manifest )) {
my $license = $mcpan->source( author => $author, release => $release, path => $fname );
$license_files{$name}->{$fname} = sha256_hex( $license );
}
};
if ($@) {
warn $@;
$license_files{$name} = [];
}
my %build = ();
my %runtime = ();
@@ -591,6 +637,7 @@ sub fetch {
# we could use the host Module::CoreList data, because host perl and
# target perl have the same major version
next if ${$dep}{phase} eq q{develop};
next if ${$dep}{phase} eq q{x_Dist_Zilla};
next if !($test && $top) && ${$dep}{phase} eq q{test};
my $distname = $mcpan->module( $modname )->{distribution};
if (${$dep}{phase} eq q{runtime}) {
@@ -603,11 +650,12 @@ sub fetch {
}
else { # configure, build
$build{$distname} = 1;
push @info, qq{[$name] suspicious dependency on $distname}
unless exists $white_list{$distname};
}
}
$deps_build{$name} = [keys %build];
$deps_runtime{$name} = [keys %runtime];
$deps_optional{$name} = [keys %optional];
foreach my $distname (@{$deps_build{$name}}) {
fetch( $distname, 0, 1 );
}
@@ -615,7 +663,7 @@ sub fetch {
fetch( $distname, $need_target, $need_host );
$need_dlopen{$name} ||= $need_dlopen{$distname};
}
foreach my $distname (@{$deps_optional{$name}}) {
foreach my $distname (keys %optional) {
fetch( $distname, $need_target, $need_host );
}
}
@@ -642,6 +690,24 @@ sub brname {
return uc $name;
}

# Buildroot requires license name as in http://spdx.org/licenses/
sub brlicense {
my $license = shift;
$license =~ s|apache_1_1|Apache-1.1|;
$license =~ s|apache_2_0|Apache-2.0|;
$license =~ s|artistic_2|Artistic-2.0|;
$license =~ s|artistic|Artistic-1.0|;
$license =~ s|lgpl_2_1|LGPL-2.1|;
$license =~ s|lgpl_3_0|LGPL-3.0|;
$license =~ s|gpl_2|GPL-2.0|;
$license =~ s|gpl_3|GPL-3.0|;
$license =~ s|mit|MIT|;
$license =~ s|mozilla_1_1|Mozilla-1.1|;
$license =~ s|openssl|OpenSSL|;
$license =~ s|perl_5|Artistic or GPL-1.0+|;
return $license;
}

while (my ($distname, $dist) = each %dist) {
my $fsname = fsname( $distname );
my $dirname = q{package/} . $fsname;
@@ -649,9 +715,15 @@ while (my ($distname, $dist) = each %dist) {
my $mkname = $dirname . q{/} . $fsname . q{.mk};
my $hashname = $dirname . q{/} . $fsname . q{.hash};
my $brname = brname( $fsname );
mkdir $dirname unless -d $dirname;
my $testname = q{support/testing/tests/package/test_} . lc $brname . q{.py};
unless (-d $dirname) {
mkdir $dirname;
$new_pkgs = 1;
}
if ($need_target{$distname} && ($force || !-f $cfgname)) {
my $abstract = $dist->{abstract};
$dist->{abstract} =~ s|\s+$||;
$dist->{abstract} .= q{.} unless $dist->{abstract} =~ m|\.$|;
my $abstract = wrap( q{}, qq{\t }, $dist->{abstract} );
my $homepage = $dist->{resources}->{homepage} || qq{https://metacpan.org/release/${distname}};
say qq{write ${cfgname}} unless $quiet;
open my $fh, q{>}, $cfgname;
@@ -660,7 +732,7 @@ while (my ($distname, $dist) = each %dist) {
say {$fh} qq{\tdepends on !BR2_STATIC_LIBS} if $need_dlopen{$distname};
foreach my $dep (sort @{$deps_runtime{$distname}}) {
my $brdep = brname( fsname( $dep ) );
say {$fh} qq{\tselect BR2_PACKAGE_${brdep}};
say {$fh} qq{\tselect BR2_PACKAGE_${brdep} # runtime};
}
say {$fh} qq{\thelp};
say {$fh} qq{\t ${abstract}\n} if $abstract;
@@ -679,20 +751,21 @@ while (my ($distname, $dist) = each %dist) {
# the auth part is not used, because we use $(BR2_CPAN_MIRROR)
my ($filename, $directories, $suffix) = fileparse( $path, q{tar.gz}, q{tgz} );
$directories =~ s|/$||;
my $dependencies = join q{ }, map( { q{host-} . fsname( $_ ); } sort @{$deps_build{$distname}} ),
map( { fsname( $_ ); } sort @{$deps_runtime{$distname}} );
my $host_dependencies = join q{ }, map { q{host-} . fsname( $_ ); } sort( @{$deps_build{$distname}},
@{$deps_runtime{$distname}} );
my $license = ref $dist->{license} eq 'ARRAY'
? join q{ or }, @{$dist->{license}}
: $dist->{license};
# BR requires license name as in http://spdx.org/licenses/
$license =~ s|apache_2_0|Apache-2.0|;
$license =~ s|artistic_2|Artistic-2.0|;
$license =~ s|mit|MIT|;
$license =~ s|openssl|OpenSSL|;
$license =~ s|perl_5|Artistic or GPL-1.0+|;
my $license_files = join q{ }, @{$license_files{$distname}};
my @dependencies = map( { q{host-} . fsname( $_ ); } sort @{$deps_build{$distname}} );
my $dependencies = join qq{ \\\n\t}, @dependencies;
$dependencies = qq{\\\n\t} . $dependencies if scalar @dependencies > 1;
my @host_dependencies = map { q{host-} . fsname( $_ ); } sort( @{$deps_build{$distname}},
@{$deps_runtime{$distname}} );
my $host_dependencies = join qq{ \\\n\t}, @host_dependencies;
$host_dependencies = qq{\\\n\t} . $host_dependencies if scalar @host_dependencies > 1;
my $license = brlicense( ref $dist->{license} eq 'ARRAY'
? join q{ or }, @{$dist->{license}}
: $dist->{license} );
my $license_files = join q{ }, sort keys %{$license_files{$distname}};
if ($license_files && (!$license || $license eq q{unknown})) {
push @info, qq{[$distname] undefined LICENSE, see $license_files};
$license = q{???};
}
say qq{write ${mkname}} unless $quiet;
open my $fh, q{>}, $mkname;
say {$fh} qq{################################################################################};
@@ -706,18 +779,10 @@ while (my ($distname, $dist) = each %dist) {
say {$fh} qq{${brname}_SITE = \$(BR2_CPAN_MIRROR)${directories}};
say {$fh} qq{${brname}_DEPENDENCIES = ${dependencies}} if $need_target{$distname} && $dependencies;
say {$fh} qq{HOST_${brname}_DEPENDENCIES = ${host_dependencies}} if $need_host{$distname} && $host_dependencies;
say {$fh} qq{${brname}_LICENSE = ${license}} if $license && $license ne q{unknown};
say {$fh} qq{${brname}_LICENSE = ${license}} if $license;
say {$fh} qq{${brname}_LICENSE_FILES = ${license_files}} if $license_files;
say {$fh} qq{${brname}_DISTNAME = ${distname}};
say {$fh} qq{};
foreach (sort @{$deps_optional{$distname}}) {
next if grep { $_ eq $distname; } @{$deps_runtime{$_}}; # avoid cyclic dependencies
my $opt_brname = brname( $_ );
my $opt_fsname = fsname( $_ );
say {$fh} qq{ifeq (\$(BR2_PACKAGE_PERL_${opt_brname}),y)};
say {$fh} qq{${brname}_DEPENDENCIES += ${opt_fsname}};
say {$fh} qq{endif};
say {$fh} qq{};
}
say {$fh} qq{\$(eval \$(perl-package))} if $need_target{$distname};
say {$fh} qq{\$(eval \$(host-perl-package))} if $need_host{$distname};
close $fh;
@@ -731,28 +796,82 @@ while (my ($distname, $dist) = each %dist) {
say {$fh} qq{# retrieved by scancpan from ${mirror}/};
say {$fh} qq{md5 ${md5} ${filename}};
say {$fh} qq{sha256 ${sha256} ${filename}};
my %license_files = %{$license_files{$distname}};
if (scalar keys %license_files) {
say {$fh} q{};
say {$fh} qq{# computed by scancpan};
foreach my $license (sort keys %license_files) {
my $digest = $license_files{$license};
say {$fh} qq{sha256 ${digest} ${license}};
}
}
close $fh;
}
if (want_test( $distname ) && ($force || !-f $testname)) {
my $classname = $distname;
$classname =~ s|-||g;
my $modname = $distname;
$modname =~ s|-|::|g;
my $mark = $is_xs{$distname} ? q{ XS} : q{};
my @indirect = (get_indirect_dependencies( $distname ));
say qq{write ${testname}} unless $quiet;
open my $fh, q{>}, $testname;
say {$fh} qq{from tests.package.test_perl import TestPerlBase};
say {$fh} qq{};
say {$fh} qq{};
say {$fh} qq{class TestPerl${classname}(TestPerlBase):};
say {$fh} qq{ """};
say {$fh} qq{ package:};
say {$fh} qq{ ${distname}${mark}};
say {$fh} qq{ direct dependencies:};
foreach my $dep (sort @{$deps_runtime{$distname}}) {
$mark = $is_xs{$dep} ? q{ XS} : q{};
say {$fh} qq{ ${dep}${mark}};
}
if (scalar @indirect > 0) {
say {$fh} qq{ indirect dependencies:};
foreach my $dep (sort @indirect) {
$mark = $is_xs{$dep} ? q{ XS} : q{};
say {$fh} qq{ ${dep}${mark}};
}
}
say {$fh} qq{ """};
say {$fh} qq{};
say {$fh} qq{ config = TestPerlBase.config + \\};
say {$fh} qq{ """};
say {$fh} qq{ BR2_PACKAGE_PERL=y};
say {$fh} qq{ BR2_PACKAGE_${brname}=y};
say {$fh} qq{ """};
say {$fh} qq{};
say {$fh} qq{ def test_run(self):};
say {$fh} qq{ self.login()};
say {$fh} qq{ self.module_test("${modname}")};
close $fh;
}
}

my %pkg;
my $cfgname = q{package/Config.in};
if (-f $cfgname) {
open my $fh, q{<}, $cfgname;
while (<$fh>) {
chomp;
$pkg{$_} = 1 if m|package/perl-|;
if ($new_pkgs) {
my %pkg;
my $cfgname = q{package/Config.in};
if (-f $cfgname) {
open my $fh, q{<}, $cfgname;
while (<$fh>) {
chomp;
$pkg{$_} = 1 if m|package/perl-|;
}
close $fh;
}
close $fh;

foreach my $distname (keys %need_target) {
my $fsname = fsname( $distname );
$pkg{qq{\tsource "package/${fsname}/Config.in"}} = 1;
}

say qq{${cfgname} must contain the following lines:};
say join qq{\n}, sort keys %pkg;
}

foreach my $distname (keys %need_target) {
my $fsname = fsname( $distname );
$pkg{qq{\tsource "package/${fsname}/Config.in"}} = 1;
}

say qq{${cfgname} must contain the following lines:};
say join qq{\n}, sort keys %pkg;
say join qq{\n}, @info;

__END__

@@ -762,7 +881,7 @@ utils/scancpan Try-Tiny Moo

=head1 SYNOPSIS

supports/scripts/scancpan [options] [distname ...]
utils/scancpan [options] [distname ...]

Options:
-help
@@ -819,7 +938,6 @@ Perl/CPAN distributions required by the specified distnames. The
dependencies and metadata are fetched from https://metacpan.org/.

After running this script, it is necessary to check the generated files.
You have to manually add the license files (PERL_FOO_LICENSE_FILES variable).
For distributions that link against a target library, you have to add the
buildroot package name for that library to the DEPENDENCIES variable.

@@ -831,7 +949,7 @@ in order to work with the right CoreList data.

=head1 LICENSE

Copyright (C) 2013-2017 by Francois Perrad <francois.perrad@gadz.org>
Copyright (C) 2013-2018 by Francois Perrad <francois.perrad@gadz.org>

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -852,9 +970,9 @@ This script is a part of Buildroot.
This script requires the module C<MetaCPAN::API::Tiny> (version 1.131730)
which was included at the beginning of this file by the tool C<fatpack>.

See L<http://search.cpan.org/~nperez/MetaCPAN-API-Tiny-1.131730/>.
See L<https://metacpan.org/release/NPEREZ/MetaCPAN-API-Tiny-1.131730>.

See L<http://search.cpan.org/search?query=App-FatPacker&mode=dist>.
See L<https://metacpan.org/release/App-FatPacker>.

Both of these libraries are free software and may be distributed under the same
terms as perl itself.

@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python
"""

Utility for building Buildroot packages for existing PyPI packages
@@ -7,13 +7,12 @@ Any package built by scanpypi should be manually checked for
errors.
"""
from __future__ import print_function
from __future__ import absolute_import
import argparse
import json
import urllib2
import sys
import os
import shutil
import StringIO
import tarfile
import zipfile
import errno
@@ -23,6 +22,16 @@ import textwrap
import tempfile
import imp
from functools import wraps
import six.moves.urllib.request
import six.moves.urllib.error
import six.moves.urllib.parse
from six.moves import map
from six.moves import zip
from six.moves import input
if six.PY2:
import StringIO
else:
import io

BUF_SIZE = 65536

@@ -88,6 +97,7 @@ def pkg_buildroot_name(pkg_name):
pkg_name -- String to rename
"""
name = re.sub('[^\w-]', '', pkg_name.lower())
name = name.replace('_', '-')
prefix = 'python-'
pattern = re.compile('^(?!' + prefix + ')(.+?)$')
name = pattern.sub(r'python-\1', name)
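Note: pkg_buildroot_name normalizes a PyPI name into a Buildroot package name: lower-case it, drop characters outside [\w-], map '_' to '-', and prepend 'python-' unless it is already there. A condensed, standalone sketch (the sample names are arbitrary):

    import re

    def pkg_buildroot_name(pkg_name):
        name = re.sub(r'[^\w-]', '', pkg_name.lower())
        name = name.replace('_', '-')
        prefix = 'python-'
        pattern = re.compile('^(?!' + prefix + ')(.+?)$')
        return pattern.sub(r'python-\1', name)

    print(pkg_buildroot_name('Flask_RESTful'))    # python-flask-restful
    print(pkg_buildroot_name('python-dateutil'))  # prefix is not duplicated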
@@ -147,15 +157,15 @@ class BuildrootPackage():
self.metadata_url = 'https://pypi.org/pypi/{pkg}/json'.format(
pkg=self.real_name)
try:
pkg_json = urllib2.urlopen(self.metadata_url).read().decode()
except urllib2.HTTPError as error:
pkg_json = six.moves.urllib.request.urlopen(self.metadata_url).read().decode()
except six.moves.urllib.error.HTTPError as error:
print('ERROR:', error.getcode(), error.msg, file=sys.stderr)
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
except urllib2.URLError:
except six.moves.urllib.error.URLError:
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
@@ -169,6 +179,7 @@ class BuildrootPackage():
"""
Download a package using metadata from pypi
"""
download = None
try:
self.metadata['urls'][0]['filename']
except IndexError:
@@ -181,7 +192,7 @@ class BuildrootPackage():
'digests': None}]
# In this case, we can't get the name of the downloaded file
# from the pypi api, so we need to find it, this should work
urlpath = urllib2.urlparse.urlparse(
urlpath = six.moves.urllib.parse.urlparse(
self.metadata['info']['download_url']).path
# urlparse().path give something like
# /path/to/file-version.tar.gz
@@ -192,9 +203,9 @@ class BuildrootPackage():
continue
try:
print('Downloading package {pkg} from {url}...'.format(
pkg=self.real_name, url=download_url['url']))
download = urllib2.urlopen(download_url['url'])
except urllib2.HTTPError as http_error:
pkg=self.real_name, url=download_url['url']))
download = six.moves.urllib.request.urlopen(download_url['url'])
except six.moves.urllib.error.HTTPError as http_error:
download = http_error
else:
self.used_url = download_url
@@ -204,14 +215,33 @@ class BuildrootPackage():
self.md5_sum = hashlib.md5(self.as_string).hexdigest()
if self.md5_sum == download_url['digests']['md5']:
break
else:
if download.__class__ == urllib2.HTTPError:
raise download
raise DownloadFailed('Failed to download package {pkg}'

if download is None:
raise DownloadFailed('Failed to download package {pkg}: '
'No source archive available'
.format(pkg=self.real_name))
elif download.__class__ == six.moves.urllib.error.HTTPError:
raise download

self.filename = self.used_url['filename']
self.url = self.used_url['url']

def check_archive(self, members):
"""
Check archive content before extracting

Keyword arguments:
members -- list of archive members
"""
# Protect against https://github.com/snyk/zip-slip-vulnerability
# Older python versions do not validate that the extracted files are
# inside the target directory. Detect and error out on evil paths
evil = [e for e in members if os.path.relpath(e).startswith(('/', '..'))]
if evil:
print('ERROR: Refusing to extract {} with suspicious members {}'.format(
self.filename, evil))
sys.exit(1)

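Note: check_archive guards against the zip-slip vulnerability by refusing archive members whose normalized relative path escapes the extraction directory. A standalone illustration of the filter with made-up member names:

    import os

    def find_evil_members(members):
        # members whose normalized relative path starts with '/' or '..'
        return [e for e in members if os.path.relpath(e).startswith(('/', '..'))]

    print(find_evil_members(['pkg-1.0/setup.py', 'pkg-1.0/README']))    # []
    print(find_evil_members(['../../etc/passwd', 'pkg-1.0/setup.py']))  # ['../../etc/passwd']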
def extract_package(self, tmp_path):
"""
Extract the package contents into a directory
@@ -219,7 +249,10 @@ class BuildrootPackage():
Keyword arguments:
tmp_path -- directory where you want the package to be extracted
"""
as_file = StringIO.StringIO(self.as_string)
if six.PY2:
as_file = StringIO.StringIO(self.as_string)
else:
as_file = io.BytesIO(self.as_string)
if self.filename[-3:] == 'zip':
with zipfile.ZipFile(as_file) as as_zipfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
@@ -233,6 +266,7 @@ class BuildrootPackage():
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
self.check_archive(as_zipfile.namelist())
as_zipfile.extractall(tmp_pkg)
pkg_filename = self.filename.split(".zip")[0]
else:
@@ -248,6 +282,7 @@ class BuildrootPackage():
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
self.check_archive(as_tarfile.getnames())
as_tarfile.extractall(tmp_pkg)
pkg_filename = self.filename.split(".tar")[0]

@@ -265,6 +300,12 @@ class BuildrootPackage():
sys.path.append(self.tmp_extract)
s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract])
setup = imp.load_module('setup', s_file, s_path, s_desc)
if self.metadata_name in self.setup_args:
pass
elif self.metadata_name.replace('_', '-') in self.setup_args:
self.metadata_name = self.metadata_name.replace('_', '-')
elif self.metadata_name.replace('-', '_') in self.setup_args:
self.metadata_name = self.metadata_name.replace('-', '_')
try:
self.setup_metadata = self.setup_args[self.metadata_name]
except KeyError:
@@ -304,8 +345,8 @@ class BuildrootPackage():
if len(item) > 0 and item[0] != '#']

req_not_found = self.pkg_req
self.pkg_req = map(pkg_buildroot_name, self.pkg_req)
pkg_tuples = zip(req_not_found, self.pkg_req)
self.pkg_req = list(map(pkg_buildroot_name, self.pkg_req))
pkg_tuples = list(zip(req_not_found, self.pkg_req))
# pkg_tuples is a list of tuples that looks like
# ('werkzeug','python-werkzeug') because I need both when checking if
# dependencies already exist or are already in the download list
@@ -338,13 +379,14 @@ class BuildrootPackage():
version=self.version)
lines.append(version_line)

targz = self.filename.replace(
self.version,
'$({name}_VERSION)'.format(name=self.mk_name))
targz_line = '{name}_SOURCE = {filename}\n'.format(
name=self.mk_name,
filename=targz)
lines.append(targz_line)
if self.buildroot_name != self.real_name:
targz = self.filename.replace(
self.version,
'$({name}_VERSION)'.format(name=self.mk_name))
targz_line = '{name}_SOURCE = {filename}\n'.format(
name=self.mk_name,
filename=targz)
lines.append(targz_line)

if self.filename not in self.url:
# Sometimes the filename is in the url, sometimes it's not
@@ -413,8 +455,7 @@ class BuildrootPackage():
classifiers_licenses = [regexp.sub(r"\1", lic)
for lic in self.metadata['info']['classifiers']
if regexp.match(lic)]
licenses = map(lambda x: license_dict[x] if x in license_dict else x,
classifiers_licenses)
licenses = [license_dict[x] if x in license_dict else x for x in classifiers_licenses]
if not len(licenses):
print('WARNING: License has been set to "{license}". It is most'
' likely wrong, please change it if need be'.format(
@@ -428,8 +469,10 @@ class BuildrootPackage():
for license_file in license_files:
with open(license_file) as lic_file:
match = liclookup.match(lic_file.read())
if match.confidence >= 90.0:
if match is not None and match.confidence >= 90.0:
license_names.append(match.license.id)
else:
license_names.append("FIXME: license id couldn't be detected")

if len(license_names) > 0:
license_line = ('{name}_LICENSE ='
@@ -547,7 +590,7 @@ class BuildrootPackage():
hash_line = '{method}\t{digest} {filename}\n'.format(
method='sha256',
digest=sha256.hexdigest(),
filename=os.path.basename(license_file))
filename=license_file.replace(self.tmp_extract, '')[1:])
lines.append(hash_line)

with open(path_to_hash, 'w') as hash_file:
@@ -574,7 +617,7 @@ class BuildrootPackage():

lines.append('\thelp\n')

help_lines = textwrap.wrap(self.metadata['info']['summary'],
help_lines = textwrap.wrap(self.metadata['info']['summary'], 62,
initial_indent='\t ',
subsequent_indent='\t ')

@@ -585,7 +628,7 @@ class BuildrootPackage():
# \t + two spaces is 3 char long
help_lines.append('')
help_lines.append('\t ' + self.metadata['info']['home_page'])
help_lines = map(lambda x: x + '\n', help_lines)
help_lines = [x + '\n' for x in help_lines]
lines += help_lines

with open(path_to_config, 'w') as config_file:
@@ -626,7 +669,7 @@ def main():
print('Fetching package', package.real_name)
try:
package.fetch_package_info()
except (urllib2.URLError, urllib2.HTTPError):
except (six.moves.urllib.error.URLError, six.moves.urllib.error.HTTPError):
continue
if package.metadata_name.lower() == 'setuptools':
# setuptools imports itself, that does not work very well
@@ -636,7 +679,7 @@ def main():

try:
package.download_package()
except urllib2.HTTPError as error:
except six.moves.urllib.error.HTTPError as error:
print('Error: {code} {reason}'.format(code=error.code,
reason=error.reason))
print('Error downloading package :', package.buildroot_name)
@@ -684,7 +727,7 @@ def main():
continue
print('Error: Package {name} already exists'
.format(name=package.pkg_dir))
del_pkg = raw_input(
del_pkg = input(
'Do you want to delete existing package ? [y/N]')
if del_pkg.lower() == 'y':
shutil.rmtree(package.pkg_dir)

@@ -24,14 +24,15 @@ import csv
import argparse
import sys


def read_file_size_csv(inputf, detail=None):
"""Extract package or file sizes from CSV file into size dictionary"""
sizes = {}
reader = csv.reader(inputf)

header = next(reader)
if (header[0] != 'File name' or header[1] != 'Package name' or
header[2] != 'File size' or header[3] != 'Package size'):
if header[0] != 'File name' or header[1] != 'Package name' or \
header[2] != 'File size' or header[3] != 'Package size':
print(("Input file %s does not contain the expected header. Are you "
"sure this file corresponds to the file-size-stats.csv "
"file created by 'make graph-size'?") % inputf.name)
@@ -45,6 +46,7 @@ def read_file_size_csv(inputf, detail=None):

return sizes


def compare_sizes(old, new):
"""Return delta/added/removed dictionaries based on two input size
dictionaries"""
@@ -64,6 +66,7 @@ def compare_sizes(old, new):

return delta


def print_results(result, threshold):
"""Print the given result dictionary sorted by size, ignoring any entries
below or equal to threshold"""

@@ -5,28 +5,37 @@ TOOLCHAINS_CSV='support/config-fragments/autobuild/toolchain-configs.csv'

main() {
local o O opts
local cfg dir pkg random toolchains_dir toolchain
local cfg dir pkg random toolchains_dir toolchain all number mode
local ret nb nb_skip nb_fail nb_legal nb_tc build_dir
local -a toolchains

o='hc:d:p:r:t:'
o='hac:d:n:p:r:t:'
O='help,config-snippet:build-dir:package:,random:,toolchains-dir:'
opts="$(getopt -n "${my_name}" -o "${o}" -l "${O}" -- "${@}")"
eval set -- "${opts}"

random=0
all=0
number=0
mode=0
toolchains_csv="${TOOLCHAINS_CSV}"
while [ ${#} -gt 0 ]; do
case "${1}" in
(-h|--help)
help; exit 0
;;
(-a|--all)
all=1; shift 1
;;
(-c|--config-snippet)
cfg="${2}"; shift 2
;;
(-d|--build-dir)
dir="${2}"; shift 2
;;
(-n|--number)
number="${2}"; shift 2
;;
(-p|--package)
pkg="${2}"; shift 2
;;
@@ -51,15 +60,37 @@ main() {
dir="${HOME}/br-test-pkg"
fi

if [ ${random} -gt 0 ]; then
mode=$((mode+1))
fi

if [ ${number} -gt 0 ]; then
mode=$((mode+1))
fi

if [ ${all} -eq 1 ]; then
mode=$((mode+1))
fi

# Default mode is to test the N first toolchains, which have been
# chosen to be a good selection of toolchains.
if [ ${mode} -eq 0 ] ; then
number=6
elif [ ${mode} -gt 1 ] ; then
printf "error: --all, --number and --random are mutually exclusive\n" >&2; exit 1
fi

# Extract the URLs of the toolchains; drop internal toolchains
# E.g.: http://server/path/to/name.config,arch,libc
# --> http://server/path/to/name.config
toolchains=($(sed -r -e 's/,.*//; /internal/d; /^#/d; /^$/d;' "${toolchains_csv}" \
|if [ ${random} -gt 0 ]; then \
sort -R |head -n ${random}
else
cat
fi |sort
elif [ ${number} -gt 0 ]; then \
head -n ${number}
else
sort
fi
)
)

@@ -98,7 +129,7 @@ build_one() {

mkdir -p "${dir}"

support/kconfig/merge_config.sh -O "${dir}" \
CONFIG_= support/kconfig/merge_config.sh -O "${dir}" \
"${toolchainconfig}" "support/config-fragments/minimal.config" "${cfg}" \
>> "${dir}/logfile" 2>&1
# We want all the options from the snippet to be present as-is (set
@@ -121,7 +152,7 @@ build_one() {
fi

# shellcheck disable=SC2086
if ! make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
if ! BR_FORCE_CHECK_DEPENDENCIES=YES make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
return 2
fi

@@ -154,6 +185,10 @@ toolchain config fragment and the required host architecture, separated by a
comma. The config fragments should contain only the toolchain and architecture
settings.

By default, a useful subset of toolchains is tested. If needed, all
toolchains can be tested (-a), an arbitrary number of toolchains (-n
in order, -r for random).

Options:

-h, --help
@@ -170,9 +205,16 @@ Options:
Test-build the package PKG, by running 'make PKG'; if not specified,
just runs 'make'.

-a, --all
Test all toolchains, instead of the default subset defined by
Buildroot developers.

-n N, --number N
Test N toolchains, in the order defined in the toolchain CSV
file.

-r N, --random N
Limit the tests to the N randomly selected toolchains, instead of
building with all toolchains.
Limit the tests to the N randomly selected toolchains.

-t CSVFILE, --toolchains-csv CSVFILE
CSV file containing the paths to config fragments of toolchains to
