Bump buildroot version to 2018.02.6

This commit is contained in:
jbnadal
2018-10-22 14:55:59 +02:00
parent 222960cedb
commit bec94fdb63
6150 changed files with 84803 additions and 117446 deletions

@@ -0,0 +1,10 @@
BR2_arm=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv5-ctng-linux-gnueabi.tar.xz"
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_PREFIX="armv5-ctng-linux-gnueabi"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_8=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_12=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,13 @@
BR2_arm=y
BR2_cortex_a9=y
BR2_ARM_ENABLE_NEON=y
BR2_ARM_ENABLE_VFP=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/armv7-ctng-linux-gnueabihf.tar.xz"
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_PREFIX="armv7-ctng-linux-gnueabihf"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_8=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_12=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,9 @@
BR2_aarch64=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-aarch64-glibc-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_4=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,3 @@
BR2_arcle=y
BR2_TOOLCHAIN_BUILDROOT_LOCALE=y
BR2_TOOLCHAIN_BUILDROOT_CXX=y

@@ -0,0 +1,4 @@
BR2_arcle=y
BR2_archs38=y
BR2_TOOLCHAIN_BUILDROOT_GLIBC=y
BR2_TOOLCHAIN_BUILDROOT_CXX=y

@@ -0,0 +1,11 @@
BR2_arcle=y
BR2_archs38=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arcle-hs38-full-2017.11-rc2-15-g9d544fe.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_7=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_1=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,8 @@
BR2_arm=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arm-basic-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set

@@ -0,0 +1,11 @@
BR2_arm=y
BR2_cortex_a9=y
BR2_ARM_ENABLE_VFP=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arm-cortex-a9-glibc-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_7=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_arm=y
BR2_cortex_a9=y
BR2_ARM_ENABLE_VFP=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arm-cortex-a9-musl-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_MUSL=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,12 @@
BR2_arm=y
BR2_cortex_m4=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arm-cortex-m4-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_NPTL is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_arm=y
BR2_arm1176jzf_s=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arm11-full-nothread-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_arm=y
BR2_STATIC_LIBS=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arm-full-static-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_10=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_arm=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-arm-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_9=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_10=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,5 @@
BR2_arm=y
BR2_ARM_EABI=y
BR2_TOOLCHAIN_BUILDROOT_LOCALE=y
BR2_TOOLCHAIN_BUILDROOT_WCHAR=y
BR2_TOOLCHAIN_BUILDROOT_CXX=y

@@ -0,0 +1,4 @@
BR2_arm=y
BR2_cortex_a8=y
BR2_TOOLCHAIN_BUILDROOT_GLIBC=y
BR2_TOOLCHAIN_BUILDROOT_CXX=y

@@ -0,0 +1,4 @@
BR2_arm=y
BR2_cortex_a8=y
BR2_TOOLCHAIN_BUILDROOT_MUSL=y
BR2_TOOLCHAIN_BUILDROOT_CXX=y

@@ -0,0 +1,12 @@
BR2_bfin=y
BR2_bf512=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.net/toolchains/tarballs/br-bfin-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_NPTL is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_x86_pentium_mmx=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.net/toolchains/tarballs/br-i386-pentium-mmx-musl-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_12=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_MUSL=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_SSP is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_x86_pentium4=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-i386-pentium4-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_2=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_m68k=y
BR2_m68k_cf5208=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.net/toolchains/tarballs/br-m68k-5208-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_NPTL is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_m68k=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.net/toolchains/tarballs/br-m68k-68040-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_NPTL is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1 @@
BR2_microblazeel=y

@@ -0,0 +1,10 @@
BR2_microblazeel=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-microblaze-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_1=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_NPTL is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_mipsel=y
BR2_mips_32r6=y
# BR2_MIPS_SOFT_FLOAT is not set
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-mips32r6-el-hf-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_mips64el=y
BR2_MIPS_NABI64=y
# BR2_MIPS_SOFT_FLOAT is not set
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-mips64-n64-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_5=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,12 @@
BR2_mips64el=y
BR2_mips_64r6=y
BR2_MIPS_NABI64=y
# BR2_MIPS_SOFT_FLOAT is not set
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-mips64r6-n64-el-hf-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_mipsel=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-mipsel-o32-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_nios2=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-nios2-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_SSP is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_or1k=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.net/toolchains/tarballs/br-openrisc-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_5=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_NPTL is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_powerpc=y
BR2_powerpc_603e=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-powerpc-603e-basic-cpp-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_powerpc=y
BR2_powerpc_e500mc=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-powerpc-e500mc-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,4 @@
BR2_powerpc=y
BR2_TOOLCHAIN_BUILDROOT_LOCALE=y
BR2_TOOLCHAIN_BUILDROOT_WCHAR=y
BR2_TOOLCHAIN_BUILDROOT_CXX=y

@@ -0,0 +1,9 @@
BR2_powerpc64=y
BR2_powerpc_power7=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.net/toolchains/tarballs/br-powerpc64-power7-glibc-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,9 @@
BR2_powerpc64le=y
BR2_powerpc_power8=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.net/toolchains/tarballs/br-powerpc64le-power8-glibc-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_sh=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-sh4-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,9 @@
BR2_sparc=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-sparc-uclibc-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_1=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,8 @@
BR2_sparc64=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-sparc64-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_1=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_x86_64=y
BR2_x86_core2=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-x86-64-core2-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_4=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_x86_64=y
BR2_x86_atom=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-x86-64-musl-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_MUSL=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,3 @@
BR2_xtensa=y
BR2_TOOLCHAIN_BUILDROOT_LOCALE=y
BR2_TOOLCHAIN_BUILDROOT_CXX=y

@@ -0,0 +1,9 @@
BR2_xtensa=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/br-xtensa-full-2017.11-rc1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_6=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_13=y
BR2_TOOLCHAIN_EXTERNAL_LOCALE=y
# BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG is not set
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,10 @@
BR2_x86_i686=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/i686-ctng-linux-gnu.tar.xz"
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_PREFIX="i686-ctng-linux-gnu"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_8=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_9=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,3 @@
BR2_aarch64=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_LINARO_AARCH64=y

@@ -0,0 +1,5 @@
BR2_arm=y
BR2_cortex_a8=y
BR2_ARM_EABIHF=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_LINARO_ARM=y

@@ -0,0 +1,9 @@
BR2_mips64el=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n32-linux-gnu.tar.xz"
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_PREFIX="mips64el-ctng_n32-linux-gnu"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_8=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_9=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_mips64el=y
BR2_MIPS_NABI64=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/mips64el-ctng_n64-linux-gnu.tar.xz"
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_PREFIX="mips64el-ctng_n64-linux-gnu"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_8=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_9=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,11 @@
BR2_powerpc=y
BR2_powerpc_8548=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/powerpc-ctng_e500v2-linux-gnuspe.tar.xz"
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_PREFIX="powerpc-ctng_e500v2-linux-gnuspe"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_7=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_12=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,4 @@
BR2_arm=y
BR2_arm920t=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_ARM=y

@@ -0,0 +1,7 @@
BR2_arm=y
BR2_cortex_a8=y
BR2_ARM_EABI=y
BR2_ARM_INSTRUCTIONS_THUMB2=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_ARM=y
BR2_TARGET_OPTIMIZATION=""

@@ -0,0 +1,3 @@
BR2_arm=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_ARM=y

@@ -0,0 +1,4 @@
BR2_mips=y
BR2_mips_32r2=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_MIPS=y

@@ -0,0 +1,5 @@
BR2_mips64el=y
BR2_mips_64r2=y
BR2_MIPS_NABI64=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_MIPS=y

@@ -0,0 +1,3 @@
BR2_nios2=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_NIOSII=y

@@ -0,0 +1,4 @@
BR2_x86_64=y
BR2_x86_steamroller=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CODESOURCERY_AMD64=y

@@ -0,0 +1,55 @@
support/config-fragments/autobuild/armv5-ctng-linux-gnueabi.config,x86
support/config-fragments/autobuild/armv7-ctng-linux-gnueabihf.config,x86
support/config-fragments/autobuild/br-aarch64-glibc.config,x86_64
support/config-fragments/autobuild/br-arc-full-internal.config,any
support/config-fragments/autobuild/br-arc-internal-glibc.config,any
support/config-fragments/autobuild/br-arcle-hs38.config,x86_64
support/config-fragments/autobuild/br-arm-basic.config,x86_64
support/config-fragments/autobuild/br-arm-cortex-a9-glibc.config,x86_64
support/config-fragments/autobuild/br-arm-cortex-a9-musl.config,x86_64
support/config-fragments/autobuild/br-arm-cortex-m4-full.config,x86_64
support/config-fragments/autobuild/br-arm-full.config,x86_64
support/config-fragments/autobuild/br-arm-full-nothread.config,x86_64
support/config-fragments/autobuild/br-arm-full-static.config,x86_64
support/config-fragments/autobuild/br-arm-internal-full.config,any
support/config-fragments/autobuild/br-arm-internal-glibc.config,any
support/config-fragments/autobuild/br-arm-internal-musl.config,any
support/config-fragments/autobuild/br-bfin-full.config,x86_64
support/config-fragments/autobuild/br-i386-pentium4-full.config,x86_64
support/config-fragments/autobuild/br-i386-pentium-mmx-musl.config,x86_64
support/config-fragments/autobuild/br-m68k-5208-full.config,x86_64
support/config-fragments/autobuild/br-m68k-68040-full.config,x86_64
support/config-fragments/autobuild/br-microblazeel-full.config,x86_64
support/config-fragments/autobuild/br-microblazeel-full-internal.config,any
support/config-fragments/autobuild/br-mips64-n64-full.config,x86_64
support/config-fragments/autobuild/br-mips32r6-el-hf-glibc.config,x86_64
support/config-fragments/autobuild/br-mips64r6-el-hf-glibc.config,x86_64
support/config-fragments/autobuild/br-mipsel-o32-full.config,x86_64
support/config-fragments/autobuild/br-nios2-glibc.config,x86_64
support/config-fragments/autobuild/br-openrisc-uclibc.config,x86_64
support/config-fragments/autobuild/br-powerpc-603e-basic-cpp.config,x86_64
support/config-fragments/autobuild/br-powerpc64le-power8-glibc.config,x86_64
support/config-fragments/autobuild/br-powerpc64-power7-glibc.config,x86_64
support/config-fragments/autobuild/br-powerpc-e500mc-full.config,x86_64
support/config-fragments/autobuild/br-powerpc-internal-full.config,any
support/config-fragments/autobuild/br-sh4-full.config,x86_64
support/config-fragments/autobuild/br-sparc-uclibc.config,x86_64
support/config-fragments/autobuild/br-sparc64-glibc.config,x86_64
support/config-fragments/autobuild/br-x86-64-core2-full.config,x86_64
support/config-fragments/autobuild/br-x86-64-musl.config,x86_64
support/config-fragments/autobuild/br-xtensa-full.config,x86_64
support/config-fragments/autobuild/br-xtensa-full-internal.config,any
support/config-fragments/autobuild/i686-ctng-linux-gnu.config,x86
support/config-fragments/autobuild/linaro-aarch64.config,x86
support/config-fragments/autobuild/linaro-arm.config,x86
support/config-fragments/autobuild/mips64el-ctng_n32-linux-gnu.config,x86
support/config-fragments/autobuild/mips64el-ctng_n64-linux-gnu.config,x86
support/config-fragments/autobuild/powerpc-ctng_e500v2-linux-gnuspe.config,x86
support/config-fragments/autobuild/sourcery-arm-armv4t.config,x86
support/config-fragments/autobuild/sourcery-arm.config,x86
support/config-fragments/autobuild/sourcery-arm-thumb2.config,x86
support/config-fragments/autobuild/sourcery-mips64.config,x86
support/config-fragments/autobuild/sourcery-mips.config,x86
support/config-fragments/autobuild/sourcery-nios2.config,x86
support/config-fragments/autobuild/sourcery-x86-64.config,x86
support/config-fragments/autobuild/x86_64-ctng_locales-linux-gnu.config,x86

@@ -0,0 +1,11 @@
BR2_x86_64=y
BR2_x86_corei7=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://autobuild.buildroot.org/toolchains/tarballs/x86_64-ctng_locales-linux-gnu.tar.xz"
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_PREFIX="x86_64-ctng_locales-linux-gnu"
BR2_TOOLCHAIN_EXTERNAL_GCC_4_8=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_3_9=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y

@@ -0,0 +1,8 @@
# This config fragment disables Buildroot options that are turned on by
# default, in order to arrive at minimal build time.
BR2_INIT_NONE=y
BR2_SYSTEM_BIN_SH_NONE=y
# BR2_PACKAGE_BUSYBOX is not set
# BR2_TARGET_ROOTFS_TAR is not set
BR2_COMPILER_PARANOID_UNSAFE_PATH=y
BR2_PACKAGE_BUSYBOX_SHOW_OTHERS=y
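
A fragment like this is not a standalone .config: it is merged on top of a
defconfig. A hedged sketch of applying it, assuming the fragment lives at
support/config-fragments/minimal.config and using an illustrative defconfig:

# Merge the minimal fragment on top of an existing defconfig
./support/kconfig/merge_config.sh -O output \
    configs/qemu_x86_64_defconfig \
    support/config-fragments/minimal.config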

@@ -10,9 +10,10 @@
#
BR2_CMAKE_VERSION_MIN = 3.1
BR2_CMAKE ?= cmake
ifeq ($(call suitable-host-package,cmake,\
$(BR2_CMAKE) $(BR2_CMAKE_VERSION_MIN)),)
BR2_CMAKE = $(HOST_DIR)/usr/bin/cmake
BR2_CMAKE_CANDIDATES ?= cmake cmake3
BR2_CMAKE ?= $(call suitable-host-package,cmake,\
$(BR2_CMAKE_VERSION_MIN) $(BR2_CMAKE_CANDIDATES))
ifeq ($(BR2_CMAKE),)
BR2_CMAKE = $(HOST_DIR)/bin/cmake
BR2_CMAKE_HOST_DEPENDENCY = host-cmake
endif

@@ -1,47 +1,45 @@
#!/bin/sh
candidate="${1}"
version_min="${2}"
# prevent shift error
[ $# -lt 2 ] && exit 1
major_min="${version_min%.*}"
minor_min="${version_min#*.}"
major_min="${1%.*}"
minor_min="${1#*.}"
# cmake-3.7 incorrectly handles rpath, linking to host libraries
blacklist_version="3.7"
shift
cmake=`which ${candidate}`
if [ ! -x "${cmake}" ]; then
# echo nothing: no suitable cmake found
exit 1
fi
for candidate; do
# Extract version X.Y from versions in the form X.Y or X.Y.Z
# with X, Y and Z numbers with one or more digits each, e.g.
# 3.2 -> 3.2
# 3.2.3 -> 3.2
# 3.2.42 -> 3.2
# 3.10 -> 3.10
# 3.10.4 -> 3.10
# 3.10.42 -> 3.10
version="$(${cmake} --version \
|sed -r -e '/.* ([[:digit:]]+\.[[:digit:]]+).*$/!d;' \
-e 's//\1/'
)"
major="${version%.*}"
minor="${version#*.}"
# Try to locate the candidate. Discard it if not located.
cmake=`which "${candidate}" 2>/dev/null`
[ -n "${cmake}" ] || continue
if [ "${version}" = "${blacklist_version}" ]; then
# echo nothing: no suitable cmake found
exit 1
fi
# Extract version X.Y from versions in the form X.Y or X.Y.Z
# with X, Y and Z numbers with one or more digits each, e.g.
# 3.2 -> 3.2
# 3.2.3 -> 3.2
# 3.2.42 -> 3.2
# 3.10 -> 3.10
# 3.10.4 -> 3.10
# 3.10.42 -> 3.10
# Discard the candidate if no version can be obtained
version="$(${cmake} --version \
|sed -r -e '/.* ([[:digit:]]+\.[[:digit:]]+).*$/!d;' \
-e 's//\1/'
)"
[ -n "${version}" ] || continue
if [ ${major} -gt ${major_min} ]; then
echo "${cmake}"
else
if [ ${major} -eq ${major_min} -a ${minor} -ge ${minor_min} ]; then
major="${version%.*}"
minor="${version#*.}"
if [ ${major} -gt ${major_min} ]; then
echo "${cmake}"
else
# echo nothing: no suitable cmake found
exit 1
exit
elif [ ${major} -eq ${major_min} -a ${minor} -ge ${minor_min} ]; then
echo "${cmake}"
exit
fi
fi
done
# echo nothing: no suitable cmake found
exit 1
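
With this rework the script takes the minimum version first, then any number of
candidate names, and prints the path of the first suitable cmake (or prints
nothing and exits non-zero). A hedged invocation sketch, assuming the helper is
support/dependencies/check-host-cmake.sh:

# Prints e.g. /usr/bin/cmake3 if it is >= 3.1 and not the blacklisted 3.7
./support/dependencies/check-host-cmake.sh 3.1 cmake cmake3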

@@ -1,5 +1,5 @@
ifeq (,$(call suitable-host-package,lzip,$(LZCAT)))
DEPENDENCIES_HOST_PREREQ += host-lzip
EXTRACTOR_DEPENDENCY_PRECHECKED_EXTENSIONS += .lz
LZCAT = $(HOST_DIR)/usr/bin/lzip -d -c
LZCAT = $(HOST_DIR)/bin/lzip -d -c
endif

@@ -2,5 +2,5 @@ TAR ?= tar
ifeq (,$(call suitable-host-package,tar,$(TAR)))
DEPENDENCIES_HOST_PREREQ += host-tar
TAR = $(HOST_DIR)/usr/bin/tar
TAR = $(HOST_DIR)/bin/tar
endif

@@ -26,10 +26,11 @@ if [ ! -z "${version_bsd}" ] ; then
minor=0
fi
# Minimal version = 1.17 (previous versions do not correctly unpack archives
# containing hard-links if the --strip-components option is used).
# Minimal version = 1.27 (previous versions do not correctly unpack archives
# containing hard-links if the --strip-components option is used or create
# different gnu long link headers for path elements > 100 characters).
major_min=1
minor_min=17
minor_min=27
# Maximal version = 1.29 (1.30 changed --numeric-owner output for
# filenames > 100 characters). This is really a fix for a bug in

@@ -4,5 +4,5 @@
ifeq (,$(call suitable-host-package,xzcat,$(XZCAT)))
DEPENDENCIES_HOST_PREREQ += host-xz
EXTRACTOR_DEPENDENCY_PRECHECKED_EXTENSIONS += .xz .lzma
XZCAT = $(HOST_DIR)/usr/bin/xzcat
XZCAT = $(HOST_DIR)/bin/xzcat
endif

@@ -22,12 +22,13 @@ DEPENDENCIES_HOST_PREREQ += host-ccache
endif
core-dependencies:
@HOSTCC="$(firstword $(HOSTCC))" MAKE="$(MAKE)" \
DL_TOOLS="$(sort $(DL_TOOLS_DEPENDENCIES))" \
@MAKE="$(MAKE)" DL_TOOLS="$(sort $(DL_TOOLS_DEPENDENCIES))" \
$(TOPDIR)/support/dependencies/dependencies.sh
core-dependencies $(DEPENDENCIES_HOST_PREREQ): HOSTCC=$(HOSTCC_NOCCACHE)
core-dependencies $(DEPENDENCIES_HOST_PREREQ): HOSTCXX=$(HOSTCXX_NOCCACHE)
$(DEPENDENCIES_HOST_PREREQ): HOSTCC=$(HOSTCC_NOCCACHE)
$(DEPENDENCIES_HOST_PREREQ): HOSTCXX=$(HOSTCXX_NOCCACHE)
$(DEPENDENCIES_HOST_PREREQ): core-dependencies
dependencies: core-dependencies $(DEPENDENCIES_HOST_PREREQ)
################################################################################

@@ -11,39 +11,38 @@ if test $? != 0 ; then
exit 1
fi
# sanity check for CWD in LD_LIBRARY_PATH
# try not to rely on egrep..
if test -n "$LD_LIBRARY_PATH" ; then
echo TRiGGER_start"$LD_LIBRARY_PATH"TRiGGER_end | grep '::' >/dev/null 2>&1 ||
echo TRiGGER_start"$LD_LIBRARY_PATH"TRiGGER_end | grep ':\.:' >/dev/null 2>&1 ||
echo TRiGGER_start"$LD_LIBRARY_PATH"TRiGGER_end | grep 'TRiGGER_start:' >/dev/null 2>&1 ||
echo TRiGGER_start"$LD_LIBRARY_PATH"TRiGGER_end | grep 'TRiGGER_start\.:' >/dev/null 2>&1 ||
echo TRiGGER_start"$LD_LIBRARY_PATH"TRiGGER_end | grep ':TRiGGER_end' >/dev/null 2>&1 ||
echo TRiGGER_start"$LD_LIBRARY_PATH"TRiGGER_end | grep ':\.TRiGGER_end' >/dev/null 2>&1 ||
echo TRiGGER_start"$LD_LIBRARY_PATH"TRiGGER_end | grep 'TRiGGER_start\.TRiGGER_end' >/dev/null 2>&1
if test $? = 0; then
echo
echo "You seem to have the current working directory in your"
echo "LD_LIBRARY_PATH environment variable. This doesn't work."
exit 1;
fi
fi;
# Sanity check for CWD in LD_LIBRARY_PATH
case ":${LD_LIBRARY_PATH:-unset}:" in
(*::*|*:.:*)
echo
echo "You seem to have the current working directory in your"
echo "LD_LIBRARY_PATH environment variable. This doesn't work."
exit 1
;;
esac
# sanity check for CWD in PATH. Having the current working directory
# in the PATH makes the toolchain build process break.
# try not to rely on egrep..
if test -n "$PATH" ; then
echo TRiGGER_start"$PATH"TRiGGER_end | grep ':\.:' >/dev/null 2>&1 ||
echo TRiGGER_start"$PATH"TRiGGER_end | grep 'TRiGGER_start\.:' >/dev/null 2>&1 ||
echo TRiGGER_start"$PATH"TRiGGER_end | grep ':\.TRiGGER_end' >/dev/null 2>&1 ||
echo TRiGGER_start"$PATH"TRiGGER_end | grep 'TRiGGER_start\.TRiGGER_end' >/dev/null 2>&1
if test $? = 0; then
echo
echo "You seem to have the current working directory in your"
echo "PATH environment variable. This doesn't work."
exit 1;
fi
fi;
# Sanity check for CWD in PATH. Having the current working directory
# in the PATH breaks the build process of various packages
# (e.g. toolchain, coreutils...).
# PATH should not contain a newline, otherwise it fails in spectacular
# ways as soon as PATH is referenced in a package rule
# An empty PATH is technically possible, but in practice we would not
# even arrive here if that was the case.
case ":${PATH:-unset}:" in
(*::*|*:.:*)
echo
echo "You seem to have the current working directory in your"
echo "PATH environment variable. This doesn't work."
exit 1
;;
(*"
"*) printf "\n"
# Break the '\n' sequence, or a \n is printed (which is not what we want).
printf "Your PATH contains a newline (%sn) character.\n" "\\"
printf "This doesn't work. Fix you PATH.\n"
exit 1
;;
esac
if test -n "$PERL_MM_OPT" ; then
echo
@@ -188,7 +187,7 @@ if grep ^BR2_NEEDS_HOST_UTF8_LOCALE=y $BR2_CONFIG > /dev/null; then
echo "You need locale support on your build machine to build a toolchain supporting locales"
exit 1 ;
fi
if ! locale -a | grep -q -i utf8$ ; then
if ! locale -a | grep -q -i -E 'utf-?8$' ; then
echo
echo "You need at least one UTF8 locale to build a toolchain supporting locales"
exit 1 ;
@@ -223,6 +222,8 @@ if grep -q ^BR2_HOSTARCH_NEEDS_IA32_LIBS=y $BR2_CONFIG ; then
echo "If you're running a Debian/Ubuntu distribution, install the libc6-i386,"
echo "lib32stdc++6, and lib32z1 packages (or alternatively libc6:i386,"
echo "libstdc++6:i386, and zlib1g:i386)."
echo "If you're running a RedHat/Fedora distribution, install the glibc.i686 and"
echo "zlib.i686 packages."
echo "For other distributions, refer to the documentation on how to install the 32 bits"
echo "compatibility libraries."
exit 1
@@ -237,6 +238,14 @@ if grep -q ^BR2_HOSTARCH_NEEDS_IA32_COMPILER=y $BR2_CONFIG ; then
echo "For other distributions, refer to their documentation."
exit 1
fi
if ! echo "int main(void) {}" | g++ -m32 -x c++ - -o /dev/null 2>/dev/null; then
echo
echo "Your Buildroot configuration needs a compiler capable of building 32 bits binaries."
echo "If you're running a Debian/Ubuntu distribution, install the g++-multilib package."
echo "For other distributions, refer to their documentation."
exit 1
fi
fi
# Check that the Perl installation is complete enough for Buildroot.
@@ -268,3 +277,8 @@ if [ -n "$missing_perl_modules" ] ; then
echo
exit 1
fi
if ! python -c "import argparse" > /dev/null 2>&1 ; then
echo "Your Python installation is not complete enough: argparse module is missing"
exit 1
fi

@@ -0,0 +1,44 @@
# This Dockerfile generates the docker image that gets used by Gitlab CI
# To build it (YYYYMMDD.HHMM is the current date and time in UTC):
# sudo docker build -t buildroot/base:YYYYMMDD.HHMM support/docker
# sudo docker push buildroot/base:YYYYMMDD.HHMM
# We use a specific tag for the base image *and* the corresponding date
# for the repository, so do not forget to update the apt-sources.list
# file that is shipped next to this Dockerfile.
FROM debian:stretch-20171210
LABEL maintainer="Buildroot mailing list <buildroot@buildroot.org>" \
vendor="Buildroot" \
description="Container with everything needed to run Buildroot"
# Setup environment
ENV DEBIAN_FRONTEND noninteractive
# This repository can be a bit slow at times. Don't panic...
COPY apt-sources.list /etc/apt/sources.list
# The container has no package lists, so need to update first
RUN dpkg --add-architecture i386 && \
apt-get update -y && \
apt-get install -y --no-install-recommends \
build-essential cmake libc6:i386 g++-multilib \
bc ca-certificates file locales rsync \
cvs bzr git mercurial subversion wget \
cpio unzip \
libncurses5-dev \
python-nose2 python-pexpect qemu-system-arm qemu-system-x86 && \
apt-get -y autoremove && \
apt-get -y clean
# To be able to generate a toolchain with locales, enable one UTF-8 locale
RUN sed -i 's/# \(en_US.UTF-8\)/\1/' /etc/locale.gen && \
/usr/sbin/locale-gen
RUN useradd -ms /bin/bash br-user && \
chown -R br-user:br-user /home/br-user
USER br-user
WORKDIR /home/br-user
ENV HOME /home/br-user
ENV LC_ALL en_US.UTF-8
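
For completeness, a hedged sketch of entering the resulting container for a
local test build; the tag placeholder follows the header comment:

sudo docker run -it --rm buildroot/base:YYYYMMDD.HHMM /bin/bash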

@@ -0,0 +1,4 @@
# Latest just before 20171210T000000Z:
deb [check-valid-until=no] http://snapshot.debian.org/archive/debian/20171209T220346Z/ stretch main
deb [check-valid-until=no] http://snapshot.debian.org/archive/debian/20171209T220346Z/ stretch-updates main
deb [check-valid-until=no] http://snapshot.debian.org/archive/debian-security/20171209T224618Z/ stretch/updates main

@@ -42,8 +42,9 @@ if [ ! -f "${h_file}" ]; then
fi
# Check one hash for a file
# $1: known hash
# $2: file (full path)
# $1: algo hash
# $2: known hash
# $3: file (full path)
check_one_hash() {
_h="${1}"
_known="${2}"
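
The helper now takes the algorithm as its own first argument. A hedged sketch
of the new calling convention (hash value and file path are illustrative):

# check_one_hash <algo> <known-hash> <file>
check_one_hash sha256 "<expected-sha256>" "dl/foo-1.0.tar.gz"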

@@ -85,8 +85,11 @@ if [ ${recurse} -eq 1 ]; then
_git submodule update --init --recursive
fi
# We do not need the .git dir; we keep other .git files, in case they
# We do not want the .git dir; we keep other .git files, in case they
# are the only files in their directory.
# The .git dir would generate non-reproducible tarballs as it depends on
# the state of the remote server. It also would generate large tarballs
# (gigabytes for some linux trees) when a full clone took place.
rm -rf .git
popd >/dev/null

@@ -33,6 +33,6 @@ _svn() {
eval ${SVN} "${@}"
}
_svn --non-interactive export ${verbose} "${@}" "'${repo}@${rev}'" "'${basename}'"
_svn export ${verbose} "${@}" "'${repo}@${rev}'" "'${basename}'"
tar czf "${output}" "${basename}"

@@ -33,6 +33,7 @@ usage() {
echo " -n use allnoconfig instead of alldefconfig"
echo " -r list redundant entries when merging fragments"
echo " -O dir to put generated output files"
echo " -e colon-separated list of br2-external trees to use (optional)"
}
MAKE=true
@@ -71,6 +72,11 @@ while true; do
shift 2
continue
;;
"-e")
EXTERNAL_ARG="BR2_EXTERNAL=$2"
shift 2
continue
;;
*)
break
;;
@@ -131,7 +137,7 @@ fi
# Use the merged file as the starting point for:
# alldefconfig: Fills in any missing symbols with Kconfig default
# allnoconfig: Fills in any missing symbols with # CONFIG_* is not set
make KCONFIG_ALLCONFIG=$TMP_FILE $OUTPUT_ARG $ALLTARGET
make KCONFIG_ALLCONFIG=$TMP_FILE $EXTERNAL_ARG $OUTPUT_ARG $ALLTARGET
# Check all specified config values took (might have missed-dependency issues)
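
A hedged example of the new -e option, assuming the script is invoked from the
Buildroot top-level directory with two illustrative br2-external trees; the
colon-separated list ends up in BR2_EXTERNAL as shown above:

./support/kconfig/merge_config.sh \
    -e /path/to/ext-tree1:/path/to/ext-tree2 \
    configs/foo_defconfig my-fragment.config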

@@ -0,0 +1,2 @@
# Locally computed
sha256 9755181e27175cb3510b4da8629caa406fb355a19aa8e7d55f06bf8ab33323c4 COPYING

@@ -0,0 +1,7 @@
# Impersonate a Linux system. After all, that's what we are...
set(CMAKE_SYSTEM_NAME Linux)
include(Platform/Linux)
# Override problematic settings, to avoid RPATH against host lib directories.
set_property(GLOBAL PROPERTY FIND_LIBRARY_USE_LIB32_PATHS FALSE)
set_property(GLOBAL PROPERTY FIND_LIBRARY_USE_LIB64_PATHS FALSE)

@@ -5,7 +5,7 @@
################################################################################
# Buildroot version to use
RELEASE='2017.02'
RELEASE='2018.02'
### Change here for more memory/cores ###
VM_MEMORY=2048

@@ -0,0 +1,47 @@
#!/bin/sh
#
if [ "$#" -ne 0 ]; then
echo "Run this script to relocate the buildroot SDK at that location"
exit 1
fi
LOCFILE="share/buildroot/sdk-location"
FILEPATH="$(readlink -f "$0")"
NEWPATH="$(dirname "${FILEPATH}")"
cd "${NEWPATH}"
if [ ! -r "${LOCFILE}" ]; then
echo "Previous location of the buildroot SDK not found!"
exit 1
fi
OLDPATH="$(cat "${LOCFILE}")"
if [ "${NEWPATH}" = "${OLDPATH}" ]; then
echo "This buildroot SDK has already been relocated!"
exit 0
fi
# Check if the path substitution does work properly, e.g. a tree
# "/a/b/c" copied into "/a/b/c/a/b/c/" would not be allowed.
newpath="$(sed -e "s|${OLDPATH}|${NEWPATH}|g" "${LOCFILE}")"
if [ "${NEWPATH}" != "${newpath}" ]; then
echo "Something went wrong with substituting the path!"
echo "Please choose another location for your SDK!"
exit 1
fi
echo "Relocating the buildroot SDK from ${OLDPATH} to ${NEWPATH} ..."
# Make sure file uses the right language
export LC_ALL=C
# Replace the old path with the new one in all text files
grep -lr "${OLDPATH}" . | while read -r FILE ; do
if file -b --mime-type "${FILE}" | grep -q '^text/' && [ "${FILE}" != "${LOCFILE}" ]
then
sed -i "s|${OLDPATH}|${NEWPATH}|g" "${FILE}"
fi
done
# At the very end, we update the location file to not break the
# SDK if this script gets interrupted.
sed -i "s|${OLDPATH}|${NEWPATH}|g" ${LOCFILE}
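
A hedged sketch of the intended workflow, with illustrative paths:

tar xf buildroot-sdk.tar.gz -C /opt   # unpack the SDK at its new home
cd /opt/buildroot-sdk                 # illustrative directory name
./relocate-sdk.sh                     # rewrite the recorded paths in place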

@@ -25,5 +25,5 @@ can extract as root:
Those image files are created using the contents of the target/
directory, but there is a post-processing step to create device files
and set owernship/permissions properly even if Buildroot does not run
and set ownership/permissions properly even if Buildroot does not run
as root.

@@ -4,13 +4,17 @@
#
# In order to allow the toolchain to be relocated, we calculate the
# HOST_DIR based on this file's location: $(HOST_DIR)/usr/share/buildroot
# HOST_DIR based on this file's location: $(HOST_DIR)/share/buildroot
# and store it in RELOCATED_HOST_DIR.
# All the other variables that need to refer to HOST_DIR will use the
# RELOCATED_HOST_DIR variable.
string(REPLACE "/usr/share/buildroot" "" RELOCATED_HOST_DIR ${CMAKE_CURRENT_LIST_DIR})
string(REPLACE "/share/buildroot" "" RELOCATED_HOST_DIR ${CMAKE_CURRENT_LIST_DIR})
set(CMAKE_SYSTEM_NAME Linux)
# Point cmake to the location where we have our custom modules,
# so that it can find our custom platform description.
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR})
set(CMAKE_SYSTEM_NAME Buildroot)
set(CMAKE_SYSTEM_PROCESSOR @@CMAKE_SYSTEM_PROCESSOR@@)
# Set the {C,CXX}FLAGS appended by CMake depending on the build type
@@ -44,7 +48,7 @@ set(CMAKE_EXE_LINKER_FLAGS "@@TARGET_LDFLAGS@@" CACHE STRING "Buildroot LDFLAGS
set(CMAKE_INSTALL_SO_NO_EXE 0)
set(CMAKE_PROGRAM_PATH "${RELOCATED_HOST_DIR}/usr/bin")
set(CMAKE_PROGRAM_PATH "${RELOCATED_HOST_DIR}/bin")
set(CMAKE_SYSROOT "${RELOCATED_HOST_DIR}/@@STAGING_SUBDIR@@")
set(CMAKE_FIND_ROOT_PATH "${RELOCATED_HOST_DIR}/@@STAGING_SUBDIR@@")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
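
The relocated toolchain file is consumed in the standard CMake way; a minimal
sketch, assuming HOST_DIR points at the extracted SDK root:

mkdir build && cd build
cmake -DCMAKE_TOOLCHAIN_FILE="${HOST_DIR}/share/buildroot/toolchainfile.cmake" ..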

@@ -113,6 +113,11 @@ function apply_patch {
echo " to be applied : ${path}/${patch}"
exit 1
fi
if grep -q "^rename from" ${path}/${patch} && \
grep -q "^rename to" ${path}/${patch} ; then
echo "Error: patch contains some renames, not supported by old patch versions"
exit 1
fi
echo "${path}/${patch}" >> ${builddir}/.applied_patches_list
${uncomp} "${path}/$patch" | patch -g0 -p1 -E -d "${builddir}" -t -N $silent
if [ $? != 0 ] ; then

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
set -e
# This script must be able to run with bash-3.1, so it can't use

@@ -3,11 +3,12 @@
import sys
import subprocess
# Execute the "make <pkg>-show-version" command to get the version of a given
# list of packages, and return the version formatted as a Python dictionary.
def get_version(pkgs):
sys.stderr.write("Getting version for %s\n" % pkgs)
cmd = ["make", "-s", "--no-print-directory" ]
cmd = ["make", "-s", "--no-print-directory"]
for pkg in pkgs:
cmd.append("%s-show-version" % pkg)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
@@ -25,9 +26,10 @@ def get_version(pkgs):
version[pkg] = output[i]
return version
def _get_depends(pkgs, rule):
sys.stderr.write("Getting dependencies for %s\n" % pkgs)
cmd = ["make", "-s", "--no-print-directory" ]
cmd = ["make", "-s", "--no-print-directory"]
for pkg in pkgs:
cmd.append("%s-%s" % (pkg, rule))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
@@ -49,12 +51,14 @@ def _get_depends(pkgs, rule):
deps[pkg] = pkg_deps
return deps
# Execute the "make <pkg>-show-depends" command to get the list of
# dependencies of a given list of packages, and return the list of
# dependencies formatted as a Python dictionary.
def get_depends(pkgs):
return _get_depends(pkgs, 'show-depends')
# Execute the "make <pkg>-show-rdepends" command to get the list of
# reverse dependencies of a given list of packages, and return the
# list of dependencies formatted as a Python dictionary.

@@ -0,0 +1,88 @@
#!/usr/bin/env bash
# List of hardcoded paths that should be ignored, as they may
# contain binaries for an architecture different from the
# architecture of the target.
declare -a IGNORES=(
# Skip firmware files, they could be ELF files for other
# architectures
"/lib/firmware"
"/usr/lib/firmware"
# Skip kernel modules
# When building a 32-bit userland on 64-bit architectures, the kernel
# and its modules may still be 64-bit. To keep the basic
# check-bin-arch logic simple, just skip this directory.
"/lib/modules"
"/usr/lib/modules"
# Skip files in /usr/share, several packages (qemu,
# pru-software-support) legitimately install ELF binaries that
# are not for the target architecture
"/usr/share"
)
while getopts p:l:r:a:i: OPT ; do
case "${OPT}" in
p) package="${OPTARG}";;
l) pkg_list="${OPTARG}";;
r) readelf="${OPTARG}";;
a) arch_name="${OPTARG}";;
i)
# Ensure we do have single '/' as separators,
# and that we have a leading and a trailing one.
pattern="$(sed -r -e 's:/+:/:g; s:^/*:/:; s:/*$:/:;' <<<"${OPTARG}")"
IGNORES+=("${pattern}")
;;
:) error "option '%s' expects a mandatory argument\n" "${OPTARG}";;
\?) error "unknown option '%s'\n" "${OPTARG}";;
esac
done
if test -z "${package}" -o -z "${pkg_list}" -o -z "${readelf}" -o -z "${arch_name}" ; then
echo "Usage: $0 -p <pkg> -l <pkg-file-list> -r <readelf> -a <arch name> [-i PATH ...]"
exit 1
fi
exitcode=0
# Only split on new lines, for filenames-with-spaces
IFS="
"
while read f; do
for ignore in "${IGNORES[@]}"; do
if [[ "${f}" =~ ^"${ignore}" ]]; then
continue 2
fi
done
# Skip symlinks. Some symlinks may have absolute paths as
# target, pointing to host binaries while we're building.
if [[ -L "${TARGET_DIR}/${f}" ]]; then
continue
fi
# Get architecture using readelf. We pipe through 'head -1' so
# that when the file is a static library (.a), we only take
# into account the architecture of the first object file.
arch=$(LC_ALL=C ${readelf} -h "${TARGET_DIR}/${f}" 2>&1 | \
sed -r -e '/^ Machine: +(.+)/!d; s//\1/;' | head -1)
# If no architecture found, assume it was not an ELF file
if test "${arch}" = "" ; then
continue
fi
# Architecture is correct
if test "${arch}" = "${arch_name}" ; then
continue
fi
printf 'ERROR: architecture for "%s" is "%s", should be "%s"\n' \
"${f}" "${arch}" "${arch_name}"
exitcode=1
done < <( sed -r -e "/^${package},\.(.+)$/!d; s//\1/;" ${pkg_list} )
exit ${exitcode}
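
A hedged invocation sketch with illustrative paths; TARGET_DIR comes from the
environment, and -i adds an extra ignored prefix on top of the built-in list:

TARGET_DIR=output/target \
support/scripts/check-bin-arch -p foo \
    -l output/build/packages-file-list.txt \
    -r output/host/bin/arm-linux-readelf \
    -a ARM -i /usr/lib/foo-firmware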

@@ -1,7 +1,7 @@
#!/usr/bin/env bash
# This script scans $(HOST_DIR)/{bin,sbin} for all ELF files, and checks
# they have an RPATH to $(HOST_DIR)/usr/lib if they need libraries from
# they have an RPATH to $(HOST_DIR)/lib if they need libraries from
# there.
# Override the user's locale so we are sure we can parse the output of
@@ -18,6 +18,7 @@ main() {
ret=0
while read file; do
is_elf "${file}" || continue
elf_needs_rpath "${file}" "${hostdir}" || continue
check_elf_has_rpath "${file}" "${hostdir}" && continue
if [ ${ret} -eq 0 ]; then
@@ -26,21 +27,25 @@ main() {
printf "*** ERROR: package %s installs executables without proper RPATH:\n" "${pkg}"
fi
printf "*** %s\n" "${file}"
done < <( find "${hostdir}"/{,usr/}{bin,sbin} -type f -exec file {} + 2>/dev/null \
|sed -r -e '/^([^:]+):.*\<ELF\>.*\<executable\>.*/!d' \
-e 's//\1/' \
)
done < <( find "${hostdir}"/{bin,sbin} -type f 2>/dev/null )
return ${ret}
}
is_elf() {
local f="${1}"
readelf -l "${f}" 2>/dev/null \
|grep -E 'Requesting program interpreter:' >/dev/null 2>&1
}
elf_needs_rpath() {
local file="${1}"
local hostdir="${2}"
local lib
while read lib; do
[ -e "${hostdir}/usr/lib/${lib}" ] && return 0
[ -e "${hostdir}/lib/${lib}" ] && return 0
done < <( readelf -d "${file}" \
|sed -r -e '/^.* \(NEEDED\) .*Shared library: \[(.+)\]$/!d;' \
-e 's//\1/;' \
@@ -58,7 +63,7 @@ check_elf_has_rpath() {
for dir in ${rpath//:/ }; do
# Remove duplicate and trailing '/' for proper match
dir="$( sed -r -e 's:/+:/:g; s:/$::;' <<<"${dir}" )"
[ "${dir}" = "${hostdir}/usr/lib" ] && return 0
[ "${dir}" = "${hostdir}/lib" -o "${dir}" = "\$ORIGIN/../lib" ] && return 0
done
done < <( readelf -d "${file}" \
|sed -r -e '/.* \(R(UN)?PATH\) +Library r(un)?path: \[(.+)\]$/!d' \

@@ -0,0 +1,49 @@
#!/usr/bin/env python
import sys
import csv
import argparse
from collections import defaultdict
warn = 'Warning: {0} file "{1}" is touched by more than one package: {2}\n'
def main():
parser = argparse.ArgumentParser()
parser.add_argument('packages_file_list', nargs='*',
help='The packages-file-list to check from')
parser.add_argument('-t', '--type', metavar="TYPE",
help='Report as a TYPE file (TYPE is either target, staging, or host)')
args = parser.parse_args()
if not len(args.packages_file_list) == 1:
sys.stderr.write('No packages-file-list was provided.\n')
return False
if args.type is None:
sys.stderr.write('No type was provided\n')
return False
file_to_pkg = defaultdict(list)
with open(args.packages_file_list[0], 'rb') as pkg_file_list:
for line in pkg_file_list.readlines():
pkg, _, file = line.rstrip(b'\n').partition(b',')
file_to_pkg[file].append(pkg)
for file in file_to_pkg:
if len(file_to_pkg[file]) > 1:
# If possible, try to decode the binary strings with
# the default user's locale
try:
sys.stderr.write(warn.format(args.type, file.decode(),
[p.decode() for p in file_to_pkg[file]]))
except UnicodeDecodeError:
# ... but fallback to just dumping them raw if they
# contain non-representable chars
sys.stderr.write(warn.format(args.type, file,
file_to_pkg[file]))
if __name__ == "__main__":
sys.exit(main())
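
A hedged usage sketch against a build's packages-file-list (paths illustrative):

support/scripts/check-uniq-files -t target \
    output/build/packages-file-list.txt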

@@ -45,7 +45,7 @@ if test ! -d ${project_directory}/host ; then
exit 1
fi
if test ! -e ${project_directory}/host/usr/bin/${toolchain_prefix}gcc ; then
if test ! -e ${project_directory}/host/bin/${toolchain_prefix}gcc ; then
echo "Cannot find the cross-compiler in the project directory"
exit 1
fi

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# This is a script to find, and correct, a problem with old versions of
# configure that affect powerpc64 and powerpc64le.

@@ -0,0 +1,147 @@
#!/usr/bin/env bash
# Copyright (C) 2016 Samuel Martin <s.martin49@gmail.com>
# Copyright (C) 2017 Wolfgang Grandegger <wg@grandegger.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
usage() {
cat <<EOF >&2
Usage: ${0} TREE_KIND
Description:
This script scans a tree and sanitizes the RPATH of every ELF file found in it.
Sanitization behaves the same regardless of the kind of tree processed,
but the resulting RPATH differs. The rpath sanitization is done using
"patchelf --make-rpath-relative".
Arguments:
TREE_KIND Kind of tree to be processed.
Allowed values: host, target, staging
Environment:
PATCHELF patchelf program to use
(default: HOST_DIR/bin/patchelf)
HOST_DIR host directory
STAGING_DIR staging directory
TARGET_DIR target directory
TOOLCHAIN_EXTERNAL_DOWNLOAD_INSTALL_DIR
(default HOST_DIR/opt/ext-toolchain)
Returns: 0 if success or 1 in case of error
EOF
}
: ${PATCHELF:=${HOST_DIR}/bin/patchelf}
# ELF files should not be in these sub-directories
HOST_EXCLUDEPATHS="/share/terminfo"
STAGING_EXCLUDEPATHS="/usr/include /usr/share/terminfo"
TARGET_EXCLUDEPATHS="/lib/firmware"
main() {
local rootdir
local tree="${1}"
local find_args=( )
local sanitize_extra_args=( )
if ! "${PATCHELF}" --version > /dev/null 2>&1; then
echo "Error: can't execute patchelf utility '${PATCHELF}'"
exit 1
fi
case "${tree}" in
host)
rootdir="${HOST_DIR}"
# do not process the sysroot (only contains target binaries)
find_args+=( "-path" "${STAGING_DIR}" "-prune" "-o" )
# do not process the external toolchain installation directory to
# avoid breaking it.
test "${TOOLCHAIN_EXTERNAL_DOWNLOAD_INSTALL_DIR}" != "" && \
find_args+=( "-path" "${TOOLCHAIN_EXTERNAL_DOWNLOAD_INSTALL_DIR}" "-prune" "-o" )
for excludepath in ${HOST_EXCLUDEPATHS}; do
find_args+=( "-path" "${HOST_DIR}""${excludepath}" "-prune" "-o" )
done
# do not process the patchelf binary but a copy to work-around "file in use"
find_args+=( "-path" "${PATCHELF}" "-prune" "-o" )
cp "${PATCHELF}" "${PATCHELF}.__to_be_patched"
# we always want $ORIGIN-based rpaths to make it relocatable.
sanitize_extra_args+=( "--relative-to-file" )
;;
staging)
rootdir="${STAGING_DIR}"
# ELF files should not be in these sub-directories
for excludepath in ${STAGING_EXCLUDEPATHS}; do
find_args+=( "-path" "${STAGING_DIR}""${excludepath}" "-prune" "-o" )
done
# should be like for the target tree below
sanitize_extra_args+=( "--no-standard-lib-dirs" )
;;
target)
rootdir="${TARGET_DIR}"
for excludepath in ${TARGET_EXCLUDEPATHS}; do
find_args+=( "-path" "${TARGET_DIR}""${excludepath}" "-prune" "-o" )
done
# we don't want $ORIGIN-based rpaths but absolute paths without rootdir.
# we also want to remove rpaths pointing to /lib or /usr/lib.
sanitize_extra_args+=( "--no-standard-lib-dirs" )
;;
*)
usage
exit 1
;;
esac
find_args+=( "-type" "f" "-print" )
while read file ; do
# check if it's an ELF file
if ${PATCHELF} --print-rpath "${file}" > /dev/null 2>&1; then
# make files writable if necessary
changed=$(chmod -c u+w "${file}")
# call patchelf to sanitize the rpath
${PATCHELF} --make-rpath-relative "${rootdir}" ${sanitize_extra_args[@]} "${file}"
# restore the original permission
test "${changed}" != "" && chmod u-w "${file}"
fi
done < <(find "${rootdir}" ${find_args[@]})
# Restore patched patchelf utility
test "${tree}" = "host" && mv "${PATCHELF}.__to_be_patched" "${PATCHELF}"
# ignore errors
return 0
}
main ${@}
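
A hedged invocation sketch, passing the environment variables the usage text
documents (paths illustrative):

PATCHELF=output/host/bin/patchelf \
HOST_DIR=$(pwd)/output/host \
STAGING_DIR=$(pwd)/output/staging \
TARGET_DIR=$(pwd)/output/target \
support/scripts/fix-rpath host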

@@ -0,0 +1,40 @@
#!/usr/bin/env bash
die() {
cat <<EOF >&2
Error: $@
Usage: ${0} -c GENIMAGE_CONFIG_FILE
EOF
exit 1
}
# Parse arguments and put into argument list of the script
opts="$(getopt -n "${0##*/}" -o c: -- "$@")" || exit $?
eval set -- "$opts"
GENIMAGE_TMP="${BUILD_DIR}/genimage.tmp"
while true ; do
case "$1" in
-c)
GENIMAGE_CFG="${2}";
shift 2 ;;
--) # Discard all non-option parameters
shift 1;
break ;;
*)
die "unknown option '${1}'" ;;
esac
done
[ -n "${GENIMAGE_CFG}" ] || die "Missing argument"
rm -rf "${GENIMAGE_TMP}"
genimage \
--rootpath "${TARGET_DIR}" \
--tmppath "${GENIMAGE_TMP}" \
--inputpath "${BINARIES_DIR}" \
--outputpath "${BINARIES_DIR}" \
--config "${GENIMAGE_CFG}"
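
A hedged usage sketch; the wrapper expects BINARIES_DIR, TARGET_DIR and
BUILD_DIR in the environment (paths illustrative):

BINARIES_DIR=output/images \
TARGET_DIR=output/target \
BUILD_DIR=output/build \
support/scripts/genimage.sh -c board/myboard/genimage.cfg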

@@ -1,83 +0,0 @@
#!/usr/bin/env python
import argparse
import getdeveloperlib
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('patches', metavar='P', type=argparse.FileType('r'), nargs='*',
help='list of patches (use - to read patches from stdin)')
parser.add_argument('-a', dest='architecture', action='store',
help='find developers in charge of this architecture')
parser.add_argument('-p', dest='package', action='store',
help='find developers in charge of this package')
parser.add_argument('-c', dest='check', action='store_const',
const=True, help='list files not handled by any developer')
return parser.parse_args()
def __main__():
devs = getdeveloperlib.parse_developers()
if devs is None:
sys.exit(1)
args = parse_args()
# Check that only one action is given
action = 0
if args.architecture is not None:
action += 1
if args.package is not None:
action += 1
if args.check:
action += 1
if len(args.patches) != 0:
action += 1
if action > 1:
print("Cannot do more than one action")
return
if action == 0:
print("No action specified")
return
# Handle the check action
if args.check:
files = getdeveloperlib.check_developers(devs)
for f in files:
print(f)
# Handle the architecture action
if args.architecture is not None:
for dev in devs:
if args.architecture in dev.architectures:
print(dev.name)
return
# Handle the package action
if args.package is not None:
for dev in devs:
if args.package in dev.packages:
print(dev.name)
return
# Handle the patches action
if len(args.patches) != 0:
(files, infras) = getdeveloperlib.analyze_patches(args.patches)
matching_devs = set()
for dev in devs:
# See if we have developers matching by package name
for f in files:
if dev.hasfile(f):
matching_devs.add(dev.name)
# See if we have developers matching by package infra
for i in infras:
if i in dev.infras:
matching_devs.add(dev.name)
result = "--to buildroot@buildroot.org"
for dev in matching_devs:
result += " --cc \"%s\"" % dev
if result != "":
print("git send-email %s" % result)
__main__()
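# Example invocations (hypothetical paths and names):
#   ./get-developers -p openssl        # developers in charge of a package
#   ./get-developers -a arm            # developers in charge of an architecture
#   ./get-developers -c                # files not handled by any developer
#   ./get-developers 0001-foo.patch    # suggest 'git send-email' recipients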

View File

@@ -1,200 +0,0 @@
import sys
import os
import re
import argparse
import glob
import subprocess
#
# Patch parsing functions
#
FIND_INFRA_IN_PATCH = re.compile("^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")
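# Illustrative: a patch line '+$(eval $(generic-package))' matches with
# group(2) == 'generic'; '+$(eval $(host-generic-package))' matches too,
# with the 'host-' prefix captured in group(1).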
def analyze_patch(patch):
"""Parse one patch and return the list of files modified, added or
removed by the patch."""
files = set()
infras = set()
for line in patch:
# If the patch is adding a package, find which infra it is
m = FIND_INFRA_IN_PATCH.match(line)
if m:
infras.add(m.group(2))
if not line.startswith("+++ "):
continue
line = line.strip()
fname = line[line.find("/") + 1 : ].strip()
if fname == "dev/null":
continue
files.add(fname)
return (files, infras)
FIND_INFRA_IN_MK = re.compile("^\$\(eval \$\((host-)?([^-]*)-package\)\)$")
def fname_get_package_infra(fname):
"""Checks whether the file name passed as argument is a Buildroot .mk
file describing a package, and find the infrastructure it's using."""
if not fname.endswith(".mk"):
return None
if not os.path.exists(fname):
return None
with open(fname, "r") as f:
for l in f:
l = l.strip()
m = FIND_INFRA_IN_MK.match(l)
if m:
return m.group(2)
return None
def get_infras(files):
"""Search in the list of files for .mk files, and collect the package
infrastructures used by those .mk files."""
infras = set()
for fname in files:
infra = fname_get_package_infra(fname)
if infra:
infras.add(infra)
return infras
def analyze_patches(patches):
"""Parse a list of patches and returns the list of files modified,
added or removed by the patches, as well as the list of package
infrastructures used by those patches (if any)"""
allfiles = set()
allinfras = set()
for patch in patches:
(files, infras) = analyze_patch(patch)
allfiles = allfiles | files
allinfras = allinfras | infras
allinfras = allinfras | get_infras(allfiles)
return (allfiles, allinfras)
#
# DEVELOPERS file parsing functions
#
class Developer:
def __init__(self, name, files):
self.name = name
self.files = files
self.packages = parse_developer_packages(files)
self.architectures = parse_developer_architectures(files)
self.infras = parse_developer_infras(files)
def hasfile(self, f):
f = os.path.abspath(f)
for fs in self.files:
if f.startswith(fs):
return True
return False
def parse_developer_packages(fnames):
"""Given a list of file patterns, travel through the Buildroot source
tree to find which packages are implemented by those file
patterns, and return a list of those packages."""
packages = set()
for fname in fnames:
for root, dirs, files in os.walk(fname):
for f in files:
path = os.path.join(root, f)
if fname_get_package_infra(path):
pkg = os.path.splitext(f)[0]
packages.add(pkg)
return packages
def parse_arches_from_config_in(fname):
"""Given a path to an arch/Config.in.* file, parse it to get the list
of BR2_ARCH values for this architecture."""
arches = set()
with open(fname, "r") as f:
parsing_arches = False
for l in f:
l = l.strip()
if l == "config BR2_ARCH":
parsing_arches = True
continue
if parsing_arches:
m = re.match("^\s*default \"([^\"]*)\".*", l)
if m:
arches.add(m.group(1))
else:
parsing_arches = False
return arches
def parse_developer_architectures(fnames):
"""Given a list of file names, find the ones starting by
'arch/Config.in.', and use that to determine the architecture a
developer is working on."""
arches = set()
for fname in fnames:
if not re.match("^.*/arch/Config\.in\..*$", fname):
continue
arches = arches | parse_arches_from_config_in(fname)
return arches
def parse_developer_infras(fnames):
infras = set()
for fname in fnames:
m = re.match("^package/pkg-([^.]*).mk$", fname)
if m:
infras.add(m.group(1))
return infras
def parse_developers(basepath=None):
"""Parse the DEVELOPERS file and return a list of Developer objects."""
developers = []
linen = 0
if basepath is None:
basepath = os.getcwd()
with open(os.path.join(basepath, "DEVELOPERS"), "r") as f:
files = []
name = None
for l in f:
l = l.strip()
if l.startswith("#"):
continue
elif l.startswith("N:"):
if name is not None or len(files) != 0:
print("Syntax error in DEVELOPERS file, line %d" % linen)
name = l[2:].strip()
elif l.startswith("F:"):
fname = l[2:].strip()
dev_files = glob.glob(os.path.join(basepath, fname))
if len(dev_files) == 0:
print("WARNING: '%s' doesn't match any file" % fname)
files += dev_files
elif l == "":
if not name:
continue
developers.append(Developer(name, files))
files = []
name = None
else:
print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, l))
return None
linen += 1
# handle last developer
if name is not None:
developers.append(Developer(name, files))
return developers
def check_developers(developers, basepath=None):
"""Look at the list of files versioned in Buildroot, and returns the
list of files that are not handled by any developer"""
if basepath is None:
basepath = os.getcwd()
cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
files = subprocess.check_output(cmd).strip().split("\n")
unhandled_files = []
for f in files:
handled = False
for d in developers:
if d.hasfile(os.path.join(basepath, f)):
handled = True
break
if not handled:
unhandled_files.append(f)
return unhandled_files

View File

@@ -64,14 +64,14 @@ except ImportError:
# Note: matplotlib.use() must be called *before* matplotlib.pyplot.
mpl.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import csv
import argparse
import matplotlib.pyplot as plt # noqa: E402
import matplotlib.font_manager as fm # noqa: E402
import csv # noqa: E402
import argparse # noqa: E402
steps = [ 'extract', 'patch', 'configure', 'build',
'install-target', 'install-staging', 'install-images',
'install-host']
steps = ['extract', 'patch', 'configure', 'build',
'install-target', 'install-staging', 'install-images',
'install-host']
default_colors = ['#e60004', '#009836', '#2e1d86', '#ffed00',
'#0068b5', '#f28e00', '#940084', '#97c000']
@@ -79,6 +79,7 @@ default_colors = ['#e60004', '#009836', '#2e1d86', '#ffed00',
alternate_colors = ['#00e0e0', '#3f7f7f', '#ff0000', '#00c000',
'#0080ff', '#c000ff', '#00eeee', '#e0e000']
class Package:
def __init__(self, name):
self.name = name
@@ -104,6 +105,7 @@ class Package:
return self.steps_duration[step]
return 0
# Generate an histogram of the time spent in each step of each
# package.
def pkg_histogram(data, output, order="build"):
@@ -132,10 +134,10 @@ def pkg_histogram(data, output, order="build"):
for i in range(0, len(vals)):
b = plt.bar(ind+0.1, vals[i], width=0.8, color=colors[i], bottom=bottom, linewidth=0.25)
legenditems.append(b[0])
bottom = [ bottom[j] + vals[i][j] for j in range(0, len(vals[i])) ]
bottom = [bottom[j] + vals[i][j] for j in range(0, len(vals[i]))]
# Draw the package names
plt.xticks(ind + .6, [ p.name for p in data ], rotation=-60, rotation_mode="anchor", fontsize=8, ha='left')
plt.xticks(ind + .6, [p.name for p in data], rotation=-60, rotation_mode="anchor", fontsize=8, ha='left')
# Adjust size of graph depending on the number of packages
# Ensure a minimal size of twice the default
@@ -172,6 +174,7 @@ def pkg_histogram(data, output, order="build"):
# Save graph
plt.savefig(output)
# Generate a pie chart with the time spent building each package.
def pkg_pie_time_per_package(data, output):
# Compute total build duration
@@ -184,7 +187,7 @@ def pkg_pie_time_per_package(data, output):
labels = []
values = []
other_value = 0
for p in data:
for p in sorted(data, key=lambda p: p.get_duration()):
if p.get_duration() < (total * 0.01):
other_value += p.get_duration()
else:
@@ -210,6 +213,7 @@ def pkg_pie_time_per_package(data, output):
plt.title('Build time per package')
plt.savefig(output)
# Generate a pie chart with a portion for the overall time spent in
# each step for all packages.
def pkg_pie_time_per_step(data, output):
@@ -236,6 +240,7 @@ def pkg_pie_time_per_step(data, output):
plt.title('Build time per step')
plt.savefig(output)
# Parses the csv file passed on standard input and returns a list of
# Package objects, filled with the duration of each step and the total
# duration of the package.
@@ -269,6 +274,7 @@ def read_data(input_file):
return pkgs
parser = argparse.ArgumentParser(description='Draw build time graphs')
parser.add_argument("--type", '-t', metavar="GRAPH_TYPE",
help="Type of graph (histogram, pie-packages, pie-steps)")

View File

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python
# Usage (the graphviz package must be installed in your distribution)
# ./support/scripts/graph-depends [-p package-name] > test.dot
@@ -30,7 +30,7 @@ import brpkgutil
# Modes of operation:
MODE_FULL = 1 # draw full dependency graph for all selected packages
MODE_PKG = 2 # draw dependency graph for a given package
MODE_PKG = 2 # draw dependency graph for a given package
mode = 0
# Limit drawing the dependency graph to this depth. 0 means 'no limit'.
@@ -49,18 +49,18 @@ parser.add_argument("--package", '-p', metavar="PACKAGE",
parser.add_argument("--depth", '-d', metavar="DEPTH", dest="depth", type=int, default=0,
help="Limit the dependency graph to DEPTH levels; 0 means no limit.")
parser.add_argument("--stop-on", "-s", metavar="PACKAGE", dest="stop_list", action="append",
help="Do not graph past this package (can be given multiple times)." \
+ " Can be a package name or a glob, " \
+ " 'virtual' to stop on virtual packages, or " \
+ "'host' to stop on host packages.")
help="Do not graph past this package (can be given multiple times)." +
" Can be a package name or a glob, " +
" 'virtual' to stop on virtual packages, or " +
"'host' to stop on host packages.")
parser.add_argument("--exclude", "-x", metavar="PACKAGE", dest="exclude_list", action="append",
help="Like --stop-on, but do not add PACKAGE to the graph.")
parser.add_argument("--colours", "-c", metavar="COLOR_LIST", dest="colours",
default="lightblue,grey,gainsboro",
help="Comma-separated list of the three colours to use" \
+ " to draw the top-level package, the target" \
+ " packages, and the host packages, in this order." \
+ " Defaults to: 'lightblue,grey,gainsboro'")
help="Comma-separated list of the three colours to use" +
" to draw the top-level package, the target" +
" packages, and the host packages, in this order." +
" Defaults to: 'lightblue,grey,gainsboro'")
parser.add_argument("--transitive", dest="transitive", action='store_true',
default=False)
parser.add_argument("--no-transitive", dest="transitive", action='store_false',
@@ -114,7 +114,7 @@ else:
# Get the colours: we need exactly three colours,
# so no need to split more than 4 times
# We'll let 'dot' validate the colours...
colours = args.colours.split(',',4)
colours = args.colours.split(',', 4)
if len(colours) != 3:
sys.stderr.write("Error: incorrect colour list '%s'\n" % args.colours)
sys.exit(1)
@@ -124,6 +124,7 @@ host_colour = colours[2]
allpkgs = []
# Execute the "make show-targets" command to get the list of the main
# Buildroot PACKAGES and return it formatted as a Python list. This
# list is used as the starting point for full dependency graphs
@@ -138,6 +139,7 @@ def get_targets():
return []
return output.split(' ')
# Recursive function that builds the tree of dependencies for a given
# list of packages. The dependencies are built in a list called
# 'dependencies', which contains tuples of the form (pkg1 ->
@@ -179,10 +181,12 @@ def get_all_depends(pkgs):
return dependencies
# The Graphviz "dot" utility doesn't like dashes in node names. So for
# node names, we strip all dashes.
def pkg_node_name(pkg):
return pkg.replace("-","")
return pkg.replace("-", "")
TARGET_EXCEPTIONS = [
"target-finalize",
@@ -225,35 +229,39 @@ for dep in dependencies:
# sub-dicts is "pkg2".
is_dep_cache = {}
def is_dep_cache_insert(pkg, pkg2, val):
try:
is_dep_cache[pkg].update({pkg2: val})
except KeyError:
is_dep_cache[pkg] = {pkg2: val}
# Retrieves from the cache whether pkg2 is a transitive dependency
# of pkg.
# Note: raises a KeyError exception if the dependency is not known.
def is_dep_cache_lookup(pkg, pkg2):
return is_dep_cache[pkg][pkg2]
# This function returns True if pkg is a dependency (direct or
# transitive) of pkg2, dependencies being listed in the deps
# dictionary. Returns False otherwise.
# This is the un-cached version.
def is_dep_uncached(pkg,pkg2,deps):
def is_dep_uncached(pkg, pkg2, deps):
try:
for p in deps[pkg2]:
if pkg == p:
return True
if is_dep(pkg,p,deps):
if is_dep(pkg, p, deps):
return True
except KeyError:
pass
return False
# See is_dep_uncached() above; this is the cached version.
def is_dep(pkg,pkg2,deps):
def is_dep(pkg, pkg2, deps):
try:
return is_dep_cache_lookup(pkg, pkg2)
except KeyError:
@@ -261,6 +269,7 @@ def is_dep(pkg,pkg2,deps):
is_dep_cache_insert(pkg, pkg2, val)
return val
# This function eliminates transitive dependencies; for example, given
# this dependency chain: A->{B,C} and B->{C}, the A->{C} dependency is
# already covered by B->{C}, so C is a transitive dependency of A, via B.
@@ -269,30 +278,32 @@ def is_dep(pkg,pkg2,deps):
# - if d[i] is a dependency of any of the other dependencies d[j]
# - do not keep d[i]
# - otherwise keep d[i]
def remove_transitive_deps(pkg,deps):
def remove_transitive_deps(pkg, deps):
d = deps[pkg]
new_d = []
for i in range(len(d)):
keep_me = True
for j in range(len(d)):
if j==i:
if j == i:
continue
if is_dep(d[i],d[j],deps):
if is_dep(d[i], d[j], deps):
keep_me = False
if keep_me:
new_d.append(d[i])
return new_d
# This function removes the dependency on some 'mandatory' package, like the
# 'toolchain' package, or the 'skeleton' package
def remove_mandatory_deps(pkg,deps):
def remove_mandatory_deps(pkg, deps):
return [p for p in deps[pkg] if p not in ['toolchain', 'skeleton']]
# This function will check that there is no loop in the dependency chain
# As a side effect, it builds up the dependency cache.
def check_circular_deps(deps):
def recurse(pkg):
if not pkg in list(deps.keys()):
if pkg not in list(deps.keys()):
return
if pkg in not_loop:
return
@@ -314,24 +325,27 @@ def check_circular_deps(deps):
for pkg in list(deps.keys()):
recurse(pkg)
# This function trims down the dependency list of all packages.
# It applies in sequence all the dependency-elimination methods.
def remove_extra_deps(deps):
for pkg in list(deps.keys()):
if not pkg == 'all':
deps[pkg] = remove_mandatory_deps(pkg,deps)
deps[pkg] = remove_mandatory_deps(pkg, deps)
for pkg in list(deps.keys()):
if not transitive or pkg == 'all':
deps[pkg] = remove_transitive_deps(pkg,deps)
deps[pkg] = remove_transitive_deps(pkg, deps)
return deps
check_circular_deps(dict_deps)
if check_only:
sys.exit(0)
dict_deps = remove_extra_deps(dict_deps)
dict_version = brpkgutil.get_version([pkg for pkg in allpkgs
if pkg != "all" and not pkg.startswith("root")])
if pkg != "all" and not pkg.startswith("root")])
# Print the attributes of a node: label and fill-color
def print_attrs(pkg):
@@ -344,8 +358,8 @@ def print_attrs(pkg):
color = root_colour
else:
if pkg.startswith('host') \
or pkg.startswith('toolchain') \
or pkg.startswith('rootfs'):
or pkg.startswith('toolchain') \
or pkg.startswith('rootfs'):
color = host_colour
else:
color = target_colour
@@ -356,6 +370,7 @@ def print_attrs(pkg):
outfile.write("%s [label = \"%s\"]\n" % (name, label))
outfile.write("%s [color=%s,style=filled]\n" % (name, color))
# Print the dependency graph of a package
def print_pkg_deps(depth, pkg):
if pkg in done_deps:
@@ -381,12 +396,13 @@ def print_pkg_deps(depth, pkg):
continue
add = True
for p in exclude_list:
if fnmatch(d,p):
if fnmatch(d, p):
add = False
break
if add:
outfile.write("%s -> %s [dir=%s]\n" % (pkg_node_name(pkg), pkg_node_name(d), arrow_dir))
print_pkg_deps(depth+1, d)
print_pkg_deps(depth + 1, d)
# Start printing the graph data
outfile.write("digraph G {\n")

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Try to hardlink a file into a directory, fallback to copy on failure.
#

View File

@@ -75,6 +75,14 @@ get_gid() {
'$1 == group { printf( "%d\n", $3 ); }' "${GROUP}"
}
#----------------------------------------------------------------------------
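# Print the members field (4th column) of a group entry; e.g. the
# (illustrative) line 'audio:x:29:alice,bob' yields 'alice,bob'.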
get_members() {
local group="${1}"
awk -F: -v group="${group}" \
'$1 == group { printf( "%s\n", $4 ); }' "${GROUP}"
}
#----------------------------------------------------------------------------
get_username() {
local uid="${1}"
@@ -211,16 +219,17 @@ generate_gid() {
add_one_group() {
local group="${1}"
local gid="${2}"
local _f
local members
# Generate a new GID if needed
if [ ${gid} -eq -1 ]; then
gid="$( generate_gid "${group}" )"
fi
members=$(get_members "$group")
# Remove any previous instance of this group, and re-add the new one
sed -i --follow-symlinks -e '/^'"${group}"':.*/d;' "${GROUP}"
printf "%s:x:%d:\n" "${group}" "${gid}" >>"${GROUP}"
printf "%s:x:%d:%s\n" "${group}" "${gid}" "${members}" >>"${GROUP}"
# Ditto for /etc/gshadow if it exists
if [ -f "${GSHADOW}" ]; then

View File

@@ -26,6 +26,7 @@
#
echo "<head>
<script src=\"https://www.kryogenix.org/code/browser/sorttable/sorttable.js\"></script>
<style type=\"text/css\">
table {
width: 100%;
@@ -59,7 +60,9 @@ td.lotsofpatches {
<a href=\"#results\">Results</a><br/>
<table>
<p id=\"sortable_hint\"></p>
<table class=\"sortable\">
<tr>
<td>Id</td>
<td>Package</td>
@@ -68,6 +71,7 @@ td.lotsofpatches {
<td class=\"centered\">License</td>
<td class=\"centered\">License files</td>
<td class=\"centered\">Hash file</td>
<td class=\"centered\">Warnings</td>
</tr>
"
@@ -80,6 +84,7 @@ python_packages=0
rebar_packages=0
virtual_packages=0
generic_packages=0
waf_packages=0
manual_packages=0
packages_with_licence=0
packages_without_licence=0
@@ -90,7 +95,7 @@ packages_without_hash_file=0
total_patch_count=0
cnt=0
for i in $(find boot/ linux/ package/ -name '*.mk' | sort) ; do
for i in $(find boot/ linux/ package/ toolchain/ -name '*.mk' | sort) ; do
if test \
$i = "boot/common.mk" -o \
@@ -118,7 +123,15 @@ for i in $(find boot/ linux/ package/ -name '*.mk' | sort) ; do
$i = "package/pkg-virtual.mk" -o \
$i = "package/pkg-download.mk" -o \
$i = "package/pkg-generic.mk" -o \
$i = "package/pkg-utils.mk" ; then
$i = "package/pkg-waf.mk" -o \
$i = "package/pkg-kernel-module.mk" -o \
$i = "package/pkg-utils.mk" -o \
$i = "package/nvidia-tegra23/nvidia-tegra23.mk" -o \
$i = "toolchain/toolchain-external/pkg-toolchain-external.mk" -o \
$i = "toolchain/toolchain-external/toolchain-external.mk" -o \
$i = "toolchain/toolchain.mk" -o \
$i = "toolchain/helpers.mk" -o \
$i = "toolchain/toolchain-wrapper.mk" ; then
echo "skipping $i" 1>&2
continue
fi
@@ -215,6 +228,16 @@ for i in $(find boot/ linux/ package/ -name '*.mk' | sort) ; do
hastarget=1
fi
if grep -E "\(toolchain-external-package\)" $i > /dev/null ; then
infratype="toolchain-external"
hastarget=1
fi
if grep -E "\(waf-package\)" $i > /dev/null ; then
infratype="waf"
hastarget=1
fi
pkg=$(basename $i)
dir=$(dirname $i)
pkg=${pkg%.mk}
@@ -243,6 +266,8 @@ for i in $(find boot/ linux/ package/ -name '*.mk' | sort) ; do
virtual_packages=$(($virtual_packages+1))
elif [ ${infratype} = "generic" ]; then
generic_packages=$(($generic_packages+1))
elif [ ${infratype} = "waf" ]; then
waf_packages=$(($waf_packages+1))
fi
if grep -qE "^${pkgvariable}_LICENSE[ ]*=" $i ; then
@@ -323,6 +348,14 @@ for i in $(find boot/ linux/ package/ -name '*.mk' | sort) ; do
echo "<td class=\"centered correct\">Yes</td>"
fi
file_list=$(find ${package_dir} -name '*.mk' -o -name '*.in*' -o -name '*.hash')
nwarnings=$(./utils/check-package ${file_list} 2>&1 | sed '/\([0-9]*\) warnings generated/!d; s//\1/')
if [ ${nwarnings} -eq 0 ] ; then
echo "<td class=\"centered correct\">${nwarnings}</td>"
else
echo "<td class=\"centered wrong\">${nwarnings}</td>"
fi
echo "</tr>"
done
@@ -367,6 +400,10 @@ echo "<td>Packages using the <i>virtual</i> infrastructure</td>"
echo "<td>$virtual_packages</td>"
echo "</tr>"
echo "<tr>"
echo "<td>Packages using the <i>waf</i> infrastructure</td>"
echo "<td>$waf_packages</td>"
echo "</tr>"
echo "<tr>"
echo "<td>Packages not using any infrastructure</td>"
echo "<td>$manual_packages</td>"
echo "</tr>"
@@ -407,4 +444,12 @@ echo "</table>"
echo "<hr/>"
echo "<i>Updated on $(LANG=C date), Git commit $(git log master -n 1 --pretty=format:%H)</i>"
echo "</body>"
echo "<script>
if (typeof sorttable === \"object\") {
document.getElementById(\"sortable_hint\").innerHTML =
\"hint: the table can be sorted by clicking the column headers\"
}
</script>
"
echo "</html>"

View File

@@ -1,24 +1,69 @@
#!/usr/bin/env python
# Wrapper for python2 and python3 around compileall to raise exception
# when a python byte code generation failed.
#
# Inspired from:
# http://stackoverflow.com/questions/615632/how-to-detect-errors-from-compileall-compile-dir
'''Wrapper for python2 and python3 around compileall to raise exception
when a python byte code generation failed.
Inspired from:
http://stackoverflow.com/questions/615632/how-to-detect-errors-from-compileall-compile-dir
'''
from __future__ import print_function
import sys
import py_compile
import compileall
import argparse
class ReportProblem:
def __nonzero__(self):
type, value, traceback = sys.exc_info()
if type is not None and issubclass(type, py_compile.PyCompileError):
print("Cannot compile %s" %value.file)
def check_for_errors(comparison):
'''Wrap comparison operator with code checking for PyCompileError.
If PyCompileError was raised, re-raise it again to abort execution,
otherwise perform comparison as expected.
'''
def operator(self, other):
exc_type, value, traceback = sys.exc_info()
if exc_type is not None and issubclass(exc_type,
py_compile.PyCompileError):
print("Cannot compile %s" % value.file)
raise value
return 1
report_problem = ReportProblem()
return comparison(self, other)
compileall.compile_dir(sys.argv[1], quiet=report_problem)
return operator
class ReportProblem(int):
'''Class that pretends to be an int() object but implements all of its
comparison operators such that it'd detect being called in
PyCompileError handling context and abort execution
'''
VALUE = 1
def __new__(cls, *args, **kwargs):
return int.__new__(cls, ReportProblem.VALUE, **kwargs)
@check_for_errors
def __lt__(self, other):
return ReportProblem.VALUE < other
@check_for_errors
def __eq__(self, other):
return ReportProblem.VALUE == other
def __ge__(self, other):
return not self < other
def __gt__(self, other):
return not self < other and not self == other
def __ne__(self, other):
return not self == other
parser = argparse.ArgumentParser(description='Compile Python source files in a directory tree.')
parser.add_argument("target", metavar='DIRECTORY',
help='Directory to scan')
parser.add_argument("--force", action='store_true',
help="Force compilation even if alread compiled")
args = parser.parse_args()
compileall.compile_dir(args.target, force=args.force, quiet=ReportProblem())
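# Example invocation (hypothetical target path):
#   ./pycompile.py ${TARGET_DIR}/usr/lib/python2.7 --force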

View File

@@ -1,861 +0,0 @@
#!/usr/bin/env perl
# This chunk of stuff was generated by App::FatPacker. To find the original
# file's code, look for the end of this BEGIN block or the string 'FATPACK'
BEGIN {
my %fatpacked;
$fatpacked{"MetaCPAN/API/Tiny.pm"} = <<'METACPAN_API_TINY';
package MetaCPAN::API::Tiny;
{
$MetaCPAN::API::Tiny::VERSION = '1.131730';
}
use strict;
use warnings;
# ABSTRACT: A Tiny API client for MetaCPAN
use Carp;
use JSON::PP 'encode_json', 'decode_json';
use HTTP::Tiny;
sub new {
my ($class, @args) = @_;
$#_ % 2 == 0
or croak 'Arguments must be provided as name/value pairs';
my %params = @args;
die 'ua_args must be an array reference'
if $params{ua_args} && ref($params{ua_args}) ne 'ARRAY';
my $self = +{
base_url => $params{base_url} || 'http://api.metacpan.org/v0',
ua => $params{ua} || HTTP::Tiny->new(
$params{ua_args}
? @{$params{ua_args}}
: (agent => 'MetaCPAN::API::Tiny/'
. ($MetaCPAN::API::VERSION || 'xx'))),
};
return bless($self, $class);
}
sub _build_extra_params {
my $self = shift;
@_ % 2 == 0
or croak 'Incorrect number of params, must be key/value';
my %extra = @_;
my $ua = $self->{ua};
foreach my $key (keys %extra)
{
# The implementation in HTTP::Tiny uses + instead of %20, fix that
$extra{$key} = $ua->_uri_escape($extra{$key});
$extra{$key} =~ s/\+/%20/g;
}
my $params = join '&', map { "$_=" . $extra{$_} } sort keys %extra;
return $params;
}
# /source/{author}/{release}/{path}
sub source {
my $self = shift;
my %opts = @_ ? @_ : ();
my $url = '';
my $error = "Provide 'author' and 'release' and 'path'";
%opts or croak $error;
if (
defined ( my $author = $opts{'author'} ) &&
defined ( my $release = $opts{'release'} ) &&
defined ( my $path = $opts{'path'} )
) {
$url = "source/$author/$release/$path";
} else {
croak $error;
}
$url = $self->{base_url} . "/$url";
my $result = $self->{ua}->get($url);
$result->{'success'}
or croak "Failed to fetch '$url': " . $result->{'reason'};
return $result->{'content'};
}
# /release/{distribution}
# /release/{author}/{release}
sub release {
my $self = shift;
my %opts = @_ ? @_ : ();
my $url = '';
my $error = "Either provide 'distribution', or 'author' and 'release', " .
"or 'search'";
%opts or croak $error;
my %extra_opts = ();
if ( defined ( my $dist = $opts{'distribution'} ) ) {
$url = "release/$dist";
} elsif (
defined ( my $author = $opts{'author'} ) &&
defined ( my $release = $opts{'release'} )
) {
$url = "release/$author/$release";
} elsif ( defined ( my $search_opts = $opts{'search'} ) ) {
ref $search_opts && ref $search_opts eq 'HASH'
or croak $error;
%extra_opts = %{$search_opts};
$url = 'release/_search';
} else {
croak $error;
}
return $self->fetch( $url, %extra_opts );
}
# /pod/{module}
# /pod/{author}/{release}/{path}
sub pod {
my $self = shift;
my %opts = @_ ? @_ : ();
my $url = '';
my $error = "Either provide 'module' or 'author and 'release' and 'path'";
%opts or croak $error;
if ( defined ( my $module = $opts{'module'} ) ) {
$url = "pod/$module";
} elsif (
defined ( my $author = $opts{'author'} ) &&
defined ( my $release = $opts{'release'} ) &&
defined ( my $path = $opts{'path'} )
) {
$url = "pod/$author/$release/$path";
} else {
croak $error;
}
# check content-type
my %extra = ();
if ( defined ( my $type = $opts{'content-type'} ) ) {
$type =~ m{^ text/ (?: html|plain|x-pod|x-markdown ) $}x
or croak 'Incorrect content-type provided';
$extra{headers}{'content-type'} = $type;
}
$url = $self->{base_url}. "/$url";
my $result = $self->{ua}->get( $url, \%extra );
$result->{'success'}
or croak "Failed to fetch '$url': " . $result->{'reason'};
return $result->{'content'};
}
# /module/{module}
sub module {
my $self = shift;
my $name = shift;
$name or croak 'Please provide a module name';
return $self->fetch("module/$name");
}
# file() is a synonym of module
sub file { goto &module }
# /author/{author}
sub author {
my $self = shift;
my ( $pause_id, $url, %extra_opts );
if ( @_ == 1 ) {
$url = 'author/' . shift;
} elsif ( @_ == 2 ) {
my %opts = @_;
if ( defined $opts{'pauseid'} ) {
$url = "author/" . $opts{'pauseid'};
} elsif ( defined $opts{'search'} ) {
my $search_opts = $opts{'search'};
ref $search_opts && ref $search_opts eq 'HASH'
or croak "'search' key must be hashref";
%extra_opts = %{$search_opts};
$url = 'author/_search';
} else {
croak 'Unknown option given';
}
} else {
croak 'Please provide an author PAUSEID or a "search"';
}
return $self->fetch( $url, %extra_opts );
}
sub fetch {
my $self = shift;
my $url = shift;
my $extra = $self->_build_extra_params(@_);
my $base = $self->{base_url};
my $req_url = $extra ? "$base/$url?$extra" : "$base/$url";
my $result = $self->{ua}->get($req_url);
return $self->_decode_result( $result, $req_url );
}
sub post {
my $self = shift;
my $url = shift;
my $query = shift;
my $base = $self->{base_url};
defined $url
or croak 'First argument of URL must be provided';
ref $query and ref $query eq 'HASH'
or croak 'Second argument of query hashref must be provided';
my $query_json = encode_json( $query );
my $result = $self->{ua}->request(
'POST',
"$base/$url",
{
headers => { 'Content-Type' => 'application/json' },
content => $query_json,
}
);
return $self->_decode_result( $result, $url, $query_json );
}
sub _decode_result {
my $self = shift;
my ( $result, $url, $original ) = @_;
my $decoded_result;
ref $result and ref $result eq 'HASH'
or croak 'First argument must be hashref';
defined $url
or croak 'Second argument of a URL must be provided';
if ( defined ( my $success = $result->{'success'} ) ) {
my $reason = $result->{'reason'} || '';
$reason .= ( defined $original ? " (request: $original)" : '' );
$success or croak "Failed to fetch '$url': $reason";
} else {
croak 'Missing success in return value';
}
defined ( my $content = $result->{'content'} )
or croak 'Missing content in return value';
eval { $decoded_result = decode_json $content; 1 }
or do { croak "Couldn't decode '$content': $@" };
return $decoded_result;
}
1;
__END__
=pod
=head1 NAME
MetaCPAN::API::Tiny - A Tiny API client for MetaCPAN
=head1 VERSION
version 1.131730
=head1 DESCRIPTION
This is the Tiny version of L<MetaCPAN::API>. It implements a compatible API
with a few notable exceptions:
=over 4
=item Attributes are direct hash access
The attributes defined using Mo(o|u)se are now accessed via the blessed hash
directly. There are no accessors defined to access these elements.
=item Exception handling
Instead of using Try::Tiny, raw evals are used. This could potentially cause
issues, so just be aware.
=item Testing
Test::Fatal was replaced with an eval implementation of exception().
Test::TinyMocker usage is retained, but may be absorbed since it is pure perl
=back
=head1 CLASS_METHODS
=head2 new
new is the constructor for MetaCPAN::API::Tiny. In the non-tiny version of this
module, this is provided via Any::Moose built from the attributes defined. In
the tiny version, we define our own constructor. It takes the same arguments
and provides similar checks to MetaCPAN::API with regards to arguments passed.
=head1 PUBLIC_METHODS
=head2 source
my $source = $mcpan->source(
author => 'DOY',
release => 'Moose-2.0201',
path => 'lib/Moose.pm',
);
Searches MetaCPAN for a module or a specific release and returns the plain source.
=head2 release
my $result = $mcpan->release( distribution => 'Moose' );
# or
my $result = $mcpan->release( author => 'DOY', release => 'Moose-2.0001' );
Searches MetaCPAN for a dist.
You can do complex searches using 'search' parameter:
# example lifted from MetaCPAN docs
my $result = $mcpan->release(
search => {
author => "OALDERS AND ",
filter => "status:latest",
fields => "name",
size => 1,
},
);
=head2 pod
my $result = $mcpan->pod( module => 'Moose' );
# or
my $result = $mcpan->pod(
author => 'DOY',
release => 'Moose-2.0201',
path => 'lib/Moose.pm',
);
Searches MetaCPAN for a module or a specific release and returns the POD.
=head2 module
my $result = $mcpan->module('MetaCPAN::API');
Searches MetaCPAN and returns a module's ".pm" file.
=head2 file
A synonym of L</module>
=head2 author
my $result1 = $mcpan->author('XSAWYERX');
my $result2 = $mcpan->author( pauseid => 'XSAWYERX' );
Searches MetaCPAN for a specific author.
You can do complex searches using 'search' parameter:
# example lifted from MetaCPAN docs
my $result = $mcpan->author(
search => {
q => 'profile.name:twitter',
size => 1,
},
);
=head2 fetch
my $result = $mcpan->fetch('/release/distribution/Moose');
# with parameters
my $more = $mcpan->fetch(
'/release/distribution/Moose',
param => 'value',
);
This is a helper method for API implementations. It fetches a path from MetaCPAN, decodes the JSON from the content variable and returns it.
You don't really need to use it, but you can in case you want to write your own extension implementation to MetaCPAN::API.
It accepts an additional hash as "GET" parameters.
=head2 post
# /release&content={"query":{"match_all":{}},"filter":{"prefix":{"archive":"Cache-Cache-1.06"}}}
my $result = $mcpan->post(
'release',
{
query => { match_all => {} },
filter => { prefix => { archive => 'Cache-Cache-1.06' } },
},
);
The POST equivalent of the "fetch()" method. It gets the path and JSON request.
=head1 THANKS
Overall the tests and code were ripped directly from MetaCPAN::API and
tiny-fied. A big thanks to Sawyer X for writing the original module.
=head1 AUTHOR
Nicholas R. Perez <nperez@cpan.org>
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2013 by Nicholas R. Perez <nperez@cpan.org>.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
METACPAN_API_TINY
s/^ //mg for values %fatpacked;
unshift @INC, sub {
if (my $fat = $fatpacked{$_[1]}) {
if ($] < 5.008) {
return sub {
return 0 unless length $fat;
$fat =~ s/^([^\n]*\n?)//;
$_ = $1;
return 1;
};
}
open my $fh, '<', \$fat
or die "FatPacker error loading $_[1] (could be a perl installation issue?)";
return $fh;
}
return
};
} # END OF FATPACK CODE
use 5.010;
use strict;
use warnings;
use Fatal qw(open close);
use Getopt::Long;
use Pod::Usage;
use File::Basename;
use Module::CoreList;
use HTTP::Tiny;
use Safe;
use MetaCPAN::API::Tiny;
# Below, 5.024 should be aligned with the version of perl actually
# bundled in Buildroot:
die <<"MSG" if $] < 5.024;
This script needs a host perl with the same major version as Buildroot target perl.
Your current host perl is:
$^X
version $]
You may install a local one by running:
perlbrew install perl-5.24.0
MSG
my ($help, $man, $quiet, $force, $recommend, $test, $host);
my $target = 1;
GetOptions( 'help|?' => \$help,
'man' => \$man,
'quiet|q' => \$quiet,
'force|f' => \$force,
'host!' => \$host,
'target!' => \$target,
'recommend' => \$recommend,
'test' => \$test
) or pod2usage(-exitval => 1);
pod2usage(-exitval => 0) if $help;
pod2usage(-exitval => 0, -verbose => 2) if $man;
pod2usage(-exitval => 1) if scalar @ARGV == 0;
my %dist; # name -> metacpan data
my %need_target; # name -> 1 if target package is needed
my %need_host; # name -> 1 if host package is needed
my %need_dlopen; # name -> 1 if requires dynamic library
my %deps_build; # name -> list of host dependencies
my %deps_runtime; # name -> list of target dependencies
my %deps_optional; # name -> list of optional target dependencies
my %license_files; # name -> list of license files
my %checksum; # author -> list of checksum
my $mcpan = MetaCPAN::API::Tiny->new(base_url => 'http://fastapi.metacpan.org/v1');
my $ua = HTTP::Tiny->new();
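# Each CPAN mirror directory ships a CHECKSUMS file, which is Perl source
# defining a hash keyed by tarball name; get_checksum() below therefore
# evaluates it in a Safe compartment before looking up the md5/sha256 entries.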
sub get_checksum {
my ($url) = @_;
my($path) = $url =~ m|^[^:/?#]+://[^/?#]*([^?#]*)|;
my($basename, $dirname) = fileparse( $path );
unless ($checksum{$dirname}) {
my $response = $ua->get(qq{http://cpan.metacpan.org${dirname}CHECKSUMS});
$checksum{$dirname} = $response->{content};
}
my $chksum = Safe->new->reval($checksum{$dirname});
return $chksum->{$basename}, $basename;
}
sub get_manifest {
my ($author, $distname, $version) = @_;
my $url = qq{http://fastapi.metacpan.org/source/${author}/${distname}-${version}/MANIFEST};
my $response = $ua->get($url);
return $response->{content};
}
sub is_xs {
my ($manifest) = @_;
# This heuristic determines if a module is a native extension, by searching
# some file extension types in the MANIFEST of the distribution.
# It was inspired by http://deps.cpantesters.org/static/purity.html
return $manifest =~ m/\.(swg|xs|c|h|i)[\n\s]/;
}
sub find_license_files {
my ($manifest) = @_;
my @license_files;
foreach (split /\n/, $manifest) {
next if m|/|;
push @license_files, $_ if m/(ARTISTIC|COPYING|COPYRIGHT|LICENSE)/i;
}
if (scalar @license_files == 0 && $manifest =~ m/(README)[\n\s]/i) {
@license_files = ($1);
}
return \@license_files;
}
sub fetch {
my ($name, $need_target, $need_host, $top) = @_;
$need_target{$name} = $need_target if $need_target;
$need_host{$name} = $need_host if $need_host;
unless ($dist{$name} && !$top) {
say qq{fetch ${name}} unless $quiet;
my $result = $mcpan->release( distribution => $name );
$dist{$name} = $result;
my $manifest = get_manifest( $result->{author}, $name, $result->{version} );
$need_dlopen{$name} = is_xs( $manifest );
$license_files{$name} = find_license_files( $manifest );
my %build = ();
my %runtime = ();
my %optional = ();
foreach my $dep (@{$result->{dependency}}) {
my $modname = ${$dep}{module};
next if $modname eq q{perl};
next if $modname =~ m|^Alien|;
next if $modname =~ m|^Win32|;
next if !($test && $top) && $modname =~ m|^Test|;
next if Module::CoreList::is_core( $modname, undef, $] );
# we could use the host Module::CoreList data, because host perl and
# target perl have the same major version
next if ${$dep}{phase} eq q{develop};
next if !($test && $top) && ${$dep}{phase} eq q{test};
my $distname = $mcpan->module( $modname )->{distribution};
if (${$dep}{phase} eq q{runtime}) {
if (${$dep}{relationship} eq q{requires}) {
$runtime{$distname} = 1;
}
else {
$optional{$distname} = 1 if $recommend && $top;
}
}
else { # configure, build
$build{$distname} = 1;
}
}
$deps_build{$name} = [keys %build];
$deps_runtime{$name} = [keys %runtime];
$deps_optional{$name} = [keys %optional];
foreach my $distname (@{$deps_build{$name}}) {
fetch( $distname, 0, 1 );
}
foreach my $distname (@{$deps_runtime{$name}}) {
fetch( $distname, $need_target, $need_host );
$need_dlopen{$name} ||= $need_dlopen{$distname};
}
foreach my $distname (@{$deps_optional{$name}}) {
fetch( $distname, $need_target, $need_host );
}
}
return;
}
foreach my $distname (@ARGV) {
# Command-line's distributions
fetch( $distname, !!$target, !!$host, 1 );
}
say scalar keys %dist, q{ packages fetched.} unless $quiet;
# Buildroot package name: lowercase
sub fsname {
my $name = shift;
$name =~ s|_|-|g;
return q{perl-} . lc $name;
}
# Buildroot variable name: uppercase
sub brname {
my $name = shift;
$name =~ s|-|_|g;
return uc $name;
}
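# Illustrative: fsname('Try-Tiny') returns 'perl-try-tiny', and
# brname('perl-try-tiny') returns 'PERL_TRY_TINY'.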
while (my ($distname, $dist) = each %dist) {
my $fsname = fsname( $distname );
my $dirname = q{package/} . $fsname;
my $cfgname = $dirname . q{/Config.in};
my $mkname = $dirname . q{/} . $fsname . q{.mk};
my $hashname = $dirname . q{/} . $fsname . q{.hash};
my $brname = brname( $fsname );
mkdir $dirname unless -d $dirname;
if ($need_target{$distname} && ($force || !-f $cfgname)) {
my $abstract = $dist->{abstract};
my $homepage = $dist->{resources}->{homepage} || qq{https://metacpan.org/release/${distname}};
say qq{write ${cfgname}} unless $quiet;
open my $fh, q{>}, $cfgname;
say {$fh} qq{config BR2_PACKAGE_${brname}};
say {$fh} qq{\tbool "${fsname}"};
say {$fh} qq{\tdepends on !BR2_STATIC_LIBS} if $need_dlopen{$distname};
foreach my $dep (sort @{$deps_runtime{$distname}}) {
my $brdep = brname( fsname( $dep ) );
say {$fh} qq{\tselect BR2_PACKAGE_${brdep}};
}
say {$fh} qq{\thelp};
say {$fh} qq{\t ${abstract}\n} if $abstract;
say {$fh} qq{\t ${homepage}};
if ($need_dlopen{$distname}) {
say {$fh} qq{\ncomment "${fsname} needs a toolchain w/ dynamic library"};
say {$fh} qq{\tdepends on BR2_STATIC_LIBS};
}
close $fh;
}
if ($force || !-f $mkname) {
my $version = $dist->{version};
my($path) = $dist->{download_url} =~ m|^[^:/?#]+://[^/?#]*([^?#]*)|;
# this URL contains only the scheme, auth and path parts (but no query and fragment parts)
# the scheme is not used, because the job is done by the BR download infrastructure
# the auth part is not used, because we use $(BR2_CPAN_MIRROR)
my($filename, $directories, $suffix) = fileparse( $path, q{tar.gz}, q{tgz} );
$directories =~ s|/$||;
my $dependencies = join q{ }, map( { q{host-} . fsname( $_ ); } sort @{$deps_build{$distname}} ),
map( { fsname( $_ ); } sort @{$deps_runtime{$distname}} );
my $host_dependencies = join q{ }, map { q{host-} . fsname( $_ ); } sort( @{$deps_build{$distname}},
@{$deps_runtime{$distname}} );
my $license = ref $dist->{license} eq 'ARRAY'
? join q{ or }, @{$dist->{license}}
: $dist->{license};
# BR requires license name as in http://spdx.org/licenses/
$license =~ s|apache_2_0|Apache-2.0|;
$license =~ s|artistic_2|Artistic-2.0|;
$license =~ s|mit|MIT|;
$license =~ s|openssl|OpenSSL|;
$license =~ s|perl_5|Artistic or GPLv1+|;
my $license_files = join q{ }, @{$license_files{$distname}};
say qq{write ${mkname}} unless $quiet;
open my $fh, q{>}, $mkname;
say {$fh} qq{################################################################################};
say {$fh} qq{#};
say {$fh} qq{# ${fsname}};
say {$fh} qq{#};
say {$fh} qq{################################################################################};
say {$fh} qq{};
say {$fh} qq{${brname}_VERSION = ${version}};
say {$fh} qq{${brname}_SOURCE = ${distname}-\$(${brname}_VERSION).${suffix}};
say {$fh} qq{${brname}_SITE = \$(BR2_CPAN_MIRROR)${directories}};
say {$fh} qq{${brname}_DEPENDENCIES = ${dependencies}} if $need_target{$distname} && $dependencies;
say {$fh} qq{HOST_${brname}_DEPENDENCIES = ${host_dependencies}} if $need_host{$distname} && $host_dependencies;
say {$fh} qq{${brname}_LICENSE = ${license}} if $license && $license ne q{unknown};
say {$fh} qq{${brname}_LICENSE_FILES = ${license_files}} if $license_files;
say {$fh} qq{};
foreach (sort @{$deps_optional{$distname}}) {
next if grep { $_ eq $distname; } @{$deps_runtime{$_}}; # avoid cyclic dependencies
my $opt_brname = brname( $_ );
my $opt_fsname = fsname( $_ );
say {$fh} qq{ifeq (\$(BR2_PACKAGE_PERL_${opt_brname}),y)};
say {$fh} qq{${brname}_DEPENDENCIES += ${opt_fsname}};
say {$fh} qq{endif};
say {$fh} qq{};
}
say {$fh} qq{\$(eval \$(perl-package))} if $need_target{$distname};
say {$fh} qq{\$(eval \$(host-perl-package))} if $need_host{$distname};
close $fh;
}
if ($force || !-f $hashname) {
my($checksum, $filename) = get_checksum($dist->{download_url});
my $md5 = $checksum->{md5};
my $sha256 = $checksum->{sha256};
say qq{write ${hashname}} unless $quiet;
open my $fh, q{>}, $hashname;
say {$fh} qq{# retrieved by scancpan from http://cpan.metacpan.org/};
say {$fh} qq{md5 ${md5} ${filename}};
say {$fh} qq{sha256 ${sha256} ${filename}};
close $fh;
}
}
my %pkg;
my $cfgname = q{package/Config.in};
if (-f $cfgname) {
open my $fh, q{<}, $cfgname;
while (<$fh>) {
chomp;
$pkg{$_} = 1 if m|package/perl-|;
}
close $fh;
}
foreach my $distname (keys %need_target) {
my $fsname = fsname( $distname );
$pkg{qq{\tsource "package/${fsname}/Config.in"}} = 1;
}
say qq{${cfgname} must contain the following lines:};
say join qq{\n}, sort keys %pkg;
__END__
=head1 NAME
support/scripts/scancpan Try-Tiny Moo
=head1 SYNOPSIS
support/scripts/scancpan [options] [distname ...]
Options:
-help
-man
-quiet
-force
-target/-notarget
-host/-nohost
-recommend
-test
=head1 OPTIONS
=over 8
=item B<-help>
Prints a brief help message and exits.
=item B<-man>
Prints the manual page and exits.
=item B<-quiet>
Executes without output
=item B<-force>
Forces the overwriting of existing files.
=item B<-target/-notarget>
Switches package generation for the target variant (the default is C<-target>).
=item B<-host/-nohost>
Switches package generation for the host variant (the default is C<-nohost>).
=item B<-recommend>
Adds I<recommended> dependencies.
=item B<-test>
Adds dependencies for test.
=back
=head1 DESCRIPTION
This script creates templates of the Buildroot package files for all the
Perl/CPAN distributions required by the specified distnames. The
dependencies and metadata are fetched from https://metacpan.org/.
After running this script, it is necessary to check the generated files.
You have to manually add the license files (PERL_FOO_LICENSE_FILES variable).
For distributions that link against a target library, you have to add the
buildroot package name for that library to the DEPENDENCIES variable.
See the Buildroot documentation for details on the usage of the Perl
infrastructure.
The major version of the host perl must be aligned with the target one,
in order to work with the right CoreList data.
=head1 LICENSE
Copyright (C) 2013-2016 by Francois Perrad <francois.perrad@gadz.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
This script is a part of Buildroot.
This script requires the module C<MetaCPAN::API::Tiny> (version 1.131730)
which was included at the beginning of this file by the tool C<fatpack>.
See L<http://search.cpan.org/~nperez/MetaCPAN-API-Tiny-1.131730/>.
See L<http://search.cpan.org/search?query=App-FatPacker&mode=dist>.
Both of these libraries are free software and may be distributed under the
same terms as perl itself.
Perl itself may be distributed under the terms of the Artistic v1 or the GPL v1 license.
=cut

View File

@@ -1,653 +0,0 @@
#!/usr/bin/python2
"""
Utility for building Buildroot packages for existing PyPI packages
Any package built by scanpypi should be manually checked for
errors.
"""
from __future__ import print_function
import argparse
import json
import urllib2
import sys
import os
import shutil
import StringIO
import tarfile
import zipfile
import errno
import hashlib
import re
import textwrap
import tempfile
import imp
from functools import wraps
def setup_decorator(func, method):
"""
Decorator for distutils.core.setup and setuptools.setup.
Stores the arguments with which setup is called in a dict, and adds
a 'method' key which is either 'setuptools' or 'distutils'.
Keyword arguments:
func -- either setuptools.setup or distutils.core.setup
method -- either 'setuptools' or 'distutils'
"""
@wraps(func)
def closure(*args, **kwargs):
# Any python package calls its setup function to be installed.
# Argument 'name' of this setup function is the package's name
BuildrootPackage.setup_args[kwargs['name']] = kwargs
BuildrootPackage.setup_args[kwargs['name']]['method'] = method
return closure
# monkey patch
import setuptools
setuptools.setup = setup_decorator(setuptools.setup, 'setuptools')
import distutils
distutils.core.setup = setup_decorator(setuptools.setup, 'distutils')
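# Illustrative effect of the monkey patch above: when a package's setup.py
# later calls setup(name='foo', install_requires=[...]), the call is
# intercepted and its keyword arguments end up in
# BuildrootPackage.setup_args['foo'].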
def find_file_upper_case(filenames, path='./'):
"""
List generator:
Recursively find files that matches one of the specified filenames.
Returns a relative path starting with path argument.
Keyword arguments:
filenames -- List of filenames to be found
path -- Path to the directory to search
"""
for root, dirs, files in os.walk(path):
for file in files:
if file.upper() in filenames:
yield (os.path.join(root, file))
def pkg_buildroot_name(pkg_name):
"""
Returns the Buildroot package name for the PyPI package pkg_name.
Remove all non alphanumeric characters except -
Also lowers the name and adds the 'python-' prefix
Keyword arguments:
pkg_name -- String to rename
"""
name = re.sub('[^\w-]', '', pkg_name.lower())
prefix = 'python-'
pattern = re.compile('^(?!' + prefix + ')(.+?)$')
name = pattern.sub(r'python-\1', name)
return name
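# Illustrative: pkg_buildroot_name('Flask-Login') returns 'python-flask-login'.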
class DownloadFailed(Exception):
pass
class BuildrootPackage():
"""This class's methods are not meant to be used individually please
use them in the correct order:
__init__
download_package
extract_package
load_module
get_requirements
create_package_mk
create_hash_file
create_config_in
"""
setup_args = {}
def __init__(self, real_name, pkg_folder):
self.real_name = real_name
self.buildroot_name = pkg_buildroot_name(self.real_name)
self.pkg_dir = os.path.join(pkg_folder, self.buildroot_name)
self.mk_name = self.buildroot_name.upper().replace('-', '_')
self.as_string = None
self.md5_sum = None
self.metadata = None
self.metadata_name = None
self.metadata_url = None
self.pkg_req = None
self.setup_metadata = None
self.tmp_extract = None
self.used_url = None
self.filename = None
self.url = None
self.version = None
def fetch_package_info(self):
"""
Fetch a package's metadata from the python package index
"""
self.metadata_url = 'https://pypi.python.org/pypi/{pkg}/json'.format(
pkg=self.real_name)
try:
pkg_json = urllib2.urlopen(self.metadata_url).read().decode()
except urllib2.HTTPError as error:
print('ERROR:', error.getcode(), error.msg, file=sys.stderr)
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
except urllib2.URLError:
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
self.metadata = json.loads(pkg_json)
self.version = self.metadata['info']['version']
self.metadata_name = self.metadata['info']['name']
def download_package(self):
"""
Download a package using metadata from pypi
"""
try:
self.metadata['urls'][0]['filename']
except IndexError:
print(
'Non-conventional package, ',
'please check carefully after creation')
self.metadata['urls'] = [{
'packagetype': 'sdist',
'url': self.metadata['info']['download_url'],
'md5_digest': None}]
# In this case, we can't get the name of the downloaded file
# from the pypi api, so we need to find it ourselves; this should work
urlpath = urllib2.urlparse.urlparse(
self.metadata['info']['download_url']).path
# urlparse().path give something like
# /path/to/file-version.tar.gz
# We use basename to remove /path/to
self.metadata['urls'][0]['filename'] = os.path.basename(urlpath)
for download_url in self.metadata['urls']:
if 'bdist' in download_url['packagetype']:
continue
try:
print('Downloading package {pkg} from {url}...'.format(
pkg=self.real_name, url=download_url['url']))
download = urllib2.urlopen(download_url['url'])
except urllib2.HTTPError as http_error:
download = http_error
else:
self.used_url = download_url
self.as_string = download.read()
if not download_url['md5_digest']:
break
self.md5_sum = hashlib.md5(self.as_string).hexdigest()
if self.md5_sum == download_url['md5_digest']:
break
else:
if download.__class__ == urllib2.HTTPError:
raise download
raise DownloadFailed('Failed to download package {pkg}'
.format(pkg=self.real_name))
self.filename = self.used_url['filename']
self.url = self.used_url['url']
def extract_package(self, tmp_path):
"""
Extract the package contents into a directory
Keyword arguments:
tmp_path -- directory where you want the package to be extracted
"""
as_file = StringIO.StringIO(self.as_string)
if self.filename[-3:] == 'zip':
with zipfile.ZipFile(as_file) as as_zipfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
try:
os.makedirs(tmp_pkg)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.message, file=sys.stderr)
return None, None
print('WARNING:', exception.message, file=sys.stderr)
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
as_zipfile.extractall(tmp_pkg)
else:
with tarfile.open(fileobj=as_file) as as_tarfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
try:
os.makedirs(tmp_pkg)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.message, file=sys.stderr)
return None, None
print('WARNING:', exception.message, file=sys.stderr)
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
as_tarfile.extractall(tmp_pkg)
tmp_extract = '{folder}/{name}-{version}'
self.tmp_extract = tmp_extract.format(
folder=tmp_pkg,
name=self.metadata_name,
version=self.version)
def load_setup(self):
"""
Loads the corresponding setup and store its metadata
"""
current_dir = os.getcwd()
os.chdir(self.tmp_extract)
sys.path.append(self.tmp_extract)
s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract])
setup = imp.load_module('setup', s_file, s_path, s_desc)
try:
self.setup_metadata = self.setup_args[self.metadata_name]
except KeyError:
# This means setup was not called which most likely mean that it is
# called through the if __name__ == '__main__' directive.
# In this case, we can only pray that it is called through a
# function called main() in setup.py.
setup.main([]) # Will raise AttributeError if not found
self.setup_metadata = self.setup_args[self.metadata_name]
# Here we must remove the module the hard way.
# We must do this because of a very specific case: if a package calls
# setup from the __main__ but does not come with a 'main()' function,
# for some reason setup.main([]) will successfully call the main
# function of a previous package...
sys.modules.pop('setup', None)
del setup
os.chdir(current_dir)
sys.path.remove(self.tmp_extract)
def get_requirements(self, pkg_folder):
"""
Retrieve dependencies from the metadata found in the setup.py script of
a pypi package.
Keyword Arguments:
pkg_folder -- location of the already created packages
"""
if 'install_requires' not in self.setup_metadata:
self.pkg_req = None
return set()
self.pkg_req = self.setup_metadata['install_requires']
self.pkg_req = [re.sub('([-.\w]+).*', r'\1', req)
for req in self.pkg_req]
req_not_found = self.pkg_req
self.pkg_req = map(pkg_buildroot_name, self.pkg_req)
pkg_tuples = zip(req_not_found, self.pkg_req)
# pkg_tuples is a list of tuples that looks like
# ('werkzeug','python-werkzeug') because I need both when checking if
# dependencies already exist or are already in the download list
req_not_found = set(
pkg[0] for pkg in pkg_tuples
if not os.path.isdir(pkg[1])
)
return req_not_found
def __create_mk_header(self):
"""
Create the header of the <package_name>.mk file
"""
header = ['#' * 80 + '\n']
header.append('#\n')
header.append('# {name}\n'.format(name=self.buildroot_name))
header.append('#\n')
header.append('#' * 80 + '\n')
header.append('\n')
return header
def __create_mk_download_info(self):
"""
Create the lines referring to the download information of the
<package_name>.mk file
"""
lines = []
version_line = '{name}_VERSION = {version}\n'.format(
name=self.mk_name,
version=self.version)
lines.append(version_line)
targz = self.filename.replace(
self.version,
'$({name}_VERSION)'.format(name=self.mk_name))
targz_line = '{name}_SOURCE = {filename}\n'.format(
name=self.mk_name,
filename=targz)
lines.append(targz_line)
if self.filename not in self.url:
# Sometimes the filename is in the url, sometimes it's not
site_url = self.url
else:
site_url = self.url[:self.url.find(self.filename)]
site_line = '{name}_SITE = {url}'.format(name=self.mk_name,
url=site_url)
site_line = site_line.rstrip('/') + '\n'
lines.append(site_line)
return lines
def __create_mk_setup(self):
"""
Create the line referring to the setup method of the package of the
<package_name>.mk file
There are two things you can use to make an installer
for a python package: distutils or setuptools
distutils comes with python but does not support dependencies.
distutils is mostly still there for backward support.
setuptools is what smart people use,
but it is not shipped with python :(
"""
lines = []
setup_type_line = '{name}_SETUP_TYPE = {method}\n'.format(
name=self.mk_name,
method=self.setup_metadata['method'])
lines.append(setup_type_line)
return lines
def __create_mk_license(self):
"""
Create the lines referring to the package's license information of the
<package_name>.mk file
The license is found using the metadata from pypi.
In the metadata, the license can be found either with standard names in
the classifiers part or with naming from the packager in the "License"
part.
From the classifiers, the license is "translated" according to
buildroot standards if need be (i.e. from Apache Software License to
Apache-2.0).
From the License part, we cannot guess what formatting the packager
used. Hence, it is likely to be incorrect. (i.e. Apache License 2.0
instead of Apache-2.0).
The license's files are found by searching the package for files named
license or license.txt (case insensitive).
If more than one license file is found, the user is asked to select
which ones he wants to use.
"""
license_dict = {
'Apache Software License': 'Apache-2.0',
'BSD License': 'BSD',
'European Union Public Licence 1.0': 'EUPLv1.0',
'European Union Public Licence 1.1': 'EUPLv1.1',
"GNU General Public License": "GPL",
"GNU General Public License v2": "GPLv2",
"GNU General Public License v2 or later": "GPLv2+",
"GNU General Public License v3": "GPLv3",
"GNU General Public License v3 or later": "GPLv3+",
"GNU Lesser General Public License v2": "LGPLv2.1",
"GNU Lesser General Public License v2 or later": "LGPLv2.1+",
"GNU Lesser General Public License v3": "LGPLv3",
"GNU Lesser General Public License v3 or later": "LGPLv3+",
"GNU Library or Lesser General Public License": "LGPLv2",
"ISC License": "ISC",
"MIT License": "MIT",
"Mozilla Public License 1.0": "MPL-1.0",
"Mozilla Public License 1.1": "MPL-1.1",
"Mozilla Public License 2.0": "MPL-2.0",
"Zope Public License": "ZPL"
}
        regexp = re.compile(r'^License :* *.* *:+ (.*)( \(.*\))?$')
classifiers_licenses = [regexp.sub(r"\1", lic)
for lic in self.metadata['info']['classifiers']
if regexp.match(lic)]
licenses = map(lambda x: license_dict[x] if x in license_dict else x,
classifiers_licenses)
lines = []
        if not len(licenses):
            licenses = [self.metadata['info']['license']]
            print('WARNING: License has been set to "{license}". It is most'
                  ' likely wrong, please change it if need be'.format(
                      license=', '.join(licenses)))
license_line = '{name}_LICENSE = {license}\n'.format(
name=self.mk_name,
license=', '.join(licenses))
lines.append(license_line)
filenames = ['LICENCE', 'LICENSE', 'LICENSE.TXT', 'COPYING',
'COPYING.TXT']
license_files = list(find_file_upper_case(filenames, self.tmp_extract))
license_files = [license.replace(self.tmp_extract, '')[1:]
for license in license_files]
        if len(license_files) > 0:
            if len(license_files) > 1:
                print('More than one file found for license:',
                      ', '.join(license_files))
license_file_line = ('{name}_LICENSE_FILES ='
' {files}\n'.format(
name=self.mk_name,
files=' '.join(license_files)))
lines.append(license_file_line)
else:
print('WARNING: No license file found,'
' please specify it manually afterwards')
            license_file_line = '# No license file found\n'
            lines.append(license_file_line)
return lines
def __create_mk_requirements(self):
"""
        Create the lines referring to the dependencies of the
        <package_name>.mk file
        """
lines = []
dependencies_line = ('{name}_DEPENDENCIES ='
' {reqs}\n'.format(
name=self.mk_name,
reqs=' '.join(self.pkg_req)))
lines.append(dependencies_line)
return lines
def create_package_mk(self):
"""
Create the lines corresponding to the <package_name>.mk file
"""
pkg_mk = '{name}.mk'.format(name=self.buildroot_name)
path_to_mk = os.path.join(self.pkg_dir, pkg_mk)
print('Creating {file}...'.format(file=path_to_mk))
lines = self.__create_mk_header()
lines += self.__create_mk_download_info()
lines += self.__create_mk_setup()
lines += self.__create_mk_license()
lines.append('\n')
lines.append('$(eval $(python-package))')
lines.append('\n')
with open(path_to_mk, 'w') as mk_file:
mk_file.writelines(lines)
def create_hash_file(self):
"""
Create the lines corresponding to the <package_name>.hash files
"""
pkg_hash = '{name}.hash'.format(name=self.buildroot_name)
path_to_hash = os.path.join(self.pkg_dir, pkg_hash)
print('Creating {filename}...'.format(filename=path_to_hash))
lines = []
if self.used_url['md5_digest']:
md5_comment = '# md5 from {url}, sha256 locally computed\n'.format(
url=self.metadata_url)
lines.append(md5_comment)
hash_line = '{method}\t{digest} {filename}\n'.format(
method='md5',
digest=self.used_url['md5_digest'],
filename=self.filename)
lines.append(hash_line)
digest = hashlib.sha256(self.as_string).hexdigest()
hash_line = '{method}\t{digest} {filename}\n'.format(
method='sha256',
digest=digest,
filename=self.filename)
lines.append(hash_line)
with open(path_to_hash, 'w') as hash_file:
hash_file.writelines(lines)
def create_config_in(self):
"""
Creates the Config.in file of a package
"""
path_to_config = os.path.join(self.pkg_dir, 'Config.in')
print('Creating {file}...'.format(file=path_to_config))
lines = []
config_line = 'config BR2_PACKAGE_{name}\n'.format(
name=self.mk_name)
lines.append(config_line)
bool_line = '\tbool "{name}"\n'.format(name=self.buildroot_name)
lines.append(bool_line)
if self.pkg_req:
for dep in self.pkg_req:
dep_line = '\tselect BR2_PACKAGE_{req} # runtime\n'.format(
req=dep.upper().replace('-', '_'))
lines.append(dep_line)
lines.append('\thelp\n')
help_lines = textwrap.wrap(self.metadata['info']['summary'],
initial_indent='\t ',
subsequent_indent='\t ')
# make sure a help text is terminated with a full stop
if help_lines[-1][-1] != '.':
help_lines[-1] += '.'
        # add an empty line between the summary and the home page URL
        # ('\t' + two spaces is 3 chars long)
help_lines.append('')
help_lines.append('\t ' + self.metadata['info']['home_page'])
help_lines = map(lambda x: x + '\n', help_lines)
lines += help_lines
with open(path_to_config, 'w') as config_file:
config_file.writelines(lines)
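# Example invocation (hypothetical package names):
#   ./scanpypi flask werkzeug -o package
# This would create package/python-flask and package/python-werkzeug,
# each with a .mk file, a .hash file and a Config.in.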
def main():
# Building the parser
parser = argparse.ArgumentParser(
description="Creates buildroot packages from the metadata of "
"an existing PyPI packages and include it "
"in menuconfig")
parser.add_argument("packages",
help="list of packages to be created",
nargs='+')
parser.add_argument("-o", "--output",
help="""
Output directory for packages.
Default is ./package
""",
default='./package')
args = parser.parse_args()
packages = list(set(args.packages))
# tmp_path is where we'll extract the files later
tmp_prefix = 'scanpypi-'
pkg_folder = args.output
tmp_path = tempfile.mkdtemp(prefix=tmp_prefix)
try:
for real_pkg_name in packages:
package = BuildrootPackage(real_pkg_name, pkg_folder)
print('buildroot package name for {}:'.format(package.real_name),
package.buildroot_name)
# First we download the package
# Most of the info we need can only be found inside the package
print('Package:', package.buildroot_name)
print('Fetching package', package.real_name)
try:
package.fetch_package_info()
except (urllib2.URLError, urllib2.HTTPError):
continue
if package.metadata_name.lower() == 'setuptools':
                # setuptools imports itself, which does not work well
                # with the monkey patch at the beginning
print('Error: setuptools cannot be built using scanPyPI')
continue
try:
package.download_package()
except urllib2.HTTPError as error:
print('Error: {code} {reason}'.format(code=error.code,
reason=error.reason))
                print('Error downloading package:', package.buildroot_name)
print()
continue
# extract the tarball
try:
package.extract_package(tmp_path)
except (tarfile.ReadError, zipfile.BadZipfile):
print('Error extracting package {}'.format(package.real_name))
print()
continue
# Loading the package install info from the package
try:
package.load_setup()
except ImportError as err:
if 'buildutils' in err.message:
print('This package needs buildutils')
else:
raise
continue
except AttributeError:
print('Error: Could not install package {pkg}'.format(
pkg=package.real_name))
continue
            # Package requirements are an argument of the setup function
req_not_found = package.get_requirements(pkg_folder)
req_not_found = req_not_found.difference(packages)
packages += req_not_found
if req_not_found:
print('Added packages \'{pkgs}\' as dependencies of {pkg}'
.format(pkgs=", ".join(req_not_found),
pkg=package.buildroot_name))
print('Checking if package {name} already exists...'.format(
name=package.pkg_dir))
try:
os.makedirs(package.pkg_dir)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.message, file=sys.stderr)
continue
print('Error: Package {name} already exists'
.format(name=package.pkg_dir))
                del_pkg = raw_input(
                    'Do you want to delete the existing package? [y/N] ')
if del_pkg.lower() == 'y':
shutil.rmtree(package.pkg_dir)
os.makedirs(package.pkg_dir)
else:
continue
package.create_package_mk()
package.create_hash_file()
package.create_config_in()
            # print an empty line for visual comfort
            print()
finally:
shutil.rmtree(tmp_path)
if __name__ == "__main__":
main()

View File

@@ -35,6 +35,7 @@ except ImportError:
colors = ['#e60004', '#009836', '#2e1d86', '#ffed00',
'#0068b5', '#f28e00', '#940084', '#97c000']
#
# This function adds a new file to 'filesdict', after checking its
# size. The 'filesdict' contain the relative path of the file as the
@@ -54,6 +55,7 @@ def add_file(filesdict, relpath, abspath, pkg):
sz = os.stat(abspath).st_size
filesdict[relpath] = (pkg, sz)
#
# This function returns a dict where each key is the path of a file in
# the root filesystem, and the value is a tuple containing two
@@ -73,6 +75,7 @@ def build_package_dict(builddir):
add_file(filesdict, fpath, fullpath, pkg)
return filesdict
#
# This function builds a dictionary that contains the name of a
# package as key, and the size of the files installed by this package
@@ -103,7 +106,7 @@ def build_package_size(filesdict, builddir):
seeninodes.add(st.st_ino)
frelpath = os.path.relpath(fpath, os.path.join(builddir, "target"))
if not frelpath in filesdict:
if frelpath not in filesdict:
print("WARNING: %s is not part of any package" % frelpath)
pkg = "unknown"
else:
@@ -113,6 +116,7 @@ def build_package_size(filesdict, builddir):
return pkgsize
#
# Given a dict returned by build_package_size(), this function
# generates a pie chart of the size installed by each package.
@@ -127,7 +131,7 @@ def draw_graph(pkgsize, outputf):
labels = []
values = []
other_value = 0
for (p, sz) in pkgsize.items():
for (p, sz) in sorted(pkgsize.items(), key=lambda x: x[1]):
if sz < (total * 0.01):
other_value += sz
else:
@@ -150,6 +154,7 @@ def draw_graph(pkgsize, outputf):
plt.title("Total filesystem size: %d kB" % (total / 1000.), fontsize=10, y=.96)
plt.savefig(outputf)
#
# Generate a CSV file with statistics about the size of each file, its
# size contribution to the package and to the overall system.
@@ -208,6 +213,7 @@ def gen_packages_csv(pkgsizes, outputf):
for (pkg, size) in pkgsizes.items():
wr.writerow([pkg, size, "%.1f" % (float(size) / total * 100)])
parser = argparse.ArgumentParser(description='Draw size statistics graphs')
parser.add_argument("--builddir", '-i', metavar="BUILDDIR", required=True,

View File

@@ -1,127 +0,0 @@
#!/usr/bin/env python
# Copyright (C) 2016 Thomas De Schampheleire <thomas.de.schampheleire@gmail.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# TODO (improvements)
# - support K,M,G size suffixes for threshold
# - output CSV file in addition to stdout reporting
import csv
import argparse
import sys
def read_file_size_csv(inputf, detail=None):
"""Extract package or file sizes from CSV file into size dictionary"""
sizes = {}
reader = csv.reader(inputf)
header = next(reader)
if (header[0] != 'File name' or header[1] != 'Package name' or
header[2] != 'File size' or header[3] != 'Package size'):
print(("Input file %s does not contain the expected header. Are you "
"sure this file corresponds to the file-size-stats.csv "
"file created by 'make graph-size'?") % inputf.name)
sys.exit(1)
for row in reader:
if detail:
sizes[row[0]] = int(row[2])
else:
sizes[row[1]] = int(row[3])
return sizes
def compare_sizes(old, new):
"""Return delta/added/removed dictionaries based on two input size
dictionaries"""
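    # For example (hypothetical sizes, in bytes):
    #   old = {'busybox': 500}
    #   new = {'busybox': 520, 'dropbear': 100}
    #   delta -> {'busybox': ('', 20), 'dropbear': ('added', 100)}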
delta = {}
oldkeys = set(old.keys())
newkeys = set(new.keys())
# packages/files in both
for entry in newkeys.intersection(oldkeys):
delta[entry] = ('', new[entry] - old[entry])
# packages/files only in new
for entry in newkeys.difference(oldkeys):
delta[entry] = ('added', new[entry])
# packages/files only in old
for entry in oldkeys.difference(newkeys):
delta[entry] = ('removed', -old[entry])
return delta
def print_results(result, threshold):
"""Print the given result dictionary sorted by size, ignoring any entries
below or equal to threshold"""
    list_result = list(result.items())
# result is a dictionary: name -> (flag, size difference)
# list_result is a list of tuples: (name, (flag, size difference))
for entry in sorted(list_result, key=lambda entry: entry[1][1]):
if threshold is not None and abs(entry[1][1]) <= threshold:
continue
print('%12s %7s %s' % (entry[1][1], entry[1][0], entry[0]))
# main #########################################################################
description = """
Compare rootfs size between Buildroot compilations, for example after changing
configuration options or after switching to another Buildroot release.
This script compares the file-size-stats.csv file generated by 'make graph-size'
with the corresponding file from another Buildroot compilation.
The size differences can be reported per package or per file.
Size differences smaller than or equal to a given threshold can be ignored.
"""
parser = argparse.ArgumentParser(description=description,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--detail', action='store_true',
help='''report differences for individual files rather than
packages''')
parser.add_argument('-t', '--threshold', type=int,
                    help='''ignore size differences smaller than or equal to
                    this value (bytes)''')
parser.add_argument('old_file_size_csv', type=argparse.FileType('r'),
metavar='old-file-size-stats.csv',
help="""old CSV file with file and package size statistics,
generated by 'make graph-size'""")
parser.add_argument('new_file_size_csv', type=argparse.FileType('r'),
metavar='new-file-size-stats.csv',
help='new CSV file with file and package size statistics')
args = parser.parse_args()
if args.detail:
keyword = 'file'
else:
keyword = 'package'
old_sizes = read_file_size_csv(args.old_file_size_csv, args.detail)
new_sizes = read_file_size_csv(args.new_file_size_csv, args.detail)
delta = compare_sizes(old_sizes, new_sizes)
print('Size difference per %s (bytes), threshold = %s' % (keyword, args.threshold))
print(80*'-')
print_results(delta, args.threshold)
print(80*'-')
print_results({'TOTAL': ('', sum(new_sizes.values()) - sum(old_sizes.values()))},
threshold=None)

View File

@@ -1,195 +0,0 @@
#!/bin/bash
set -e
TOOLCHAINS_URL='http://autobuild.buildroot.org/toolchains/configs/toolchain-configs.csv'
main() {
local o O opts
local cfg dir pkg random toolchain
local ret nb nb_skip nb_fail
local -a toolchains
o='hc:d:p:r:'
    O='help,config-snippet:,build-dir:,package:,random:'
opts="$(getopt -n "${my_name}" -o "${o}" -l "${O}" -- "${@}")"
eval set -- "${opts}"
random=0
while [ ${#} -gt 0 ]; do
case "${1}" in
(-h|--help)
help; exit 0
;;
(-c|--config-snippet)
cfg="${2}"; shift 2
;;
(-d|--build-dir)
dir="${2}"; shift 2
;;
(-p|--package)
pkg="${2}"; shift 2
;;
(-r|--random)
random="${2}"; shift 2
;;
(--)
shift; break
;;
esac
done
if [ -z "${cfg}" ]; then
printf "error: no config snippet specified\n" >&2; exit 1
fi
if [ ! -e "${cfg}" ]; then
printf "error: %s: no such file\n" "${cfg}" >&2; exit 1
fi
if [ -z "${dir}" ]; then
dir="${HOME}/br-test-pkg"
fi
# Extract the URLs of the toolchains; drop internal toolchains
# E.g.: http://server/path/to/name.config,arch,libc
# --> http://server/path/to/name.config
toolchains=($(curl -s "${TOOLCHAINS_URL}" \
|sed -r -e 's/,.*//; /internal/d;' \
|if [ ${random} -gt 0 ]; then \
sort -R |head -n ${random}
else
cat
fi |sort
)
)
if [ ${#toolchains[@]} -eq 0 ]; then
printf "error: no toolchain found (networking issue?)\n" >&2; exit 1
fi
nb=0
nb_skip=0
nb_fail=0
for toolchain in "${toolchains[@]}"; do
build_one "${dir}" "${toolchain}" "${cfg}" "${pkg}" && ret=0 || ret=${?}
case ${ret} in
(0) ;;
(1) : $((nb_skip++));;
(2) : $((nb_fail++));;
esac
: $((nb++))
done
printf "%d builds, %d skipped, %d failed\n" ${nb} ${nb_skip} ${nb_fail}
}
build_one() {
local dir="${1}"
local url="${2}"
local cfg="${3}"
local pkg="${4}"
local toolchain
# Using basename(1) on a URL works nicely
toolchain="$(basename "${url}" .config)"
printf "%40s: " "${toolchain}"
dir="${dir}/${toolchain}"
mkdir -p "${dir}"
if ! curl -s "${url}" >"${dir}/.config"; then
printf "FAILED\n"
return 2
fi
cat >>"${dir}/.config" <<-_EOF_
BR2_INIT_NONE=y
BR2_SYSTEM_BIN_SH_NONE=y
# BR2_PACKAGE_BUSYBOX is not set
# BR2_TARGET_ROOTFS_TAR is not set
_EOF_
cat "${cfg}" >>"${dir}/.config"
if ! make O="${dir}" olddefconfig >/dev/null 2>&1; then
printf "FAILED\n"
return 2
fi
# We want all the options from the snippet to be present as-is (set
# or not set) in the actual .config; if one of them is not, it means
# some dependency from the toolchain or arch is not available, in
# which case this config is untestable and we skip it.
# We don't care about the locale to sort in, as long as both sort are
# done in the same locale.
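    # For example (hypothetical option): if the snippet sets
    # BR2_PACKAGE_FOO=y but olddefconfig dropped it from .config,
    # comm -23 lists it in missing.config and the build is skipped.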
comm -23 <(sort "${cfg}") <(sort "${dir}/.config") >"${dir}/missing.config"
if [ -s "${dir}/missing.config" ]; then
printf "SKIPPED\n"
return 1
fi
# Remove file, it's empty anyway.
rm -f "${dir}/missing.config"
if [ -n "${pkg}" ]; then
if ! make O="${dir}" "${pkg}-dirclean" >> "${dir}/logfile" 2>&1; then
printf "FAILED\n"
return 2
fi
fi
# shellcheck disable=SC2086
if ! make O="${dir}" ${pkg} >> "${dir}/logfile" 2>&1; then
printf "FAILED\n"
return 2
fi
printf "OK\n"
}
help() {
cat <<_EOF_
test-pkg: test-build a package against various toolchains and architectures
The supplied config snippet is appended to each toolchain config, the
resulting configuration is checked to ensure it still contains all options
specified in the snippet; if any is missing, the build is skipped, on the
assumption that the package under test requires a toolchain or architecture
feature that is missing.
In case a failure is noticed, you can fix the package and re-run the same
command again; it will re-run the test where it failed. If you specified a
package (with -p), its build dir will be removed first.
The list of toolchains is retrieved from the Buildroot autobuilders, available
at ${TOOLCHAINS_URL}.
Options:
-h, --help
Print this help.
-c CFG, --config-snippet CFG
Use the CFG file as the source for the config snippet. This file
should contain all the config options required to build a package.
-d DIR, --build-dir DIR
Do the builds in directory DIR, one sub-dir per toolchain.
-p PKG, --package PKG
        Test-build the package PKG by running 'make PKG'; if not specified,
        just run 'make'.
-r N, --random N
        Limit the tests to N randomly selected toolchains, instead of
        building with all toolchains.
Example:
Testing libcec would require a config snippet that contains:
BR2_PACKAGE_LIBCEC=y
Testing libcurl with OpenSSL support would require a snippet such as:
BR2_PACKAGE_OPENSSL=y
BR2_PACKAGE_LIBCURL=y
_EOF_
}
my_name="${0##*/}"
main "${@}"

Some files were not shown because too many files have changed in this diff