Update buildroot to version 2016.08.1

2016-11-16 22:07:29 +01:00
parent 807ab03547
commit a1061efbc2
3636 changed files with 59539 additions and 25783 deletions

View File

@@ -31,6 +31,8 @@
# applied. The list of the patches applied is stored in '.applied_patches_list'
# file in the build directory.
set -e
silent=
if [ "$1" = "-s" ] ; then
# add option to be used by the patch tool
@@ -63,8 +65,12 @@ find ${builddir}/ '(' -name '*.rej' -o -name '.*.rej' ')' -print0 | \
xargs -0 -r rm -f
function apply_patch {
path=$1
patch=$2
path="${1%%/}"
patch="${2}"
case "${path}" in
/*) ;;
*) path="$PWD/${path}";;
esac
if [ "$3" ]; then
type="series"; uncomp="cat"
else
@@ -99,7 +105,15 @@ function apply_patch {
echo "Error: missing patch file ${path}/$patch"
exit 1
fi
echo $patch >> ${builddir}/.applied_patches_list
existing="$(grep -E "/${patch}\$" ${builddir}/.applied_patches_list || true)"
if [ -n "${existing}" ]; then
echo "Error: duplicate filename '${patch}'"
echo "Conflicting files are:"
echo " already applied: ${existing}"
echo " to be applied : ${path}/${patch}"
exit 1
fi
echo "${path}/${patch}" >> ${builddir}/.applied_patches_list
${uncomp} "${path}/$patch" | patch -g0 -p1 -E -d "${builddir}" -t -N $silent
if [ $? != 0 ] ; then
echo "Patch failed! Please fix ${patch}!"
@@ -141,6 +155,7 @@ function scan_patchdir {
fi
}
touch ${builddir}/.applied_patches_list
scan_patchdir "$patchdir" "$patchpattern"
# Check for rejects...
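
For illustration only, here is a minimal Python sketch of the duplicate-patch check added above: applied patches are recorded with their full path in .applied_patches_list, and a second patch with the same basename is rejected. The function and variable names are made up and not part of apply-patches.sh.

    import os

    def record_applied_patch(applied_list, patch_dir, patch_name):
        # Read the list of already-applied patches (full paths), if any.
        entries = []
        if os.path.exists(applied_list):
            with open(applied_list) as f:
                entries = [line.strip() for line in f if line.strip()]
        # Refuse a second patch that has the same basename.
        clashes = [e for e in entries if os.path.basename(e) == patch_name]
        if clashes:
            raise RuntimeError("duplicate filename '%s', already applied: %s"
                               % (patch_name, ', '.join(clashes)))
        # Record the full path so later clashes can report both locations.
        with open(applied_list, 'a') as f:
            f.write(os.path.join(patch_dir, patch_name) + '\n')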

View File

@@ -26,7 +26,7 @@ main() {
printf "*** ERROR: package %s installs executables without proper RPATH:\n" "${pkg}"
fi
printf "*** %s\n" "${file}"
done < <( find "${hostdir}"/usr/{bin,sbin} -type f -exec file {} + 2>/dev/null \
done < <( find "${hostdir}"/{,usr/}{bin,sbin} -type f -exec file {} + 2>/dev/null \
|sed -r -e '/^([^:]+):.*\<ELF\>.*\<executable\>.*/!d' \
-e 's//\1/' \
)

View File

@@ -276,7 +276,7 @@ parser.add_argument("--order", '-O', metavar="GRAPH_ORDER",
help="Ordering of packages: build or duration (for histogram only)")
parser.add_argument("--alternate-colors", '-c', action="store_true",
help="Use alternate colour-scheme")
parser.add_argument("--input", '-i', metavar="OUTPUT",
parser.add_argument("--input", '-i', metavar="INPUT",
help="Input file (usually $(O)/build/build-time.log)")
parser.add_argument("--output", '-o', metavar="OUTPUT", required=True,
help="Output file (.pdf or .png extension)")

View File

@@ -73,7 +73,7 @@ else:
if check_only:
sys.stderr.write("don't specify outfile and check-only at the same time\n")
sys.exit(1)
outfile = open(args.outfile, "wb")
outfile = open(args.outfile, "w")
if args.package is None:
mode = MODE_FULL

View File

@@ -0,0 +1,35 @@
#!/bin/bash
# Try to hardlink a file into a directory, falling back to copy on failure.
#
# Hardlink-or-copy the source file in the first argument into the
# destination directory in the second argument, using the basename in
# the third argument as basename for the destination file. If the third
# argument is missing, use the basename of the source file as basename
# for the destination file.
#
# In either case, remove the destination prior to doing the
# hardlink-or-copy.
#
# Note that this is NOT an atomic operation.
set -e
main() {
local src_file="${1}"
local dst_dir="${2}"
local dst_file="${3}"
if [ -n "${dst_file}" ]; then
dst_file="${dst_dir}/${dst_file}"
else
dst_file="${dst_dir}/${src_file##*/}"
fi
mkdir -p "${dst_dir}"
rm -f "${dst_file}"
ln -f "${src_file}" "${dst_file}" 2>/dev/null \
|| cp -f "${src_file}" "${dst_file}"
}
main "${@}"

View File

@@ -97,7 +97,6 @@ for i in $(find boot/ linux/ package/ -name '*.mk' | sort) ; do
$i = "linux/linux-ext-fbtft.mk" -o \
$i = "linux/linux-ext-xenomai.mk" -o \
$i = "linux/linux-ext-rtai.mk" -o \
$i = "package/efl/efl.mk" -o \
$i = "package/freescale-imx/freescale-imx.mk" -o \
$i = "package/gcc/gcc.mk" -o \
$i = "package/gstreamer/gstreamer.mk" -o \

View File

@@ -0,0 +1,24 @@
#!/usr/bin/env python
# Wrapper for python2 and python3 around compileall to raise an exception
# when python byte code generation fails.
#
# Inspired from:
# http://stackoverflow.com/questions/615632/how-to-detect-errors-from-compileall-compile-dir
from __future__ import print_function
import sys
import py_compile
import compileall
class ReportProblem:
def __nonzero__(self):
type, value, traceback = sys.exc_info()
if type is not None and issubclass(type, py_compile.PyCompileError):
print("Cannot compile %s" %value.file)
raise value
return 1
report_problem = ReportProblem()
compileall.compile_dir(sys.argv[1], quiet=report_problem)
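
The trick above works because compileall truth-tests its quiet argument, including inside the handler that catches PyCompileError (at least in Python 2's compileall), so an object whose truth value inspects sys.exc_info() turns a silenced compile error into a raised exception. A standalone sketch of that pattern, not part of the script:

    from __future__ import print_function
    import sys

    class RaiseOnError(object):
        # Truth-testing this object while an exception is being handled
        # re-raises that exception; otherwise it simply behaves as True.
        def __nonzero__(self):            # Python 2 truth hook
            exc_type, exc_value, _ = sys.exc_info()
            if exc_type is not None:
                raise exc_value
            return 1
        __bool__ = __nonzero__            # Python 3 truth hook (sketch only)

    flag = RaiseOnError()
    try:
        try:
            raise ValueError("byte-code generation failed")
        except ValueError:
            if flag:                      # re-raises the ValueError
                pass
    except ValueError as err:
        print("propagated:", err)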

View File

@@ -471,7 +471,7 @@ unshift @INC, sub {
} # END OF FATPACK CODE
use 5.022; # same major version as target perl
use 5.010;
use strict;
use warnings;
use Fatal qw(open close);
@@ -484,6 +484,19 @@ use HTTP::Tiny;
use Safe;
use MetaCPAN::API::Tiny;
# Below, 5.022 should be aligned with the version of perl actually
# bundled in Buildroot:
die <<"MSG" if $] < 5.022;
This script needs a host perl with the same major version as Buildroot target perl.
Your current host perl is:
$^X
version $]
You may install a local one by running:
perlbrew install perl-5.22.2
MSG
my ($help, $man, $quiet, $force, $recommend, $test, $host);
my $target = 1;
GetOptions( 'help|?' => \$help,
@@ -505,6 +518,7 @@ my %need_host; # name -> 1 if host package is needed
my %need_dlopen; # name -> 1 if requires dynamic library
my %deps_build; # name -> list of host dependencies
my %deps_runtime; # name -> list of target dependencies
my %deps_optional; # name -> list of optional target dependencies
my %license_files; # name -> list of license files
my %checksum; # author -> list of checksum
my $mcpan = MetaCPAN::API::Tiny->new();
@@ -551,10 +565,10 @@ sub find_license_files {
}
sub fetch {
my ($name, $need_target, $need_host) = @_;
my ($name, $need_target, $need_host, $top) = @_;
$need_target{$name} = $need_target if $need_target;
$need_host{$name} = $need_host if $need_host;
unless ($dist{$name}) {
unless ($dist{$name} && !$top) {
say qq{fetch ${name}} unless $quiet;
my $result = $mcpan->release( distribution => $name );
$dist{$name} = $result;
@@ -563,21 +577,26 @@ sub fetch {
$license_files{$name} = find_license_files( $manifest );
my %build = ();
my %runtime = ();
my %optional = ();
foreach my $dep (@{$result->{dependency}}) {
my $modname = ${$dep}{module};
next if $modname eq q{perl};
next if $modname =~ m|^Alien|;
next if $modname =~ m|^Win32|;
next if !$test && $modname =~ m|^Test|;
next if !($test && $top) && $modname =~ m|^Test|;
next if Module::CoreList::is_core( $modname, undef, $] );
# we could use the host Module::CoreList data, because host perl and
# target perl have the same major version
next if ${$dep}{phase} eq q{develop};
next if !$test && ${$dep}{phase} eq q{test};
next if !$recommend && ${$dep}{relationship} ne q{requires};
next if !($test && $top) && ${$dep}{phase} eq q{test};
my $distname = $mcpan->module( $modname )->{distribution};
if (${$dep}{phase} eq q{runtime}) {
$runtime{$distname} = 1;
if (${$dep}{relationship} eq q{requires}) {
$runtime{$distname} = 1;
}
else {
$optional{$distname} = 1 if $recommend && $top;
}
}
else { # configure, build
$build{$distname} = 1;
@@ -585,6 +604,7 @@ sub fetch {
}
$deps_build{$name} = [keys %build];
$deps_runtime{$name} = [keys %runtime];
$deps_optional{$name} = [keys %optional];
foreach my $distname (@{$deps_build{$name}}) {
fetch( $distname, 0, 1 );
}
@@ -592,13 +612,16 @@ sub fetch {
fetch( $distname, $need_target, $need_host );
$need_dlopen{$name} ||= $need_dlopen{$distname};
}
foreach my $distname (@{$deps_optional{$name}}) {
fetch( $distname, $need_target, $need_host );
}
}
return;
}
foreach my $distname (@ARGV) {
# Command-line's distributions
fetch( $distname, !!$target, !!$host );
fetch( $distname, !!$target, !!$host, 1 );
}
say scalar keys %dist, q{ packages fetched.} unless $quiet;
@@ -683,6 +706,15 @@ while (my ($distname, $dist) = each %dist) {
say {$fh} qq{${brname}_LICENSE = ${license}} if $license && $license ne q{unknown};
say {$fh} qq{${brname}_LICENSE_FILES = ${license_files}} if $license_files;
say {$fh} qq{};
foreach (sort @{$deps_optional{$distname}}) {
next if grep { $_ eq $distname; } @{$deps_runtime{$_}}; # avoid cyclic dependencies
my $opt_brname = brname( $_ );
my $opt_fsname = fsname( $_ );
say {$fh} qq{ifeq (\$(BR2_PACKAGE_PERL_${opt_brname}),y)};
say {$fh} qq{${brname}_DEPENDENCIES += ${opt_fsname}};
say {$fh} qq{endif};
say {$fh} qq{};
}
say {$fh} qq{\$(eval \$(perl-package))} if $need_target{$distname};
say {$fh} qq{\$(eval \$(host-perl-package))} if $need_host{$distname};
close $fh;
@@ -727,10 +759,6 @@ support/scripts/scancpan Try-Tiny Moo
=head1 SYNOPSIS
curl -kL http://install.perlbrew.pl | bash
perlbrew install perl-5.18.2
support/scripts/scancpan [options] [distname ...]
Options:
@@ -800,7 +828,7 @@ in order to work with the right CoreList data.
=head1 LICENSE
Copyright (C) 2013-2014 by Francois Perrad <francois.perrad@gadz.org>
Copyright (C) 2013-2016 by Francois Perrad <francois.perrad@gadz.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by

View File

@@ -0,0 +1,653 @@
#!/usr/bin/python2
"""
Utility for building Buildroot packages for existing PyPI packages
Any package built by scanpypi should be manually checked for
errors.
"""
from __future__ import print_function
import argparse
import json
import urllib2
import sys
import os
import shutil
import StringIO
import tarfile
import zipfile
import errno
import hashlib
import re
import textwrap
import tempfile
import imp
from functools import wraps
def setup_decorator(func, method):
"""
Decorator for distutils.core.setup and setuptools.setup.
Puts the arguments with which setup is called into a dict and
adds the key 'method', which should be either 'setuptools' or 'distutils'.
Keyword arguments:
func -- either setuptools.setup or distutils.core.setup
method -- either 'setuptools' or 'distutils'
"""
@wraps(func)
def closure(*args, **kwargs):
# Any python package calls its setup function to be installed.
# Argument 'name' of this setup function is the package's name
BuildrootPackage.setup_args[kwargs['name']] = kwargs
BuildrootPackage.setup_args[kwargs['name']]['method'] = method
return closure
# monkey patch
import setuptools
setuptools.setup = setup_decorator(setuptools.setup, 'setuptools')
import distutils
distutils.core.setup = setup_decorator(setuptools.setup, 'distutils')
def find_file_upper_case(filenames, path='./'):
"""
List generator:
Recursively find files that match one of the specified filenames.
Returns a relative path starting with path argument.
Keyword arguments:
filenames -- List of filenames to be found
path -- Path to the directory to search
"""
for root, dirs, files in os.walk(path):
for file in files:
if file.upper() in filenames:
yield (os.path.join(root, file))
def pkg_buildroot_name(pkg_name):
"""
Returns the Buildroot package name for the PyPI package pkg_name.
Removes all non-alphanumeric characters except '-',
lowers the name and adds the 'python-' prefix
Keyword arguments:
pkg_name -- String to rename
"""
name = re.sub('[^\w-]', '', pkg_name.lower())
prefix = 'python-'
pattern = re.compile('^(?!' + prefix + ')(.+?)$')
name = pattern.sub(r'python-\1', name)
return name
class DownloadFailed(Exception):
pass
class BuildrootPackage():
"""This class's methods are not meant to be used individually please
use them in the correct order:
__init__
fetch_package_info
download_package
extract_package
load_setup
get_requirements
create_package_mk
create_hash_file
create_config_in
"""
setup_args = {}
def __init__(self, real_name, pkg_folder):
self.real_name = real_name
self.buildroot_name = pkg_buildroot_name(self.real_name)
self.pkg_dir = os.path.join(pkg_folder, self.buildroot_name)
self.mk_name = self.buildroot_name.upper().replace('-', '_')
self.as_string = None
self.md5_sum = None
self.metadata = None
self.metadata_name = None
self.metadata_url = None
self.pkg_req = None
self.setup_metadata = None
self.tmp_extract = None
self.used_url = None
self.filename = None
self.url = None
self.version = None
def fetch_package_info(self):
"""
Fetch a package's metadata from the python package index
"""
self.metadata_url = 'https://pypi.python.org/pypi/{pkg}/json'.format(
pkg=self.real_name)
try:
pkg_json = urllib2.urlopen(self.metadata_url).read().decode()
except urllib2.HTTPError as error:
print('ERROR:', error.getcode(), error.msg, file=sys.stderr)
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
except urllib2.URLError:
print('ERROR: Could not find package {pkg}.\n'
'Check syntax inside the python package index:\n'
'https://pypi.python.org/pypi/ '
.format(pkg=self.real_name))
raise
self.metadata = json.loads(pkg_json)
self.version = self.metadata['info']['version']
self.metadata_name = self.metadata['info']['name']
def download_package(self):
"""
Download a package using metadata from pypi
"""
try:
self.metadata['urls'][0]['filename']
except IndexError:
print(
'Non-conventional package, ',
'please check carefully after creation')
self.metadata['urls'] = [{
'packagetype': 'sdist',
'url': self.metadata['info']['download_url'],
'md5_digest': None}]
# In this case, we can't get the name of the downloaded file
# from the pypi api, so we need to find it; this should work
urlpath = urllib2.urlparse.urlparse(
self.metadata['info']['download_url']).path
# urlparse().path give something like
# /path/to/file-version.tar.gz
# We use basename to remove /path/to
self.metadata['urls'][0]['filename'] = os.path.basename(urlpath)
for download_url in self.metadata['urls']:
if 'bdist' in download_url['packagetype']:
continue
try:
print('Downloading package {pkg} from {url}...'.format(
pkg=self.real_name, url=download_url['url']))
download = urllib2.urlopen(download_url['url'])
except urllib2.HTTPError as http_error:
download = http_error
else:
self.used_url = download_url
self.as_string = download.read()
if not download_url['md5_digest']:
break
self.md5_sum = hashlib.md5(self.as_string).hexdigest()
if self.md5_sum == download_url['md5_digest']:
break
else:
if download.__class__ == urllib2.HTTPError:
raise download
raise DownloadFailed('Failed to download package {pkg}'
.format(pkg=self.real_name))
self.filename = self.used_url['filename']
self.url = self.used_url['url']
def extract_package(self, tmp_path):
"""
Extract the package contents into a directory
Keyword arguments:
tmp_path -- directory where you want the package to be extracted
"""
as_file = StringIO.StringIO(self.as_string)
if self.filename[-3:] == 'zip':
with zipfile.ZipFile(as_file) as as_zipfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
try:
os.makedirs(tmp_pkg)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.message, file=sys.stderr)
return None, None
print('WARNING:', exception.message, file=sys.stderr)
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
as_zipfile.extractall(tmp_pkg)
else:
with tarfile.open(fileobj=as_file) as as_tarfile:
tmp_pkg = os.path.join(tmp_path, self.buildroot_name)
try:
os.makedirs(tmp_pkg)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.message, file=sys.stderr)
return None, None
print('WARNING:', exception.message, file=sys.stderr)
print('Removing {pkg}...'.format(pkg=tmp_pkg))
shutil.rmtree(tmp_pkg)
os.makedirs(tmp_pkg)
as_tarfile.extractall(tmp_pkg)
tmp_extract = '{folder}/{name}-{version}'
self.tmp_extract = tmp_extract.format(
folder=tmp_pkg,
name=self.metadata_name,
version=self.version)
def load_setup(self):
"""
Loads the corresponding setup and stores its metadata
"""
current_dir = os.getcwd()
os.chdir(self.tmp_extract)
sys.path.append(self.tmp_extract)
s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract])
setup = imp.load_module('setup', s_file, s_path, s_desc)
try:
self.setup_metadata = self.setup_args[self.metadata_name]
except KeyError:
# This means setup was not called, which most likely means that it is
# called through the if __name__ == '__main__' directive.
# In this case, we can only pray that it is called through a
# function called main() in setup.py.
setup.main([]) # Will raise AttributeError if not found
self.setup_metadata = self.setup_args[self.metadata_name]
# Here we must remove the module the hard way.
# We must do this because of a very specific case: if a package calls
# setup from the __main__ but does not come with a 'main()' function,
# for some reason setup.main([]) will successfully call the main
# function of a previous package...
sys.modules.pop('setup',None)
del setup
os.chdir(current_dir)
sys.path.remove(self.tmp_extract)
def get_requirements(self, pkg_folder):
"""
Retrieve dependencies from the metadata found in the setup.py script of
a pypi package.
Keyword Arguments:
pkg_folder -- location of the already created packages
"""
if 'install_requires' not in self.setup_metadata:
self.pkg_req = None
return set()
self.pkg_req = self.setup_metadata['install_requires']
self.pkg_req = [re.sub('([-.\w]+).*', r'\1', req)
for req in self.pkg_req]
req_not_found = self.pkg_req
self.pkg_req = map(pkg_buildroot_name, self.pkg_req)
pkg_tuples = zip(req_not_found, self.pkg_req)
# pkg_tuples is a list of tuples that looks like
# ('werkzeug','python-werkzeug') because I need both when checking if
# dependencies already exist or are already in the download list
req_not_found = set(
pkg[0] for pkg in pkg_tuples
if not os.path.isdir(pkg[1])
)
return req_not_found
def __create_mk_header(self):
"""
Create the header of the <package_name>.mk file
"""
header = ['#' * 80 + '\n']
header.append('#\n')
header.append('# {name}\n'.format(name=self.buildroot_name))
header.append('#\n')
header.append('#' * 80 + '\n')
header.append('\n')
return header
def __create_mk_download_info(self):
"""
Create the lines referring to the download information of the
<package_name>.mk file
"""
lines = []
version_line = '{name}_VERSION = {version}\n'.format(
name=self.mk_name,
version=self.version)
lines.append(version_line)
targz = self.filename.replace(
self.version,
'$({name}_VERSION)'.format(name=self.mk_name))
targz_line = '{name}_SOURCE = {filename}\n'.format(
name=self.mk_name,
filename=targz)
lines.append(targz_line)
if self.filename not in self.url:
# Sometimes the filename is in the url, sometimes it's not
site_url = self.url
else:
site_url = self.url[:self.url.find(self.filename)]
site_line = '{name}_SITE = {url}'.format(name=self.mk_name,
url=site_url)
site_line = site_line.rstrip('/') + '\n'
lines.append(site_line)
return lines
def __create_mk_setup(self):
"""
Create the line referring to the setup method of the package of the
<package_name>.mk file
There are two things you can use to make an installer
for a python package: distutils or setuptools
distutils comes with python but does not support dependencies.
distutils is mostly still there for backward support.
setuptools is what smart people use,
but it is not shipped with python :(
"""
lines = []
setup_type_line = '{name}_SETUP_TYPE = {method}\n'.format(
name=self.mk_name,
method=self.setup_metadata['method'])
lines.append(setup_type_line)
return lines
def __create_mk_license(self):
"""
Create the lines referring to the package's license information of the
<package_name>.mk file
The license is found using the metadata from pypi.
In the metadata, the license can be found either with standard names in
the classifiers part or with naming from the packager in the "License"
part.
From the classifiers, the license is "translated" according to
buildroot standards if need be (e.g. from Apache Software License to
Apache-2.0).
From the License part, we cannot guess what formatting the packager
used. Hence, it is likely to be incorrect (e.g. Apache License 2.0
instead of Apache-2.0).
The license files are found by searching the package for files named
license or license.txt (case insensitive).
If more than one license file is found, the user is asked to select
which ones he wants to use.
"""
license_dict = {
'Apache Software License': 'Apache-2.0',
'BSD License': 'BSD',
'European Union Public Licence 1.0': 'EUPLv1.0',
'European Union Public Licence 1.1': 'EUPLv1.1',
"GNU General Public License": "GPL",
"GNU General Public License v2": "GPLv2",
"GNU General Public License v2 or later": "GPLv2+",
"GNU General Public License v3": "GPLv3",
"GNU General Public License v3 or later": "GPLv3+",
"GNU Lesser General Public License v2": "LGPLv2.1",
"GNU Lesser General Public License v2 or later": "LGPLv2.1+",
"GNU Lesser General Public License v3": "LGPLv3",
"GNU Lesser General Public License v3 or later": "LGPLv3+",
"GNU Library or Lesser General Public License": "LGPLv2",
"ISC License": "ISC",
"MIT License": "MIT",
"Mozilla Public License 1.0": "MPL-1.0",
"Mozilla Public License 1.1": "MPL-1.1",
"Mozilla Public License 2.0": "MPL-2.0",
"Zope Public License": "ZPL"
}
regexp = re.compile('^License :* *.* *:+ (.*)( \(.*\))?$')
classifiers_licenses = [regexp.sub(r"\1", lic)
for lic in self.metadata['info']['classifiers']
if regexp.match(lic)]
licenses = map(lambda x: license_dict[x] if x in license_dict else x,
classifiers_licenses)
lines = []
if not len(licenses):
licenses = [self.metadata['info']['license']]
print('WARNING: License has been set to "{license}". It is most'
' likely wrong, please change it if need be'.format(
license=', '.join(licenses)))
license_line = '{name}_LICENSE = {license}\n'.format(
name=self.mk_name,
license=', '.join(licenses))
lines.append(license_line)
filenames = ['LICENCE', 'LICENSE', 'LICENSE.TXT', 'COPYING',
'COPYING.TXT']
license_files = list(find_file_upper_case(filenames, self.tmp_extract))
license_files = [license.replace(self.tmp_extract, '')[1:]
for license in license_files]
if len(license_files) > 0:
if len(license_files) > 1:
print('More than one file found for license:',
', '.join(license_files))
license_files = [filename
for index, filename in enumerate(license_files)]
license_file_line = ('{name}_LICENSE_FILES ='
' {files}\n'.format(
name=self.mk_name,
files=' '.join(license_files)))
lines.append(license_file_line)
else:
print('WARNING: No license file found,'
' please specify it manually afterwards')
license_file_line = '# No license file found\n'
return lines
def __create_mk_requirements(self):
"""
Create the lines referring to the dependencies of the
<package_name>.mk file
Keyword Arguments:
pkg_name -- name of the package
pkg_req -- dependencies of the package
"""
lines = []
dependencies_line = ('{name}_DEPENDENCIES ='
' {reqs}\n'.format(
name=self.mk_name,
reqs=' '.join(self.pkg_req)))
lines.append(dependencies_line)
return lines
def create_package_mk(self):
"""
Create the lines corresponding to the <package_name>.mk file
"""
pkg_mk = '{name}.mk'.format(name=self.buildroot_name)
path_to_mk = os.path.join(self.pkg_dir, pkg_mk)
print('Creating {file}...'.format(file=path_to_mk))
lines = self.__create_mk_header()
lines += self.__create_mk_download_info()
lines += self.__create_mk_setup()
lines += self.__create_mk_license()
lines.append('\n')
lines.append('$(eval $(python-package))')
lines.append('\n')
with open(path_to_mk, 'w') as mk_file:
mk_file.writelines(lines)
def create_hash_file(self):
"""
Create the lines corresponding to the <package_name>.hash files
"""
pkg_hash = '{name}.hash'.format(name=self.buildroot_name)
path_to_hash = os.path.join(self.pkg_dir, pkg_hash)
print('Creating {filename}...'.format(filename=path_to_hash))
lines = []
if self.used_url['md5_digest']:
md5_comment = '# md5 from {url}, sha256 locally computed\n'.format(
url=self.metadata_url)
lines.append(md5_comment)
hash_line = '{method}\t{digest} {filename}\n'.format(
method='md5',
digest=self.used_url['md5_digest'],
filename=self.filename)
lines.append(hash_line)
digest = hashlib.sha256(self.as_string).hexdigest()
hash_line = '{method}\t{digest} {filename}\n'.format(
method='sha256',
digest=digest,
filename=self.filename)
lines.append(hash_line)
with open(path_to_hash, 'w') as hash_file:
hash_file.writelines(lines)
def create_config_in(self):
"""
Creates the Config.in file of a package
"""
path_to_config = os.path.join(self.pkg_dir, 'Config.in')
print('Creating {file}...'.format(file=path_to_config))
lines = []
config_line = 'config BR2_PACKAGE_{name}\n'.format(
name=self.mk_name)
lines.append(config_line)
bool_line = '\tbool "{name}"\n'.format(name=self.buildroot_name)
lines.append(bool_line)
if self.pkg_req:
for dep in self.pkg_req:
dep_line = '\tselect BR2_PACKAGE_{req} # runtime\n'.format(
req=dep.upper().replace('-', '_'))
lines.append(dep_line)
lines.append('\thelp\n')
help_lines = textwrap.wrap(self.metadata['info']['summary'],
initial_indent='\t ',
subsequent_indent='\t ')
# make sure a help text is terminated with a full stop
if help_lines[-1][-1] != '.':
help_lines[-1] += '.'
# \t + two spaces is 3 char long
help_lines.append('')
help_lines.append('\t ' + self.metadata['info']['home_page'])
help_lines = map(lambda x: x + '\n', help_lines)
lines += help_lines
with open(path_to_config, 'w') as config_file:
config_file.writelines(lines)
def main():
# Building the parser
parser = argparse.ArgumentParser(
description="Creates buildroot packages from the metadata of "
"an existing PyPI packages and include it "
"in menuconfig")
parser.add_argument("packages",
help="list of packages to be created",
nargs='+')
parser.add_argument("-o", "--output",
help="""
Output directory for packages.
Default is ./package
""",
default='./package')
args = parser.parse_args()
packages = list(set(args.packages))
# tmp_path is where we'll extract the files later
tmp_prefix = 'scanpypi-'
pkg_folder = args.output
tmp_path = tempfile.mkdtemp(prefix=tmp_prefix)
try:
for real_pkg_name in packages:
package = BuildrootPackage(real_pkg_name, pkg_folder)
print('buildroot package name for {}:'.format(package.real_name),
package.buildroot_name)
# First we download the package
# Most of the info we need can only be found inside the package
print('Package:', package.buildroot_name)
print('Fetching package', package.real_name)
try:
package.fetch_package_info()
except (urllib2.URLError, urllib2.HTTPError):
continue
if package.metadata_name.lower() == 'setuptools':
# setuptools imports itself, which does not work very well
# with the monkey patch at the beginning
print('Error: setuptools cannot be built using scanPyPI')
continue
try:
package.download_package()
except urllib2.HTTPError as error:
print('Error: {code} {reason}'.format(code=error.code,
reason=error.reason))
print('Error downloading package:', package.buildroot_name)
print()
continue
# extract the tarball
try:
package.extract_package(tmp_path)
except (tarfile.ReadError, zipfile.BadZipfile):
print('Error extracting package {}'.format(package.real_name))
print()
continue
# Loading the package install info from the package
try:
package.load_setup()
except ImportError as err:
if 'buildutils' in err.message:
print('This package needs buildutils')
else:
raise
continue
except AttributeError:
print('Error: Could not install package {pkg}'.format(
pkg=package.real_name))
continue
# Package requirements are an argument of the setup function
req_not_found = package.get_requirements(pkg_folder)
req_not_found = req_not_found.difference(packages)
packages += req_not_found
if req_not_found:
print('Added packages \'{pkgs}\' as dependencies of {pkg}'
.format(pkgs=", ".join(req_not_found),
pkg=package.buildroot_name))
print('Checking if package {name} already exists...'.format(
name=package.pkg_dir))
try:
os.makedirs(package.pkg_dir)
except OSError as exception:
if exception.errno != errno.EEXIST:
print("ERROR: ", exception.message, file=sys.stderr)
continue
print('Error: Package {name} already exists'
.format(name=package.pkg_dir))
del_pkg = raw_input(
'Do you want to delete the existing package? [y/N] ')
if del_pkg.lower() == 'y':
shutil.rmtree(package.pkg_dir)
os.makedirs(package.pkg_dir)
else:
continue
package.create_package_mk()
package.create_hash_file()
package.create_config_in()
print()
# printing an empty line for visual comfort
finally:
shutil.rmtree(tmp_path)
if __name__ == "__main__":
main()
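
A minimal, self-contained sketch of the monkey-patching idea scanpypi relies on: setup() is replaced by a wrapper that records its keyword arguments instead of installing anything, and that recorded dict is what the requirement and metadata handling later reads. The package name and dependencies below are made up.

    from functools import wraps

    captured_setup_args = {}

    def record_setup(func, method):
        @wraps(func)
        def closure(*args, **kwargs):
            # Record the call instead of running the real setup().
            captured_setup_args[kwargs['name']] = dict(kwargs, method=method)
        return closure

    def real_setup(**kwargs):
        raise AssertionError("never reached once patched")

    setup = record_setup(real_setup, 'setuptools')

    # What importing a package's setup.py would effectively execute:
    setup(name='example-pkg', version='1.0', install_requires=['six'])

    print(captured_setup_args['example-pkg']['install_requires'])  # ['six']
    print(captured_setup_args['example-pkg']['method'])            # setuptools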

View File

@@ -66,7 +66,7 @@ def build_package_dict(builddir):
filesdict = {}
with open(os.path.join(builddir, "build", "packages-file-list.txt")) as filelistf:
for l in filelistf.readlines():
pkg, fpath = l.split(",")
pkg, fpath = l.split(",", 1)
# remove the initial './' in each file path
fpath = fpath.strip()[2:]
fullpath = os.path.join(builddir, "target", fpath)
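
The maxsplit argument matters because a file path may itself contain a comma, so only the first comma separates the package name from the path. A quick illustration with a made-up line in the packages-file-list.txt format:

    line = "host-gcc-final,./usr/share/doc/gcc,notes.txt\n"
    pkg, fpath = line.split(",", 1)   # split only on the first comma
    print(pkg)                        # host-gcc-final
    print(fpath.strip()[2:])          # usr/share/doc/gcc,notes.txt
    # With a plain line.split(",") the 2-name unpacking above would
    # raise ValueError for such a line.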