Fix include path to differ from libjson-c
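Context: libjson-c also installs its headers under an include/json/ directory, so a jsoncpp that publishes <json/*.h> cannot be co-installed with it under the same include root. Renaming jsoncpp's include directory to jsoncpp/ avoids that clash. A minimal consumer sketch of the renamed layout (the install prefix in the comments is an illustrative assumption; jsoncpp's API itself is unchanged by this commit):

// Hypothetical include layout with both libraries installed:
//   <prefix>/include/json/json.h      -- libjson-c keeps this path
//   <prefix>/include/jsoncpp/value.h  -- jsoncpp after this commit
#include <jsoncpp/value.h>  // was <json/value.h> before the rename

int main() {
    Json::Value root;            // same Json::Value type as before the move
    root["renamed"] = true;
    return root["renamed"].asBool() ? 0 : 1;
}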
@@ -1,32 +0,0 @@
-# This is only for jsoncpp developers/contributors.
-# We use this to sign releases, generate documentation, etc.
-VER?=$(shell cat version)
-
-default:
-	@echo "VER=${VER}"
-sign: jsoncpp-${VER}.tar.gz
-	gpg --armor --detach-sign $<
-	gpg --verify $<.asc
-# Then upload .asc to the release.
-jsoncpp-%.tar.gz:
-	curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@
-dox:
-	python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in
-	rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/
-# Then 'git add -A' and 'git push' in jsoncpp-docs.
-build:
-	mkdir -p build/debug
-	cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=ON -G "Unix Makefiles" ../..
-	make -C build/debug
-
-# Currently, this depends on include/json/version.h generated
-# by cmake.
-test-amalgamate:
-	python2.7 amalgamate.py
-	python3.4 amalgamate.py
-	cd dist; gcc -I. -c jsoncpp.cpp
-
-clean:
-	\rm -rf *.gz *.asc dist/
-
-.PHONY: build
@@ -1 +0,0 @@
-# module
@@ -1,33 +0,0 @@
-{
-    "cmake_variants" : [
-        {"name": "generator",
-         "generators": [
-            {"generator": [
-                "Visual Studio 7 .NET 2003",
-                "Visual Studio 9 2008",
-                "Visual Studio 9 2008 Win64",
-                "Visual Studio 10",
-                "Visual Studio 10 Win64",
-                "Visual Studio 11",
-                "Visual Studio 11 Win64"
-                ]
-            },
-            {"generator": ["MinGW Makefiles"],
-             "env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
-            }
-         ]
-        },
-        {"name": "shared_dll",
-         "variables": [
-            ["JSONCPP_LIB_BUILD_SHARED=true"],
-            ["JSONCPP_LIB_BUILD_SHARED=false"]
-         ]
-        },
-        {"name": "build_type",
-         "build_types": [
-            "debug",
-            "release"
-         ]
-        }
-    ]
-}
@@ -1,26 +0,0 @@
-{
-    "cmake_variants" : [
-        {"name": "generator",
-         "generators": [
-            {"generator": [
-                "Visual Studio 6",
-                "Visual Studio 7",
-                "Visual Studio 8 2005"
-                ]
-            }
-         ]
-        },
-        {"name": "shared_dll",
-         "variables": [
-            ["JSONCPP_LIB_BUILD_SHARED=true"],
-            ["JSONCPP_LIB_BUILD_SHARED=false"]
-         ]
-        },
-        {"name": "build_type",
-         "build_types": [
-            "debug",
-            "release"
-         ]
-        }
-    ]
-}
@@ -1,202 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Baptiste Lepilleur, 2009
-
-from __future__ import print_function
-from dircache import listdir
-import re
-import fnmatch
-import os.path
-
-
-# These fnmatch expressions are used by default to prune the directory tree
-# while doing the recursive traversal in the glob_impl method of glob function.
-prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS '
-
-# These fnmatch expressions are used by default to exclude files and dirs
-# while doing the recursive traversal in the glob_impl method of glob function.
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
-
-# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree
-# while doing the recursive traversal in the glob_impl method of glob function.
-default_excludes = '''
-**/*~
-**/#*#
-**/.#*
-**/%*%
-**/._*
-**/CVS
-**/CVS/**
-**/.cvsignore
-**/SCCS
-**/SCCS/**
-**/vssver.scc
-**/.svn
-**/.svn/**
-**/.git
-**/.git/**
-**/.gitignore
-**/.bzr
-**/.bzr/**
-**/.hg
-**/.hg/**
-**/_MTN
-**/_MTN/**
-**/_darcs
-**/_darcs/**
-**/.DS_Store '''
-
-DIR = 1
-FILE = 2
-DIR_LINK = 4
-FILE_LINK = 8
-LINKS = DIR_LINK | FILE_LINK
-ALL_NO_LINK = DIR | FILE
-ALL = DIR | FILE | LINKS
-
-_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)')
-
-def ant_pattern_to_re(ant_pattern):
-    """Generates a regular expression from the ant pattern.
-    Matching convention:
-    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
-    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
-    *.py: match 'script.py' but not 'a/script.py'
-    """
-    rex = ['^']
-    next_pos = 0
-    sep_rex = r'(?:/|%s)' % re.escape(os.path.sep)
-    ## print 'Converting', ant_pattern
-    for match in _ANT_RE.finditer(ant_pattern):
-        ## print 'Matched', match.group()
-        ## print match.start(0), next_pos
-        if match.start(0) != next_pos:
-            raise ValueError("Invalid ant pattern")
-        if match.group(1): # /**/
-            rex.append(sep_rex + '(?:.*%s)?' % sep_rex)
-        elif match.group(2): # **/
-            rex.append('(?:.*%s)?' % sep_rex)
-        elif match.group(3): # /**
-            rex.append(sep_rex + '.*')
-        elif match.group(4): # *
-            rex.append('[^/%s]*' % re.escape(os.path.sep))
-        elif match.group(5): # /
-            rex.append(sep_rex)
-        else: # somepath
-            rex.append(re.escape(match.group(6)))
-        next_pos = match.end()
-    rex.append('$')
-    return re.compile(''.join(rex))
-
-def _as_list(l):
-    if isinstance(l, basestring):
-        return l.split()
-    return l
-
-def glob(dir_path,
-         includes = '**/*',
-         excludes = default_excludes,
-         entry_type = FILE,
-         prune_dirs = prune_dirs,
-         max_depth = 25):
-    include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
-    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
-    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
-    dir_path = dir_path.replace('/',os.path.sep)
-    entry_type_filter = entry_type
-
-    def is_pruned_dir(dir_name):
-        for pattern in prune_dirs:
-            if fnmatch.fnmatch(dir_name, pattern):
-                return True
-        return False
-
-    def apply_filter(full_path, filter_rexs):
-        """Return True if at least one of the filter regular expression match full_path."""
-        for rex in filter_rexs:
-            if rex.match(full_path):
-                return True
-        return False
-
-    def glob_impl(root_dir_path):
-        child_dirs = [root_dir_path]
-        while child_dirs:
-            dir_path = child_dirs.pop()
-            for entry in listdir(dir_path):
-                full_path = os.path.join(dir_path, entry)
-                ## print 'Testing:', full_path,
-                is_dir = os.path.isdir(full_path)
-                if is_dir and not is_pruned_dir(entry): # explore child directory ?
-                    ## print '===> marked for recursion',
-                    child_dirs.append(full_path)
-                included = apply_filter(full_path, include_filter)
-                rejected = apply_filter(full_path, exclude_filter)
-                if not included or rejected: # do not include entry ?
-                    ## print '=> not included or rejected'
-                    continue
-                link = os.path.islink(full_path)
-                is_file = os.path.isfile(full_path)
-                if not is_file and not is_dir:
-                    ## print '=> unknown entry type'
-                    continue
-                if link:
-                    entry_type = is_file and FILE_LINK or DIR_LINK
-                else:
-                    entry_type = is_file and FILE or DIR
-                ## print '=> type: %d' % entry_type,
-                if (entry_type & entry_type_filter) != 0:
-                    ## print ' => KEEP'
-                    yield os.path.join(dir_path, entry)
-                ## else:
-                ##     print ' => TYPE REJECTED'
-    return list(glob_impl(dir_path))
-
-
-if __name__ == "__main__":
-    import unittest
-
-    class AntPatternToRETest(unittest.TestCase):
-        ## def test_conversion(self):
-        ##     self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern)
-
-        def test_matching(self):
-            test_cases = [ ('path',
-                            ['path'],
-                            ['somepath', 'pathsuffix', '/path', '/path']),
-                           ('*.py',
-                            ['source.py', 'source.ext.py', '.py'],
-                            ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']),
-                           ('**/path',
-                            ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
-                            ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']),
-                           ('path/**',
-                            ['path/a', 'path/path/a', 'path//'],
-                            ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']),
-                           ('/**/path',
-                            ['/path', '/a/path', '/a/b/path/path', '/path/path'],
-                            ['path', 'path/', 'a/path', '/pathsuffix', '/somepath']),
-                           ('a/b',
-                            ['a/b'],
-                            ['somea/b', 'a/bsuffix', 'a/b/c']),
-                           ('**/*.py',
-                            ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
-                            ['script.pyc', 'script.pyo', 'a.py/b']),
-                           ('src/**/*.py',
-                            ['src/a.py', 'src/dir/a.py'],
-                            ['a/src/a.py', '/src/a.py']),
-                           ]
-            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
-                def local_path(paths):
-                    return [ p.replace('/',os.path.sep) for p in paths ]
-                test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches)))
-            for ant_pattern, accepted_matches, rejected_matches in test_cases:
-                rex = ant_pattern_to_re(ant_pattern)
-                print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
-                for accepted_match in accepted_matches:
-                    print('Accepted?:', accepted_match)
-                    self.assertTrue(rex.match(accepted_match) is not None)
-                for rejected_match in rejected_matches:
-                    print('Rejected?:', rejected_match)
-                    self.assertTrue(rex.match(rejected_match) is None)
-
-    unittest.main()
@@ -1,64 +0,0 @@
-from __future__ import print_function
-import os.path
-
-def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
-    """Makes sure that all sources have the specified eol sequence (default: unix)."""
-    if not os.path.isfile(path):
-        raise ValueError('Path "%s" is not a file' % path)
-    try:
-        f = open(path, 'rb')
-    except IOError as msg:
-        print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
-        return False
-    try:
-        raw_lines = f.readlines()
-    finally:
-        f.close()
-    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
-    if raw_lines != fixed_lines:
-        print('%s =>' % path, end=' ')
-        if not is_dry_run:
-            f = open(path, "wb")
-            try:
-                f.writelines(fixed_lines)
-            finally:
-                f.close()
-        if verbose:
-            print(is_dry_run and ' NEED FIX' or ' FIXED')
-    return True
-##
-##
-##
-##def _do_fix(is_dry_run = True):
-##    from waftools import antglob
-##    python_sources = antglob.glob('.',
-##        includes = '**/*.py **/wscript **/wscript_build',
-##        excludes = antglob.default_excludes + './waf.py',
-##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
-##    for path in python_sources:
-##        _fix_python_source(path, is_dry_run)
-##
-##    cpp_sources = antglob.glob('.',
-##        includes = '**/*.cpp **/*.h **/*.inl',
-##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
-##    for path in cpp_sources:
-##        _fix_source_eol(path, is_dry_run)
-##
-##
-##def dry_fix(context):
-##    _do_fix(is_dry_run = True)
-##
-##def fix(context):
-##    _do_fix(is_dry_run = False)
-##
-##def shutdown():
-##    pass
-##
-##def check(context):
-##    # Unit tests are run when "check" target is used
-##    ut = UnitTest.unit_test()
-##    ut.change_to_testfile_dir = True
-##    ut.want_to_see_test_output = True
-##    ut.want_to_see_test_error = True
-##    ut.run()
-##    ut.print_results()
@@ -1,189 +0,0 @@
-"""Script to generate doxygen documentation.
-"""
-from __future__ import print_function
-from __future__ import unicode_literals
-from devtools import tarball
-from contextlib import contextmanager
-import subprocess
-import traceback
-import re
-import os
-import sys
-import shutil
-
-@contextmanager
-def cd(newdir):
-    """
-    http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
-    """
-    prevdir = os.getcwd()
-    os.chdir(newdir)
-    try:
-        yield
-    finally:
-        os.chdir(prevdir)
-
-def find_program(*filenames):
-    """find a program in folders path_lst, and sets env[var]
-    @param filenames: a list of possible names of the program to search for
-    @return: the full path of the filename if found, or '' if filename could not be found
-    """
-    paths = os.environ.get('PATH', '').split(os.pathsep)
-    suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or ''
-    for filename in filenames:
-        for name in [filename+ext for ext in suffixes.split()]:
-            for directory in paths:
-                full_path = os.path.join(directory, name)
-                if os.path.isfile(full_path):
-                    return full_path
-    return ''
-
-def do_subst_in_file(targetfile, sourcefile, dict):
-    """Replace all instances of the keys of dict with their values.
-    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
-    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
-    """
-    with open(sourcefile, 'r') as f:
-        contents = f.read()
-    for (k,v) in list(dict.items()):
-        v = v.replace('\\','\\\\')
-        contents = re.sub(k, v, contents)
-    with open(targetfile, 'w') as f:
-        f.write(contents)
-
-def getstatusoutput(cmd):
-    """cmd is a list.
-    """
-    try:
-        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        output, _ = process.communicate()
-        status = process.returncode
-    except:
-        status = -1
-        output = traceback.format_exc()
-    return status, output
-
-def run_cmd(cmd, silent=False):
-    """Raise exception on failure.
-    """
-    info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
-    print(info)
-    sys.stdout.flush()
-    if silent:
-        status, output = getstatusoutput(cmd)
-    else:
-        status, output = os.system(' '.join(cmd)), ''
-    if status:
-        msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
-        raise Exception(msg)
-
-def assert_is_exe(path):
-    if not path:
-        raise Exception('path is empty.')
-    if not os.path.isfile(path):
-        raise Exception('%r is not a file.' %path)
-    if not os.access(path, os.X_OK):
-        raise Exception('%r is not executable by this user.' %path)
-
-def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
-    assert_is_exe(doxygen_path)
-    config_file = os.path.abspath(config_file)
-    with cd(working_dir):
-        cmd = [doxygen_path, config_file]
-        run_cmd(cmd, is_silent)
-
-def build_doc(options, make_release=False):
-    if make_release:
-        options.make_tarball = True
-        options.with_dot = True
-        options.with_html_help = True
-        options.with_uml_look = True
-        options.open = False
-        options.silent = True
-
-    version = open('version', 'rt').read().strip()
-    output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
-    if not os.path.isdir(output_dir):
-        os.makedirs(output_dir)
-    top_dir = os.path.abspath('.')
-    html_output_dirname = 'jsoncpp-api-html-' + version
-    tarball_path = os.path.join('dist', html_output_dirname + '.tar.gz')
-    warning_log_path = os.path.join(output_dir, '../jsoncpp-doxygen-warning.log')
-    html_output_path = os.path.join(output_dir, html_output_dirname)
-    def yesno(bool):
-        return bool and 'YES' or 'NO'
-    subst_keys = {
-        '%JSONCPP_VERSION%': version,
-        '%DOC_TOPDIR%': '',
-        '%TOPDIR%': top_dir,
-        '%HTML_OUTPUT%': os.path.join('..', output_dir, html_output_dirname),
-        '%HAVE_DOT%': yesno(options.with_dot),
-        '%DOT_PATH%': os.path.split(options.dot_path)[0],
-        '%HTML_HELP%': yesno(options.with_html_help),
-        '%UML_LOOK%': yesno(options.with_uml_look),
-        '%WARNING_LOG_PATH%': os.path.join('..', warning_log_path)
-        }
-
-    if os.path.isdir(output_dir):
-        print('Deleting directory:', output_dir)
-        shutil.rmtree(output_dir)
-    if not os.path.isdir(output_dir):
-        os.makedirs(output_dir)
-
-    do_subst_in_file('doc/doxyfile', options.doxyfile_input_path, subst_keys)
-    run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent)
-    if not options.silent:
-        print(open(warning_log_path, 'r').read())
-    index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
-    print('Generated documentation can be found in:')
-    print(index_path)
-    if options.open:
-        import webbrowser
-        webbrowser.open('file://' + index_path)
-    if options.make_tarball:
-        print('Generating doc tarball to', tarball_path)
-        tarball_sources = [
-            output_dir,
-            'README.md',
-            'LICENSE',
-            'NEWS.txt',
-            'version'
-            ]
-        tarball_basedir = os.path.join(output_dir, html_output_dirname)
-        tarball.make_tarball(tarball_path, tarball_sources, tarball_basedir, html_output_dirname)
-    return tarball_path, html_output_dirname
-
-def main():
-    usage = """%prog
-    Generates doxygen documentation in build/doxygen.
-    Optionaly makes a tarball of the documentation to dist/.
-
-    Must be started in the project top directory.
-    """
-    from optparse import OptionParser
-    parser = OptionParser(usage=usage)
-    parser.allow_interspersed_args = False
-    parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False,
-                      help="""Enable usage of DOT to generate collaboration diagram""")
-    parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'),
-                      help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
-    parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
-                      help="""Path to Doxygen tool. [Default: %default]""")
-    parser.add_option('--in', dest="doxyfile_input_path", action='store', default='doc/doxyfile.in',
-                      help="""Path to doxygen inputs. [Default: %default]""")
-    parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
-                      help="""Enable generation of Microsoft HTML HELP""")
-    parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,
-                      help="""Generates DOT graph without UML look [Default: False]""")
-    parser.add_option('--open', dest="open", action='store_true', default=False,
-                      help="""Open the HTML index in the web browser after generation""")
-    parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False,
-                      help="""Generates a tarball of the documentation in dist/ directory""")
-    parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False,
-                      help="""Hides doxygen output""")
-    parser.enable_interspersed_args()
-    options, args = parser.parse_args()
-    build_doc(options)
-
-if __name__ == '__main__':
-    main()
@@ -1,384 +0,0 @@
-"""Tag the sandbox for release, make source and doc tarballs.
-
-Requires Python 2.6
-
-Example of invocation (use to test the script):
-python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev
-
-When testing this script:
-python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev
-
-Example of invocation when doing a release:
-python makerelease.py 0.5.0 0.6.0-dev
-
-Note: This was for Subversion. Now that we are in GitHub, we do not
-need to build versioned tarballs anymore, so makerelease.py is defunct.
-"""
-from __future__ import print_function
-import os.path
-import subprocess
-import sys
-import doxybuild
-import subprocess
-import xml.etree.ElementTree as ElementTree
-import shutil
-import urllib2
-import tempfile
-import os
-import time
-from devtools import antglob, fixeol, tarball
-import amalgamate
-
-SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
-SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
-SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
-SOURCEFORGE_PROJECT = 'jsoncpp'
-
-def set_version(version):
-    with open('version','wb') as f:
-        f.write(version.strip())
-
-def rmdir_if_exist(dir_path):
-    if os.path.isdir(dir_path):
-        shutil.rmtree(dir_path)
-
-class SVNError(Exception):
-    pass
-
-def svn_command(command, *args):
-    cmd = ['svn', '--non-interactive', command] + list(args)
-    print('Running:', ' '.join(cmd))
-    process = subprocess.Popen(cmd,
-                               stdout=subprocess.PIPE,
-                               stderr=subprocess.STDOUT)
-    stdout = process.communicate()[0]
-    if process.returncode:
-        error = SVNError('SVN command failed:\n' + stdout)
-        error.returncode = process.returncode
-        raise error
-    return stdout
-
-def check_no_pending_commit():
-    """Checks that there is no pending commit in the sandbox."""
-    stdout = svn_command('status', '--xml')
-    etree = ElementTree.fromstring(stdout)
-    msg = []
-    for entry in etree.getiterator('entry'):
-        path = entry.get('path')
-        status = entry.find('wc-status').get('item')
-        if status != 'unversioned' and path != 'version':
-            msg.append('File "%s" has pending change (status="%s")' % (path, status))
-    if msg:
-        msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!')
-    return '\n'.join(msg)
-
-def svn_join_url(base_url, suffix):
-    if not base_url.endswith('/'):
-        base_url += '/'
-    if suffix.startswith('/'):
-        suffix = suffix[1:]
-    return base_url + suffix
-
-def svn_check_if_tag_exist(tag_url):
-    """Checks if a tag exist.
-    Returns: True if the tag exist, False otherwise.
-    """
-    try:
-        list_stdout = svn_command('list', tag_url)
-    except SVNError as e:
-        if e.returncode != 1 or not str(e).find('tag_url'):
-            raise e
-        # otherwise ignore error, meaning tag does not exist
-        return False
-    return True
-
-def svn_commit(message):
-    """Commit the sandbox, providing the specified comment.
-    """
-    svn_command('ci', '-m', message)
-
-def svn_tag_sandbox(tag_url, message):
-    """Makes a tag based on the sandbox revisions.
-    """
-    svn_command('copy', '-m', message, '.', tag_url)
-
-def svn_remove_tag(tag_url, message):
-    """Removes an existing tag.
-    """
-    svn_command('delete', '-m', message, tag_url)
-
-def svn_export(tag_url, export_dir):
-    """Exports the tag_url revision to export_dir.
-    Target directory, including its parent is created if it does not exist.
-    If the directory export_dir exist, it is deleted before export proceed.
-    """
-    rmdir_if_exist(export_dir)
-    svn_command('export', tag_url, export_dir)
-
-def fix_sources_eol(dist_dir):
-    """Set file EOL for tarball distribution.
-    """
-    print('Preparing exported source file EOL for distribution...')
-    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
-    win_sources = antglob.glob(dist_dir,
-        includes = '**/*.sln **/*.vcproj',
-        prune_dirs = prune_dirs)
-    unix_sources = antglob.glob(dist_dir,
-        includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
-        sconscript *.json *.expected AUTHORS LICENSE''',
-        excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
-        prune_dirs = prune_dirs)
-    for path in win_sources:
-        fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n')
-    for path in unix_sources:
-        fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n')
-
-def download(url, target_path):
-    """Download file represented by url to target_path.
-    """
-    f = urllib2.urlopen(url)
-    try:
-        data = f.read()
-    finally:
-        f.close()
-    fout = open(target_path, 'wb')
-    try:
-        fout.write(data)
-    finally:
-        fout.close()
-
-def check_compile(distcheck_top_dir, platform):
-    cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
-    print('Running:', ' '.join(cmd))
-    log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform)
-    flog = open(log_path, 'wb')
-    try:
-        process = subprocess.Popen(cmd,
-                                   stdout=flog,
-                                   stderr=subprocess.STDOUT,
-                                   cwd=distcheck_top_dir)
-        stdout = process.communicate()[0]
-        status = (process.returncode == 0)
-    finally:
-        flog.close()
-    return (status, log_path)
-
-def write_tempfile(content, **kwargs):
-    fd, path = tempfile.mkstemp(**kwargs)
-    f = os.fdopen(fd, 'wt')
-    try:
-        f.write(content)
-    finally:
-        f.close()
-    return path
-
-class SFTPError(Exception):
-    pass
-
-def run_sftp_batch(userhost, sftp, batch, retry=0):
-    path = write_tempfile(batch, suffix='.sftp', text=True)
-    # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
-    cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
-    error = None
-    for retry_index in range(0, max(1,retry)):
-        heading = retry_index == 0 and 'Running:' or 'Retrying:'
-        print(heading, ' '.join(cmd))
-        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        stdout = process.communicate()[0]
-        if process.returncode != 0:
-            error = SFTPError('SFTP batch failed:\n' + stdout)
-        else:
-            break
-    if error:
-        raise error
-    return stdout
-
-def sourceforge_web_synchro(sourceforge_project, doc_dir,
-                            user=None, sftp='sftp'):
-    """Notes: does not synchronize sub-directory of doc-dir.
-    """
-    userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
-    stdout = run_sftp_batch(userhost, sftp, """
-cd htdocs
-dir
-exit
-""")
-    existing_paths = set()
-    collect = 0
-    for line in stdout.split('\n'):
-        line = line.strip()
-        if not collect and line.endswith('> dir'):
-            collect = True
-        elif collect and line.endswith('> exit'):
-            break
-        elif collect == 1:
-            collect = 2
-        elif collect == 2:
-            path = line.strip().split()[-1:]
-            if path and path[0] not in ('.', '..'):
-                existing_paths.add(path[0])
-    upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
-    paths_to_remove = existing_paths - upload_paths
-    if paths_to_remove:
-        print('Removing the following file from web:')
-        print('\n'.join(paths_to_remove))
-        stdout = run_sftp_batch(userhost, sftp, """cd htdocs
-rm %s
-exit""" % ' '.join(paths_to_remove))
-    print('Uploading %d files:' % len(upload_paths))
-    batch_size = 10
-    upload_paths = list(upload_paths)
-    start_time = time.time()
-    for index in range(0,len(upload_paths),batch_size):
-        paths = upload_paths[index:index+batch_size]
-        file_per_sec = (time.time() - start_time) / (index+1)
-        remaining_files = len(upload_paths) - index
-        remaining_sec = file_per_sec * remaining_files
-        print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
-        run_sftp_batch(userhost, sftp, """cd htdocs
-lcd %s
-mput %s
-exit""" % (doc_dir, ' '.join(paths)), retry=3)
-
-def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'):
-    userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
-    run_sftp_batch(userhost, sftp, """
-mput %s
-exit
-""" % (' '.join(paths),))
-
-
-def main():
-    usage = """%prog release_version next_dev_version
-    Update 'version' file to release_version and commit.
-    Generates the document tarball.
-    Tags the sandbox revision with release_version.
-    Update 'version' file to next_dev_version and commit.
-
-    Performs an svn export of tag release version, and build a source tarball.
-
-    Must be started in the project top directory.
-
-    Warning: --force should only be used when developping/testing the release script.
-    """
-    from optparse import OptionParser
-    parser = OptionParser(usage=usage)
-    parser.allow_interspersed_args = False
-    parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'),
-                      help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
-    parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'),
-                      help="""Path to Doxygen tool. [Default: %default]""")
-    parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False,
-                      help="""Ignore pending commit. [Default: %default]""")
-    parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
-                      help="""Overwrite release existing tag if it exist. [Default: %default]""")
-    parser.add_option('-p', '--platforms', dest="platforms", action='store', default='',
-                      help="""Comma separated list of platform passed to scons for build check.""")
-    parser.add_option('--no-test', dest="no_test", action='store_true', default=False,
-                      help="""Skips build check.""")
-    parser.add_option('--no-web', dest="no_web", action='store_true', default=False,
-                      help="""Do not update web site.""")
-    parser.add_option('-u', '--upload-user', dest="user", action='store',
-                      help="""Sourceforge user for SFTP documentation upload.""")
-    parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'),
-                      help="""Path of the SFTP compatible binary used to upload the documentation.""")
-    parser.enable_interspersed_args()
-    options, args = parser.parse_args()
-
-    if len(args) != 2:
-        parser.error('release_version missing on command-line.')
-    release_version = args[0]
-    next_version = args[1]
-
-    if not options.platforms and not options.no_test:
-        parser.error('You must specify either --platform or --no-test option.')
-
-    if options.ignore_pending_commit:
-        msg = ''
-    else:
-        msg = check_no_pending_commit()
-    if not msg:
-        print('Setting version to', release_version)
-        set_version(release_version)
-        svn_commit('Release ' + release_version)
-        tag_url = svn_join_url(SVN_TAG_ROOT, release_version)
-        if svn_check_if_tag_exist(tag_url):
-            if options.retag_release:
-                svn_remove_tag(tag_url, 'Overwriting previous tag')
-            else:
-                print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
-                sys.exit(1)
-        svn_tag_sandbox(tag_url, 'Release ' + release_version)
-
-        print('Generated doxygen document...')
-        ## doc_dirname = r'jsoncpp-api-html-0.5.0'
-        ## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
-        doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True)
-        doc_distcheck_dir = 'dist/doccheck'
-        tarball.decompress(doc_tarball_path, doc_distcheck_dir)
-        doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname)
-
-        export_dir = 'dist/export'
-        svn_export(tag_url, export_dir)
-        fix_sources_eol(export_dir)
-
-        source_dir = 'jsoncpp-src-' + release_version
-        source_tarball_path = 'dist/%s.tar.gz' % source_dir
-        print('Generating source tarball to', source_tarball_path)
-        tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir)
-
-        amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
-        print('Generating amalgamation source tarball to', amalgamation_tarball_path)
-        amalgamation_dir = 'dist/amalgamation'
-        amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h')
-        amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
-        tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir],
-                             amalgamation_dir, prefix_dir=amalgamation_source_dir)
-
-        # Decompress source tarball, download and install scons-local
-        distcheck_dir = 'dist/distcheck'
-        distcheck_top_dir = distcheck_dir + '/' + source_dir
-        print('Decompressing source tarball to', distcheck_dir)
-        rmdir_if_exist(distcheck_dir)
-        tarball.decompress(source_tarball_path, distcheck_dir)
-        scons_local_path = 'dist/scons-local.tar.gz'
-        print('Downloading scons-local to', scons_local_path)
-        download(SCONS_LOCAL_URL, scons_local_path)
-        print('Decompressing scons-local to', distcheck_top_dir)
-        tarball.decompress(scons_local_path, distcheck_top_dir)
-
-        # Run compilation
-        print('Compiling decompressed tarball')
-        all_build_status = True
-        for platform in options.platforms.split(','):
-            print('Testing platform:', platform)
-            build_status, log_path = check_compile(distcheck_top_dir, platform)
-            print('see build log:', log_path)
-            print(build_status and '=> ok' or '=> FAILED')
-            all_build_status = all_build_status and build_status
-        if not build_status:
-            print('Testing failed on at least one platform, aborting...')
-            svn_remove_tag(tag_url, 'Removing tag due to failed testing')
-            sys.exit(1)
-        if options.user:
-            if not options.no_web:
-                print('Uploading documentation using user', options.user)
-                sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp)
-                print('Completed documentation upload')
-            print('Uploading source and documentation tarballs for release using user', options.user)
-            sourceforge_release_tarball(SOURCEFORGE_PROJECT,
-                                        [source_tarball_path, doc_tarball_path],
-                                        user=options.user, sftp=options.sftp)
-            print('Source and doc release tarballs uploaded')
-        else:
-            print('No upload user specified. Web site and download tarbal were not uploaded.')
-            print('Tarball can be found at:', doc_tarball_path)
-
-        # Set next version number and commit
-        set_version(next_version)
-        svn_commit('Released ' + release_version)
-    else:
-        sys.stderr.write(msg + '\n')
-
-if __name__ == '__main__':
-    main()
@@ -4,9 +4,9 @@
 // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
 
 #if !defined(JSON_IS_AMALGAMATION)
-#include <json/assertions.h>
-#include <json/reader.h>
-#include <json/value.h>
+#include <jsoncpp/assertions.h>
+#include <jsoncpp/reader.h>
+#include <jsoncpp/value.h>
 #include "json_tool.h"
 #endif // if !defined(JSON_IS_AMALGAMATION)
 #include <utility>
@@ -4,9 +4,9 @@
 // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
 
 #if !defined(JSON_IS_AMALGAMATION)
-#include <json/assertions.h>
-#include <json/value.h>
-#include <json/writer.h>
+#include <jsoncpp/assertions.h>
+#include <jsoncpp/value.h>
+#include <jsoncpp/writer.h>
 #endif // if !defined(JSON_IS_AMALGAMATION)
 #include <math.h>
 #include <sstream>
@@ -4,7 +4,7 @@
 // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
 
 #if !defined(JSON_IS_AMALGAMATION)
-#include <json/writer.h>
+#include <jsoncpp/writer.h>
 #include "json_tool.h"
 #endif // if !defined(JSON_IS_AMALGAMATION)
 #include <iomanip>